sde_rotator_util.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2012, 2015-2019, 2021, The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt) "%s: " fmt, __func__

#include <linux/qcom-dma-mapping.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/file.h>
#include <linux/spinlock.h>
#include <linux/types.h>
#include <linux/major.h>
#include <linux/dma-buf.h>
#include <linux/debugfs.h>
#include <linux/delay.h>
#include <linux/wait.h>
#include <linux/clk.h>
#include <linux/slab.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/regulator/consumer.h>
#include <media/msm_media_info.h>
#include <linux/videodev2.h>
#include <linux/ion.h>

#include "sde_rotator_util.h"
#include "sde_rotator_smmu.h"
#include "sde_rotator_debug.h"

#define Y_TILEWIDTH 48
#define Y_TILEHEIGHT 4
#define UV_TILEWIDTH 48
#define UV_TILEHEIGHT 8
#define TILEWIDTH_SIZE 64
#define TILEHEIGHT_SIZE 4
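
/*
 * sde_mdp_get_v_h_subsample_rate() - get subsample rates for a chroma sample
 * @chroma_sample - SDE_MDP_CHROMA_* sampling type
 * @v_sample - returned vertical subsample rate
 * @h_sample - returned horizontal subsample rate
 *
 * Unknown sampling types default to 1x1 (no subsampling).
 */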
void sde_mdp_get_v_h_subsample_rate(u8 chroma_sample,
		u8 *v_sample, u8 *h_sample)
{
	switch (chroma_sample) {
	case SDE_MDP_CHROMA_H2V1:
		*v_sample = 1;
		*h_sample = 2;
		break;
	case SDE_MDP_CHROMA_H1V2:
		*v_sample = 2;
		*h_sample = 1;
		break;
	case SDE_MDP_CHROMA_420:
		*v_sample = 2;
		*h_sample = 2;
		break;
	default:
		*v_sample = 1;
		*h_sample = 1;
		break;
	}
}
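
/*
 * sde_rot_intersect_rect() - intersect destination and scissor rectangles
 * @res_rect - resulting intersection rectangle
 * @dst_rect - destination rectangle
 * @sci_rect - scissor rectangle
 *
 * Returns a zero-sized rectangle in @res_rect if the inputs do not overlap.
 */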
void sde_rot_intersect_rect(struct sde_rect *res_rect,
	const struct sde_rect *dst_rect,
	const struct sde_rect *sci_rect)
{
	int l = max(dst_rect->x, sci_rect->x);
	int t = max(dst_rect->y, sci_rect->y);
	int r = min((dst_rect->x + dst_rect->w), (sci_rect->x + sci_rect->w));
	int b = min((dst_rect->y + dst_rect->h), (sci_rect->y + sci_rect->h));

	if (r < l || b < t)
		*res_rect = (struct sde_rect){0, 0, 0, 0};
	else
		*res_rect = (struct sde_rect){l, t, (r-l), (b-t)};
}
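
/*
 * sde_rot_crop_rect() - crop src/dst rectangles against a scissor rectangle
 * @src_rect - source rectangle, shifted and resized by the same amount the
 *             destination is cropped
 * @dst_rect - destination rectangle, clipped to the scissor rectangle and
 *             made relative to the scissor origin
 * @sci_rect - scissor rectangle
 */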
void sde_rot_crop_rect(struct sde_rect *src_rect,
	struct sde_rect *dst_rect,
	const struct sde_rect *sci_rect)
{
	struct sde_rect res;

	sde_rot_intersect_rect(&res, dst_rect, sci_rect);

	if (res.w && res.h) {
		if ((res.w != dst_rect->w) || (res.h != dst_rect->h)) {
			src_rect->x = src_rect->x + (res.x - dst_rect->x);
			src_rect->y = src_rect->y + (res.y - dst_rect->y);
			src_rect->w = res.w;
			src_rect->h = res.h;
		}
		*dst_rect = (struct sde_rect)
			{(res.x - sci_rect->x), (res.y - sci_rect->y),
			res.w, res.h};
	}
}

/*
 * sde_rect_cmp() - compares two rects
 * @rect1 - rect value to compare
 * @rect2 - rect value to compare
 *
 * Returns 1 if the rects are same, 0 otherwise.
 */
int sde_rect_cmp(struct sde_rect *rect1, struct sde_rect *rect2)
{
	return rect1->x == rect2->x && rect1->y == rect2->y &&
		rect1->w == rect2->w && rect1->h == rect2->h;
}

/*
 * sde_rect_overlap_check() - compare two rects and check if they overlap
 * @rect1 - rect value to compare
 * @rect2 - rect value to compare
 *
 * Returns true if rects overlap, false otherwise.
 */
bool sde_rect_overlap_check(struct sde_rect *rect1, struct sde_rect *rect2)
{
	u32 rect1_left = rect1->x, rect1_right = rect1->x + rect1->w;
	u32 rect1_top = rect1->y, rect1_bottom = rect1->y + rect1->h;
	u32 rect2_left = rect2->x, rect2_right = rect2->x + rect2->w;
	u32 rect2_top = rect2->y, rect2_bottom = rect2->y + rect2->h;

	if ((rect1_right <= rect2_left) ||
		(rect1_left >= rect2_right) ||
		(rect1_bottom <= rect2_top) ||
		(rect1_top >= rect2_bottom))
		return false;

	return true;
}
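
/*
 * sde_mdp_get_rau_strides() - compute RAU strides and heights for BWC mode
 * @w - frame width in pixels
 * @h - frame height in pixels
 * @fmt - pixel format description
 * @ps - plane sizes structure to populate
 *
 * Returns -EINVAL for formats that are neither YUV nor interleaved.
 */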
int sde_mdp_get_rau_strides(u32 w, u32 h,
	struct sde_mdp_format_params *fmt,
	struct sde_mdp_plane_sizes *ps)
{
	if (fmt->is_yuv) {
		ps->rau_cnt = DIV_ROUND_UP(w, 64);
		ps->ystride[0] = 64 * 4;
		ps->rau_h[0] = 4;
		ps->rau_h[1] = 2;
		if (fmt->chroma_sample == SDE_MDP_CHROMA_H1V2)
			ps->ystride[1] = 64 * 2;
		else if (fmt->chroma_sample == SDE_MDP_CHROMA_H2V1) {
			ps->ystride[1] = 32 * 4;
			ps->rau_h[1] = 4;
		} else
			ps->ystride[1] = 32 * 2;

		/* account for both chroma components */
		ps->ystride[1] <<= 1;
	} else if (fmt->fetch_planes == SDE_MDP_PLANE_INTERLEAVED) {
		ps->rau_cnt = DIV_ROUND_UP(w, 32);
		ps->ystride[0] = 32 * 4 * fmt->bpp;
		ps->ystride[1] = 0;
		ps->rau_h[0] = 4;
		ps->rau_h[1] = 0;
	} else {
		SDEROT_ERR("Invalid format=%d\n", fmt->format);
		return -EINVAL;
	}

	ps->ystride[0] *= ps->rau_cnt;
	ps->ystride[1] *= ps->rau_cnt;
	ps->num_planes = 2;

	SDEROT_DBG("BWC rau_cnt=%d strides={%d,%d} heights={%d,%d}\n",
		ps->rau_cnt, ps->ystride[0], ps->ystride[1],
		ps->rau_h[0], ps->rau_h[1]);

	return 0;
}
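
/*
 * sde_mdp_get_a5x_plane_size() - compute plane sizes for A5X tile formats
 * @fmt - pixel format description
 * @width - frame width in pixels
 * @height - frame height in pixels
 * @ps - plane sizes structure to populate
 *
 * Handles NV12 8-bit, P010, TP10 and RGB tile layouts. For UBWC formats,
 * additional metadata plane(s) are populated as well.
 */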
static int sde_mdp_get_a5x_plane_size(struct sde_mdp_format_params *fmt,
	u32 width, u32 height, struct sde_mdp_plane_sizes *ps)
{
	int rc = 0;

	if (sde_mdp_is_nv12_8b_format(fmt)) {
		ps->num_planes = 2;
		/* Y bitstream stride and plane size */
		ps->ystride[0] = ALIGN(width, 128);
		ps->plane_size[0] = ALIGN(ps->ystride[0] * ALIGN(height, 32),
					4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = ALIGN(width, 128);
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
			ALIGN(height / 2, 32), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(width, 32), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 8), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
			ALIGN(DIV_ROUND_UP(height / 2, 8), 16), 4096);
	} else if (sde_mdp_is_p010_format(fmt)) {
		ps->num_planes = 2;
		/* Y bitstream stride and plane size */
		ps->ystride[0] = ALIGN(width * 2, 256);
		ps->plane_size[0] = ALIGN(ps->ystride[0] * ALIGN(height, 16),
					4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = ALIGN(width * 2, 256);
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
			ALIGN(height / 2, 16), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(width, 32), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 4), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
			ALIGN(DIV_ROUND_UP(height / 2, 4), 16), 4096);
	} else if (sde_mdp_is_tp10_format(fmt)) {
		u32 yWidth = sde_mdp_general_align(width, 192);
		u32 yHeight = ALIGN(height, 16);
		u32 uvWidth = sde_mdp_general_align(width, 192);
		u32 uvHeight = ALIGN(height, 32);

		ps->num_planes = 2;

		/* Y bitstream stride and plane size */
		ps->ystride[0] = yWidth * TILEWIDTH_SIZE / Y_TILEWIDTH;
		ps->plane_size[0] = ALIGN(ps->ystride[0] *
				(yHeight * TILEHEIGHT_SIZE / Y_TILEHEIGHT),
				4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = uvWidth * TILEWIDTH_SIZE / UV_TILEWIDTH;
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
				(uvHeight * TILEHEIGHT_SIZE / UV_TILEHEIGHT),
				4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(yWidth / Y_TILEWIDTH, 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN((yHeight / Y_TILEHEIGHT), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(uvWidth / UV_TILEWIDTH, 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
			ALIGN((uvHeight / UV_TILEHEIGHT), 16), 4096);
	} else if (sde_mdp_is_rgb_format(fmt)) {
		uint32_t stride_alignment, bpp, aligned_bitstream_width;

		if (fmt->format == SDE_PIX_FMT_RGB_565_UBWC) {
			stride_alignment = 128;
			bpp = 2;
		} else {
			stride_alignment = 64;
			bpp = 4;
		}

		ps->num_planes = 1;

		/* RGB bitstream stride and plane size */
		aligned_bitstream_width = ALIGN(width, stride_alignment);
		ps->ystride[0] = aligned_bitstream_width * bpp;
		ps->plane_size[0] = ALIGN(bpp * aligned_bitstream_width *
			ALIGN(height, 16), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 1;

		/* RGB meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(aligned_bitstream_width,
			16), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 4), 16), 4096);
	} else {
		SDEROT_ERR("%s: UBWC format not supported for fmt:%d\n",
			__func__, fmt->format);
		rc = -EINVAL;
	}
done:
	return rc;
}
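
/*
 * sde_mdp_get_plane_sizes() - calculate per-plane strides and sizes
 * @fmt - pixel format description
 * @w - frame width in pixels
 * @h - frame height in pixels
 * @ps - plane sizes structure to populate
 * @bwc_mode - non-zero if bandwidth compression is enabled
 * @rotation - rotation session flag
 *
 * Returns 0 on success, -ERANGE if the dimensions exceed the supported
 * maximum, or another negative error code for unsupported formats.
 */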
int sde_mdp_get_plane_sizes(struct sde_mdp_format_params *fmt, u32 w, u32 h,
	struct sde_mdp_plane_sizes *ps, u32 bwc_mode, bool rotation)
{
	int i, rc = 0;
	u32 bpp;

	if (ps == NULL)
		return -EINVAL;

	if ((w > SDE_ROT_MAX_IMG_WIDTH) || (h > SDE_ROT_MAX_IMG_HEIGHT))
		return -ERANGE;

	bpp = fmt->bpp;
	memset(ps, 0, sizeof(struct sde_mdp_plane_sizes));

	if (sde_mdp_is_tilea5x_format(fmt)) {
		rc = sde_mdp_get_a5x_plane_size(fmt, w, h, ps);
	} else if (bwc_mode) {
		u32 height, meta_size;

		rc = sde_mdp_get_rau_strides(w, h, fmt, ps);
		if (rc)
			return rc;

		height = DIV_ROUND_UP(h, ps->rau_h[0]);
		meta_size = DIV_ROUND_UP(ps->rau_cnt, 8);
		ps->ystride[1] += meta_size;
		ps->ystride[0] += ps->ystride[1] + meta_size;
		ps->plane_size[0] = ps->ystride[0] * height;

		ps->ystride[1] = 2;
		ps->plane_size[1] = 2 * ps->rau_cnt * height;

		SDEROT_DBG("BWC data stride=%d size=%d meta size=%d\n",
			ps->ystride[0], ps->plane_size[0], ps->plane_size[1]);
	} else {
		if (fmt->fetch_planes == SDE_MDP_PLANE_INTERLEAVED) {
			ps->num_planes = 1;
			ps->plane_size[0] = w * h * bpp;
			ps->ystride[0] = w * bpp;
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_VENUS ||
			fmt->format == SDE_PIX_FMT_Y_CRCB_H2V2_VENUS ||
			fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_P010_VENUS) {
			int cf;

			switch (fmt->format) {
			case SDE_PIX_FMT_Y_CBCR_H2V2_VENUS:
				cf = COLOR_FMT_NV12;
				break;
			case SDE_PIX_FMT_Y_CRCB_H2V2_VENUS:
				cf = COLOR_FMT_NV21;
				break;
			case SDE_PIX_FMT_Y_CBCR_H2V2_P010_VENUS:
				cf = COLOR_FMT_P010;
				break;
			default:
				SDEROT_ERR("unknown color format %d\n",
					fmt->format);
				return -EINVAL;
			}

			ps->num_planes = 2;
			ps->ystride[0] = VENUS_Y_STRIDE(cf, w);
			ps->ystride[1] = VENUS_UV_STRIDE(cf, w);
			ps->plane_size[0] = VENUS_Y_SCANLINES(cf, h) *
				ps->ystride[0];
			ps->plane_size[1] = VENUS_UV_SCANLINES(cf, h) *
				ps->ystride[1];
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_P010) {
			/*
			 * |<---Y1--->000000<---Y0--->000000|  Plane0
			 * |rrrrrrrrrr000000bbbbbbbbbb000000|  Plane1
			 * |--------------------------------|
			 *  33222222222211111111110000000000  Bit
			 *  10987654321098765432109876543210  Location
			 */
			ps->num_planes = 2;
			ps->ystride[0] = w * 2;
			ps->ystride[1] = w * 2;
			ps->plane_size[0] = ps->ystride[0] * h;
			ps->plane_size[1] = ps->ystride[1] * h / 2;
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_TP10) {
			u32 yWidth = sde_mdp_general_align(w, 192);
			u32 yHeight = ALIGN(h, 16);
			u32 uvWidth = sde_mdp_general_align(w, 192);
			u32 uvHeight = (ALIGN(h, 32)) / 2;

			ps->num_planes = 2;

			ps->ystride[0] = (yWidth / 3) * 4;
			ps->ystride[1] = (uvWidth / 3) * 4;
			ps->plane_size[0] = ALIGN(ps->ystride[0] * yHeight,
				4096);
			ps->plane_size[1] = ALIGN(ps->ystride[1] * uvHeight,
				4096);
		} else {
			u8 v_subsample, h_subsample, stride_align, height_align;
			u32 chroma_samp;

			chroma_samp = fmt->chroma_sample;

			sde_mdp_get_v_h_subsample_rate(chroma_samp,
				&v_subsample, &h_subsample);

			switch (fmt->format) {
			case SDE_PIX_FMT_Y_CR_CB_GH2V2:
				stride_align = 16;
				height_align = 1;
				break;
			default:
				stride_align = 1;
				height_align = 1;
				break;
			}

			ps->ystride[0] = ALIGN(w, stride_align);
			ps->ystride[1] = ALIGN(w / h_subsample, stride_align);
			ps->plane_size[0] = ps->ystride[0] *
				ALIGN(h, height_align);
			ps->plane_size[1] = ps->ystride[1] * (h / v_subsample);

			if (fmt->fetch_planes == SDE_MDP_PLANE_PSEUDO_PLANAR) {
				ps->num_planes = 2;
				ps->plane_size[1] *= 2;
				ps->ystride[1] *= 2;
			} else { /* planar */
				ps->num_planes = 3;
				ps->plane_size[2] = ps->plane_size[1];
				ps->ystride[2] = ps->ystride[1];
			}
		}
	}

	/* Safe to use MAX_PLANES as ps is memset at start of function */
	for (i = 0; i < SDE_ROT_MAX_PLANES; i++)
		ps->total_size += ps->plane_size[i];

	return rc;
}
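
/*
 * sde_mdp_a5x_data_check() - split a single A5X/UBWC allocation into planes
 * @data - buffer descriptor to be fixed up in place
 * @ps - expected plane sizes for the format
 * @fmt - pixel format description
 *
 * When the client passes one contiguous allocation in plane 0, derive the
 * per-plane addresses and lengths from it and validate them against the
 * expected plane sizes.
 */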
static int sde_mdp_a5x_data_check(struct sde_mdp_data *data,
			struct sde_mdp_plane_sizes *ps,
			struct sde_mdp_format_params *fmt)
{
	int i, inc;
	unsigned long data_size = 0;
	dma_addr_t base_addr;

	if (data->p[0].len == ps->plane_size[0])
		goto end;

	/* From this point, assumption is plane 0 is to be divided */
	data_size = data->p[0].len;
	if (data_size < ps->total_size) {
		SDEROT_ERR(
			"insufficient current mem len=%lu required mem len=%u\n",
			data_size, ps->total_size);
		return -ENOMEM;
	}

	base_addr = data->p[0].addr;

	if (sde_mdp_is_yuv_format(fmt)) {
		/************************************************/
		/*      UBWC            **                      */
		/*      buffer          **      MDP PLANE       */
		/*      format          **                      */
		/************************************************/
		/* -------------------  ** -------------------- */
		/* |      Y meta     |  ** |    Y bitstream   | */
		/* |       data      |  ** |       plane      | */
		/* -------------------  ** -------------------- */
		/* |    Y bitstream  |  ** |  CbCr bitstream  | */
		/* |       data      |  ** |       plane      | */
		/* -------------------  ** -------------------- */
		/* |   Cbcr metadata |  ** |      Y meta      | */
		/* |       data      |  ** |       plane      | */
		/* -------------------  ** -------------------- */
		/* |  CbCr bitstream |  ** |     CbCr meta    | */
		/* |       data      |  ** |       plane      | */
		/* -------------------  ** -------------------- */
		/************************************************/

		/* configure Y bitstream plane */
		data->p[0].addr = base_addr + ps->plane_size[2];
		data->p[0].len = ps->plane_size[0];

		/* configure CbCr bitstream plane */
		data->p[1].addr = base_addr + ps->plane_size[0]
			+ ps->plane_size[2] + ps->plane_size[3];
		data->p[1].len = ps->plane_size[1];

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		/* configure Y metadata plane */
		data->p[2].addr = base_addr;
		data->p[2].len = ps->plane_size[2];

		/* configure CbCr metadata plane */
		data->p[3].addr = base_addr + ps->plane_size[0]
			+ ps->plane_size[2];
		data->p[3].len = ps->plane_size[3];
	} else {
		/************************************************/
		/*      UBWC            **                      */
		/*      buffer          **      MDP PLANE       */
		/*      format          **                      */
		/************************************************/
		/* -------------------  ** -------------------- */
		/* |     RGB meta    |  ** |   RGB bitstream  | */
		/* |       data      |  ** |       plane      | */
		/* -------------------  ** -------------------- */
		/* |  RGB bitstream  |  ** |       NONE       | */
		/* |       data      |  ** |                  | */
		/* -------------------  ** -------------------- */
		/*                      ** |     RGB meta     | */
		/*                      ** |       plane      | */
		/*                      ** -------------------- */
		/************************************************/

		/* configure RGB bitstream plane */
		data->p[0].addr = base_addr + ps->plane_size[2];
		data->p[0].len = ps->plane_size[0];

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		/* configure RGB metadata plane */
		data->p[2].addr = base_addr;
		data->p[2].len = ps->plane_size[2];
	}
done:
	data->num_planes = ps->num_planes;
end:
	if (data->num_planes != ps->num_planes) {
		SDEROT_ERR("num_planes don't match: fmt:%d, data:%d, ps:%d\n",
			fmt->format, data->num_planes, ps->num_planes);
		return -EINVAL;
	}

	inc = (sde_mdp_is_yuv_format(fmt) ? 1 : 2);
	for (i = 0; i < SDE_ROT_MAX_PLANES; i += inc) {
		if (data->p[i].len != ps->plane_size[i]) {
			SDEROT_ERR(
				"plane:%d fmt:%d, len does not match: data:%lu, ps:%d\n",
				i, fmt->format, data->p[i].len,
				ps->plane_size[i]);
			return -EINVAL;
		}
	}

	return 0;
}
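
/*
 * sde_mdp_data_check() - validate buffer planes against expected sizes
 * @data - buffer descriptor to validate, fixed up if planes are packed
 *         into fewer allocations than the format requires
 * @ps - expected plane sizes for the format
 * @fmt - pixel format description
 *
 * Returns 0 if every plane is large enough, or a negative error code
 * otherwise.
 */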
int sde_mdp_data_check(struct sde_mdp_data *data,
			struct sde_mdp_plane_sizes *ps,
			struct sde_mdp_format_params *fmt)
{
	struct sde_mdp_img_data *prev, *curr;
	int i;

	if (!ps)
		return 0;

	if (!data || data->num_planes == 0)
		return -ENOMEM;

	if (sde_mdp_is_tilea5x_format(fmt))
		return sde_mdp_a5x_data_check(data, ps, fmt);

	SDEROT_DBG("srcp0=%pa len=%lu frame_size=%u\n", &data->p[0].addr,
		data->p[0].len, ps->total_size);

	for (i = 0; i < ps->num_planes; i++) {
		curr = &data->p[i];
		if (i >= data->num_planes) {
			u32 psize = ps->plane_size[i-1];

			prev = &data->p[i-1];
			if (prev->len > psize) {
				curr->len = prev->len - psize;
				prev->len = psize;
			}
			curr->addr = prev->addr + psize;
		}
		if (curr->len < ps->plane_size[i]) {
			SDEROT_ERR("insufficient mem=%lu p=%d len=%u\n",
				curr->len, i, ps->plane_size[i]);
			return -ENOMEM;
		}
		SDEROT_DBG("plane[%d] addr=%pa len=%lu\n", i,
			&curr->addr, curr->len);
	}
	data->num_planes = ps->num_planes;

	return 0;
}
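
/*
 * sde_validate_offset_for_ubwc_format() - check UBWC offset alignment
 * @fmt - pixel format description
 * @x - horizontal offset in pixels
 * @y - vertical offset in pixels
 *
 * Both offsets must be aligned to the UBWC meta macro tile dimensions.
 */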
int sde_validate_offset_for_ubwc_format(
	struct sde_mdp_format_params *fmt, u16 x, u16 y)
{
	int ret;
	u16 micro_w = 0, micro_h = 0;

	ret = sde_rot_get_ubwc_micro_dim(fmt->format, &micro_w, &micro_h);
	if (ret || !micro_w || !micro_h) {
		SDEROT_ERR("Could not get valid micro tile dimensions\n");
		return -EINVAL;
	}

	if (x % (micro_w * UBWC_META_MACRO_W_H)) {
		SDEROT_ERR("x=%d does not align with meta width=%d\n", x,
			micro_w * UBWC_META_MACRO_W_H);
		return -EINVAL;
	}

	if (y % (micro_h * UBWC_META_MACRO_W_H)) {
		SDEROT_ERR("y=%d does not align with meta height=%d\n", y,
			micro_h * UBWC_META_MACRO_W_H);
		return -EINVAL;
	}

	return ret;
}

/* x and y are assumed to be valid, expected to line up with start of tiles */
void sde_rot_ubwc_data_calc_offset(struct sde_mdp_data *data, u16 x, u16 y,
	struct sde_mdp_plane_sizes *ps, struct sde_mdp_format_params *fmt)
{
	u16 macro_w, micro_w, micro_h;
	u32 offset = 0;
	int ret;

	ret = sde_rot_get_ubwc_micro_dim(fmt->format, &micro_w, &micro_h);
	if (ret || !micro_w || !micro_h) {
		SDEROT_ERR("Could not get valid micro tile dimensions\n");
		return;
	}

	macro_w = 4 * micro_w;

	if (sde_mdp_is_nv12_8b_format(fmt)) {
		u16 chroma_macro_w = macro_w / 2;
		u16 chroma_micro_w = micro_w / 2;

		/* plane 1 and 3 are chroma, with sub sample of 2 */
		offset = y * ps->ystride[0] +
			(x / macro_w) * 4096;
		if (offset < data->p[0].len) {
			data->p[0].addr += offset;
		} else {
			ret = 1;
			goto done;
		}

		offset = y / 2 * ps->ystride[1] +
			((x / 2) / chroma_macro_w) * 4096;
		if (offset < data->p[1].len) {
			data->p[1].addr += offset;
		} else {
			ret = 2;
			goto done;
		}

		offset = (y / micro_h) * ps->ystride[2] +
			((x / micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[2].len) {
			data->p[2].addr += offset;
		} else {
			ret = 3;
			goto done;
		}

		offset = ((y / 2) / micro_h) * ps->ystride[3] +
			(((x / 2) / chroma_micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[3].len) {
			data->p[3].addr += offset;
		} else {
			ret = 4;
			goto done;
		}
	} else if (sde_mdp_is_nv12_10b_format(fmt)) {
		/* TODO: */
		SDEROT_ERR("%c%c%c%c format not implemented yet",
			fmt->format >> 0, fmt->format >> 8,
			fmt->format >> 16, fmt->format >> 24);
		ret = 1;
		goto done;
	} else {
		offset = y * ps->ystride[0] +
			(x / macro_w) * 4096;
		if (offset < data->p[0].len) {
			data->p[0].addr += offset;
		} else {
			ret = 1;
			goto done;
		}

		offset = DIV_ROUND_UP(y, micro_h) * ps->ystride[2] +
			((x / micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[2].len) {
			data->p[2].addr += offset;
		} else {
			ret = 3;
			goto done;
		}
	}
done:
	if (ret) {
		WARN(1, "idx %d, offsets:%u too large for buflen %lu\n",
			(ret - 1), offset, data->p[(ret - 1)].len);
	}
}
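
/*
 * sde_rot_data_calc_offset() - advance plane addresses to an (x, y) offset
 * @data - buffer descriptor whose plane addresses are adjusted
 * @x - horizontal offset in pixels
 * @y - vertical offset in pixels
 * @ps - plane strides and sizes for the format
 * @fmt - pixel format description
 */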
void sde_rot_data_calc_offset(struct sde_mdp_data *data, u16 x, u16 y,
	struct sde_mdp_plane_sizes *ps, struct sde_mdp_format_params *fmt)
{
	if ((x == 0) && (y == 0))
		return;

	if (sde_mdp_is_tilea5x_format(fmt)) {
		sde_rot_ubwc_data_calc_offset(data, x, y, ps, fmt);
		return;
	}

	data->p[0].addr += y * ps->ystride[0];

	if (data->num_planes == 1) {
		data->p[0].addr += x * fmt->bpp;
	} else {
		u16 xoff, yoff;
		u8 v_subsample, h_subsample;

		sde_mdp_get_v_h_subsample_rate(fmt->chroma_sample,
			&v_subsample, &h_subsample);

		xoff = x / h_subsample;
		yoff = y / v_subsample;

		data->p[0].addr += x;
		data->p[1].addr += xoff + (yoff * ps->ystride[1]);
		if (data->num_planes == 2) /* pseudo planar */
			data->p[1].addr += xoff;
		else /* planar */
			data->p[2].addr += xoff + (yoff * ps->ystride[2]);
	}
}
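
/*
 * sde_smmu_get_domain_type() - select the SMMU domain for a buffer
 * @flags - buffer session flags
 * @rotator - rotator usage flag (not used in the selection)
 *
 * Secure overlay sessions use the secure rotator domain; all other
 * sessions use the non-secure rotator domain.
 */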
static int sde_smmu_get_domain_type(u32 flags, bool rotator)
{
	int type;

	if (flags & SDE_SECURE_OVERLAY_SESSION)
		type = SDE_IOMMU_DOMAIN_ROT_SECURE;
	else
		type = SDE_IOMMU_DOMAIN_ROT_UNSECURE;

	return type;
}

static int sde_mdp_is_map_needed(struct sde_mdp_img_data *data)
{
	if (data->flags & SDE_SECURE_CAMERA_SESSION)
		return false;
	return true;
}

static int sde_mdp_put_img(struct sde_mdp_img_data *data, bool rotator,
		int dir)
{
	u32 domain;

	if (data->flags & SDE_ROT_EXT_IOVA) {
		SDEROT_DBG("buffer %pad/%lx is client mapped\n",
				&data->addr, data->len);
		return 0;
	}

	if (!IS_ERR_OR_NULL(data->srcp_dma_buf)) {
		SDEROT_DBG("ion hdl=%pK buf=0x%pa\n", data->srcp_dma_buf,
				&data->addr);
		if (sde_mdp_is_map_needed(data) && data->mapped) {
			domain = sde_smmu_get_domain_type(data->flags,
				rotator);
			data->mapped = false;
			SDEROT_DBG("unmap %pad/%lx d:%u f:%x\n", &data->addr,
					data->len, domain, data->flags);
		}
		if (!data->skip_detach) {
			data->srcp_attachment->dma_map_attrs |=
				DMA_ATTR_DELAYED_UNMAP;
			dma_buf_unmap_attachment(data->srcp_attachment,
				data->srcp_table, dir);
			dma_buf_detach(data->srcp_dma_buf,
					data->srcp_attachment);
			if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
				dma_buf_put(data->srcp_dma_buf);
				data->srcp_dma_buf = NULL;
			}
			data->skip_detach = true;
		}
	} else {
		return -ENOMEM;
	}

	return 0;
}
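
/*
 * sde_mdp_get_img() - resolve a client buffer into a dma-buf attachment
 * @img - client-provided buffer description
 * @data - image data descriptor to populate
 * @dev - device the buffer will be attached to
 * @rotator - true when called for the rotator block
 * @dir - DMA data direction
 *
 * Client-mapped IOVA buffers are used as-is; all other buffers are attached
 * here and mapped later by sde_mdp_map_buffer().
 */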
static int sde_mdp_get_img(struct sde_fb_data *img,
		struct sde_mdp_img_data *data, struct device *dev,
		bool rotator, int dir)
{
	int ret = -EINVAL;
	u32 domain;

	data->flags |= img->flags;
	data->offset = img->offset;
	if (data->flags & SDE_ROT_EXT_DMA_BUF) {
		data->srcp_dma_buf = img->buffer;
	} else if (data->flags & SDE_ROT_EXT_IOVA) {
		data->addr = img->addr;
		data->len = img->len;
		SDEROT_DBG("use client %pad/%lx\n", &data->addr, data->len);
		return 0;
	} else if (IS_ERR(data->srcp_dma_buf)) {
		SDEROT_ERR("error on ion_import_fd\n");
		ret = PTR_ERR(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
		return ret;
	}

	if (sde_mdp_is_map_needed(data)) {
		domain = sde_smmu_get_domain_type(data->flags, rotator);

		SDEROT_DBG("%d domain=%d ihndl=%pK\n",
				__LINE__, domain, data->srcp_dma_buf);
		data->srcp_attachment =
			sde_smmu_dma_buf_attach(data->srcp_dma_buf, dev,
					domain);
		if (IS_ERR(data->srcp_attachment)) {
			SDEROT_ERR("%d Failed to attach dma buf\n", __LINE__);
			ret = PTR_ERR(data->srcp_attachment);
			goto err_put;
		}
	} else {
		data->srcp_attachment = dma_buf_attach(
				data->srcp_dma_buf, dev);
		if (IS_ERR(data->srcp_attachment)) {
			SDEROT_ERR(
				"Failed to attach dma buf for secure camera\n");
			ret = PTR_ERR(data->srcp_attachment);
			goto err_put;
		}
	}

	SDEROT_DBG("%d attach=%pK\n", __LINE__, data->srcp_attachment);
	data->addr = 0;
	data->len = 0;
	data->mapped = false;
	data->skip_detach = false;
	/* return early, mapping will be done later */

	return 0;
err_put:
	if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
		dma_buf_put(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
	}
	return ret;
}
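
/*
 * sde_mdp_map_buffer() - map an attached dma-buf and record its address
 * @data - image data descriptor populated by sde_mdp_get_img()
 * @rotator - true when called for the rotator block
 * @dir - DMA data direction
 *
 * For secure camera buffers the physical address is used directly; such
 * buffers must be a single contiguous segment ending below the 4 GB
 * physical boundary.
 */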
static int sde_mdp_map_buffer(struct sde_mdp_img_data *data, bool rotator,
		int dir)
{
	int ret = -EINVAL;
	struct scatterlist *sg;
	struct sg_table *sgt = NULL;
	unsigned int i;
	unsigned long flags = 0;

	if (data->addr && data->len)
		return 0;

	if (data->flags & SDE_ROT_EXT_IOVA) {
		SDEROT_DBG("buffer %pad/%lx is client mapped\n",
				&data->addr, data->len);
		return 0;
	}

	if (!IS_ERR_OR_NULL(data->srcp_dma_buf)) {
		/*
		 * dma_buf_map_attachment will call into
		 * dma_map_sg_attrs, and so all cache maintenance
		 * attribute and lazy unmap attribute will be all
		 * provided here.
		 */
		data->srcp_attachment->dma_map_attrs |=
			DMA_ATTR_DELAYED_UNMAP;

		if (data->srcp_dma_buf && data->srcp_dma_buf->ops &&
				data->srcp_dma_buf->ops->get_flags) {
			if (data->srcp_dma_buf->ops->get_flags(
					data->srcp_dma_buf,
					&flags) == 0) {
				if ((flags & ION_FLAG_CACHED) == 0) {
					SDEROT_DBG("dmabuf is uncached type\n");
					data->srcp_attachment->dma_map_attrs |=
						DMA_ATTR_SKIP_CPU_SYNC;
				}
			}
		}

		sgt = dma_buf_map_attachment(
				data->srcp_attachment, dir);
		if (IS_ERR_OR_NULL(sgt) ||
				IS_ERR_OR_NULL(sgt->sgl)) {
			SDEROT_ERR("Failed to map attachment\n");
			ret = PTR_ERR(sgt);
			goto err_detach;
		}
		data->srcp_table = sgt;

		data->len = 0;
		for_each_sg(sgt->sgl, sg, sgt->nents, i) {
			data->len += sg->length;
		}

		if (sde_mdp_is_map_needed(data)) {
			data->addr = data->srcp_table->sgl->dma_address;
			SDEROT_DBG("map %pad/%lx f:%x\n",
					&data->addr,
					data->len,
					data->flags);
			data->mapped = true;
			ret = 0;
		} else {
			if (sgt->nents != 1) {
				SDEROT_ERR(
					"Fail ion buffer mapping for secure camera\n");
				ret = -EINVAL;
				goto err_unmap;
			}

			if (((uint64_t)sg_dma_address(sgt->sgl) >=
					PHY_ADDR_4G - sgt->sgl->length)) {
				SDEROT_ERR(
					"ion buffer mapped size invalid, size=%d\n",
					sgt->sgl->length);
				ret = -EINVAL;
				goto err_unmap;
			}

			data->addr = sg_phys(data->srcp_table->sgl);
			ret = 0;
		}
	}

	if (!data->addr) {
		SDEROT_ERR("start address is zero!\n");
		sde_mdp_put_img(data, rotator, dir);
		return -ENOMEM;
	}

	if (!ret && (data->offset < data->len)) {
		data->addr += data->offset;
		data->len -= data->offset;

		SDEROT_DBG("ihdl=%pK buf=0x%pa len=0x%lx\n",
			data->srcp_dma_buf, &data->addr, data->len);
	} else {
		sde_mdp_put_img(data, rotator, dir);
		return ret ? : -EOVERFLOW;
	}

	return ret;

err_unmap:
	dma_buf_unmap_attachment(data->srcp_attachment, data->srcp_table, dir);
err_detach:
	dma_buf_detach(data->srcp_dma_buf, data->srcp_attachment);
	if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
		dma_buf_put(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
	}
	return ret;
}
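
/*
 * sde_mdp_data_get() - acquire all planes of a client buffer
 * @data - buffer descriptor to populate
 * @planes - client-provided plane descriptions
 * @num_planes - number of planes supplied
 * @flags - session flags applied to every plane
 * @dev - device the buffers will be attached to
 * @rotator - true when called for the rotator block
 * @dir - DMA data direction
 *
 * On failure, planes acquired so far are released.
 */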
static int sde_mdp_data_get(struct sde_mdp_data *data,
		struct sde_fb_data *planes, int num_planes, u32 flags,
		struct device *dev, bool rotator, int dir)
{
	int i, rc = 0;

	if ((num_planes <= 0) || (num_planes > SDE_ROT_MAX_PLANES))
		return -EINVAL;

	for (i = 0; i < num_planes; i++) {
		data->p[i].flags = flags;
		rc = sde_mdp_get_img(&planes[i], &data->p[i], dev, rotator,
				dir);
		if (rc) {
			SDEROT_ERR("failed to get buf p=%d flags=%x\n",
					i, flags);
			while (i > 0) {
				i--;
				sde_mdp_put_img(&data->p[i], rotator, dir);
			}
			break;
		}
	}

	data->num_planes = i;

	return rc;
}

int sde_mdp_data_map(struct sde_mdp_data *data, bool rotator, int dir)
{
	int i, rc = 0;

	if (!data || !data->num_planes || data->num_planes > SDE_ROT_MAX_PLANES)
		return -EINVAL;

	for (i = 0; i < data->num_planes; i++) {
		rc = sde_mdp_map_buffer(&data->p[i], rotator, dir);
		if (rc) {
			SDEROT_ERR("failed to map buf p=%d\n", i);
			while (i > 0) {
				i--;
				sde_mdp_put_img(&data->p[i], rotator, dir);
			}
			break;
		}
	}
	SDEROT_EVTLOG(data->num_planes, dir, data->p[0].addr, data->p[0].len,
			data->p[0].mapped);

	return rc;
}

void sde_mdp_data_free(struct sde_mdp_data *data, bool rotator, int dir)
{
	int i;

	sde_smmu_ctrl(1);
	for (i = 0; i < data->num_planes && data->p[i].len; i++)
		sde_mdp_put_img(&data->p[i], rotator, dir);
	sde_smmu_ctrl(0);

	data->num_planes = 0;
}

int sde_mdp_data_get_and_validate_size(struct sde_mdp_data *data,
	struct sde_fb_data *planes, int num_planes, u32 flags,
	struct device *dev, bool rotator, int dir,
	struct sde_layer_buffer *buffer)
{
	struct sde_mdp_format_params *fmt;
	struct sde_mdp_plane_sizes ps;
	int ret, i;
	unsigned long total_buf_len = 0;

	fmt = sde_get_format_params(buffer->format);
	if (!fmt) {
		SDEROT_ERR("Format %d not supported\n", buffer->format);
		return -EINVAL;
	}

	ret = sde_mdp_data_get(data, planes, num_planes,
			flags, dev, rotator, dir);
	if (ret)
		return ret;

	sde_mdp_get_plane_sizes(fmt, buffer->width, buffer->height, &ps, 0, 0);

	for (i = 0; i < num_planes; i++) {
		unsigned long plane_len = (data->p[i].srcp_dma_buf) ?
				data->p[i].srcp_dma_buf->size : data->p[i].len;

		if (plane_len < planes[i].offset) {
			SDEROT_ERR("Offset=%d larger than buffer size=%lu\n",
					planes[i].offset, plane_len);
			ret = -EINVAL;
			goto buf_too_small;
		}
		total_buf_len += plane_len - planes[i].offset;
	}

	if (total_buf_len < ps.total_size) {
		SDEROT_ERR("Buffer size=%lu, expected size=%d\n",
				total_buf_len,
				ps.total_size);
		ret = -EINVAL;
		goto buf_too_small;
	}
	return 0;

buf_too_small:
	sde_mdp_data_free(data, rotator, dir);
	return ret;
}
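
/*
 * sde_rot_dmabuf_map_tiny() - map_dma_buf callback for exported buffers
 * @attach - dma-buf attachment being mapped
 * @dir - DMA data direction
 *
 * Backs the exported buffer with a single freshly allocated page, so only
 * buffers up to PAGE_SIZE are supported.
 */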
static struct sg_table *sde_rot_dmabuf_map_tiny(
		struct dma_buf_attachment *attach, enum dma_data_direction dir)
{
	struct sde_mdp_img_data *data = attach->dmabuf->priv;
	struct sg_table *sgt;
	unsigned int order;
	struct page *p;

	if (!data) {
		SDEROT_ERR("NULL img data\n");
		return NULL;
	}

	if (data->len > PAGE_SIZE) {
		SDEROT_ERR("DMA buffer size is larger than %ld, bufsize:%ld\n",
				PAGE_SIZE, data->len);
		return NULL;
	}

	order = get_order(data->len);
	p = alloc_pages(GFP_KERNEL, order);
	if (!p) {
		SDEROT_ERR("Fail allocating page for datasize:%ld\n",
				data->len);
		return NULL;
	}

	sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		goto free_alloc_pages;

	/* only alloc a single page */
	if (sg_alloc_table(sgt, 1, GFP_KERNEL)) {
		SDEROT_ERR("fail sg_alloc_table\n");
		goto free_sgt;
	}

	sg_set_page(sgt->sgl, p, data->len, 0);

	if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) {
		SDEROT_ERR("fail dma_map_sg\n");
		goto free_table;
	}

	SDEROT_DBG("Successful generate sg_table:%pK datalen:%ld\n",
			sgt, data->len);
	return sgt;

free_table:
	sg_free_table(sgt);
free_sgt:
	kfree(sgt);
free_alloc_pages:
	__free_pages(p, order);
	return NULL;
}

static void sde_rot_dmabuf_unmap(struct dma_buf_attachment *attach,
		struct sg_table *sgt, enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int i;

	SDEROT_DBG("DMABUF unmap, sgt:%pK\n", sgt);
	dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir);

	for_each_sg(sgt->sgl, sg, sgt->nents, i) {
		put_page(sg_page(sg));
		__free_page(sg_page(sg));
	}
	sg_free_table(sgt);
	kfree(sgt);
}

static void *sde_rot_dmabuf_no_map(struct dma_buf *buf, unsigned long n)
{
	SDEROT_WARN("NOT SUPPORTING dmabuf map\n");
	return NULL;
}

static void sde_rot_dmabuf_no_unmap(struct dma_buf *buf, unsigned long n,
		void *addr)
{
	SDEROT_WARN("NOT SUPPORTING dmabuf unmap\n");
}

static void sde_rot_dmabuf_release(struct dma_buf *buf)
{
	SDEROT_DBG("Release dmabuf:%pK\n", buf);
}

static int sde_rot_dmabuf_no_mmap(struct dma_buf *buf,
		struct vm_area_struct *vma)
{
	SDEROT_WARN("NOT SUPPORTING dmabuf mmap\n");
	return -EINVAL;
}

static const struct dma_buf_ops sde_rot_dmabuf_ops = {
	.map_dma_buf = sde_rot_dmabuf_map_tiny,
	.unmap_dma_buf = sde_rot_dmabuf_unmap,
	.release = sde_rot_dmabuf_release,
	.map = sde_rot_dmabuf_no_map,
	.unmap = sde_rot_dmabuf_no_unmap,
	.mmap = sde_rot_dmabuf_no_mmap,
};
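
/*
 * sde_rot_get_dmabuf() - export an image data descriptor as a dma-buf
 * @data - image data descriptor to export; becomes the dma-buf private data
 *
 * Returns the exported dma-buf, or an ERR_PTR value on failure.
 */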
struct dma_buf *sde_rot_get_dmabuf(struct sde_mdp_img_data *data)
{
	DEFINE_DMA_BUF_EXPORT_INFO(exp_info);

	exp_info.ops = &sde_rot_dmabuf_ops;
	exp_info.size = (size_t)data->len;
	exp_info.flags = O_RDWR;
	exp_info.priv = data;

	return dma_buf_export(&exp_info);
}