// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2022-2024 Qualcomm Innovation Center, Inc. All rights reserved.
 * Copyright (c) 2012, 2015-2019, 2021, The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt)	"%s: " fmt, __func__

#include <linux/qcom-dma-mapping.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/file.h>
#include <linux/spinlock.h>
#include <linux/types.h>
#include <linux/major.h>
#include <linux/dma-buf.h>
#include <linux/debugfs.h>
#include <linux/delay.h>
#include <linux/wait.h>
#include <linux/clk.h>
#include <linux/slab.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/regulator/consumer.h>
#include <linux/version.h>
#include <linux/module.h>
#include <media/mmm_color_fmt.h>
#include <linux/videodev2.h>
#include <linux/ion.h>

#include "sde_rotator_util.h"
#include "sde_rotator_smmu.h"
#include "sde_rotator_debug.h"

#define Y_TILEWIDTH	48
#define Y_TILEHEIGHT	4
#define UV_TILEWIDTH	48
#define UV_TILEHEIGHT	8
#define TILEWIDTH_SIZE	64
#define TILEHEIGHT_SIZE	4
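
/*
 * sde_mdp_get_v_h_subsample_rate() - get chroma subsampling factors
 * @chroma_sample - chroma sample type (SDE_MDP_CHROMA_*)
 * @v_sample - returned vertical subsampling factor
 * @h_sample - returned horizontal subsampling factor
 *
 * Both factors default to 1 for unknown sample types.
 */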
void sde_mdp_get_v_h_subsample_rate(u8 chroma_sample,
		u8 *v_sample, u8 *h_sample)
{
	switch (chroma_sample) {
	case SDE_MDP_CHROMA_H2V1:
		*v_sample = 1;
		*h_sample = 2;
		break;
	case SDE_MDP_CHROMA_H1V2:
		*v_sample = 2;
		*h_sample = 1;
		break;
	case SDE_MDP_CHROMA_420:
		*v_sample = 2;
		*h_sample = 2;
		break;
	default:
		*v_sample = 1;
		*h_sample = 1;
		break;
	}
}
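
/*
 * sde_rot_intersect_rect() - intersect two rectangles
 * @res_rect - resulting intersection rectangle
 * @dst_rect - first rectangle
 * @sci_rect - second rectangle
 *
 * Writes an all-zero rectangle to @res_rect if the inputs do not overlap.
 */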
void sde_rot_intersect_rect(struct sde_rect *res_rect,
	const struct sde_rect *dst_rect,
	const struct sde_rect *sci_rect)
{
	int l = max(dst_rect->x, sci_rect->x);
	int t = max(dst_rect->y, sci_rect->y);
	int r = min((dst_rect->x + dst_rect->w), (sci_rect->x + sci_rect->w));
	int b = min((dst_rect->y + dst_rect->h), (sci_rect->y + sci_rect->h));

	if (r < l || b < t)
		*res_rect = (struct sde_rect){0, 0, 0, 0};
	else
		*res_rect = (struct sde_rect){l, t, (r-l), (b-t)};
}
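
/*
 * sde_rot_crop_rect() - crop a source/destination pair against a clip rect
 * @src_rect - source rectangle, adjusted in place
 * @dst_rect - destination rectangle, adjusted in place
 * @sci_rect - clipping rectangle
 *
 * The destination is clipped to @sci_rect and the source is shifted and
 * resized by the same amount so the two stay in sync.
 */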
void sde_rot_crop_rect(struct sde_rect *src_rect,
	struct sde_rect *dst_rect,
	const struct sde_rect *sci_rect)
{
	struct sde_rect res;

	sde_rot_intersect_rect(&res, dst_rect, sci_rect);

	if (res.w && res.h) {
		if ((res.w != dst_rect->w) || (res.h != dst_rect->h)) {
			src_rect->x = src_rect->x + (res.x - dst_rect->x);
			src_rect->y = src_rect->y + (res.y - dst_rect->y);
			src_rect->w = res.w;
			src_rect->h = res.h;
		}
		*dst_rect = (struct sde_rect)
			{(res.x - sci_rect->x), (res.y - sci_rect->y),
			res.w, res.h};
	}
}

/*
 * sde_rect_cmp() - compares two rects
 * @rect1 - rect value to compare
 * @rect2 - rect value to compare
 *
 * Returns 1 if the rects are same, 0 otherwise.
 */
int sde_rect_cmp(struct sde_rect *rect1, struct sde_rect *rect2)
{
	return rect1->x == rect2->x && rect1->y == rect2->y &&
	       rect1->w == rect2->w && rect1->h == rect2->h;
}

/*
 * sde_rect_overlap_check() - compare two rects and check if they overlap
 * @rect1 - rect value to compare
 * @rect2 - rect value to compare
 *
 * Returns true if rects overlap, false otherwise.
 */
bool sde_rect_overlap_check(struct sde_rect *rect1, struct sde_rect *rect2)
{
	u32 rect1_left = rect1->x, rect1_right = rect1->x + rect1->w;
	u32 rect1_top = rect1->y, rect1_bottom = rect1->y + rect1->h;
	u32 rect2_left = rect2->x, rect2_right = rect2->x + rect2->w;
	u32 rect2_top = rect2->y, rect2_bottom = rect2->y + rect2->h;

	if ((rect1_right <= rect2_left) ||
	    (rect1_left >= rect2_right) ||
	    (rect1_bottom <= rect2_top) ||
	    (rect1_top >= rect2_bottom))
		return false;

	return true;
}
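
/*
 * sde_mdp_get_rau_strides() - RAU strides and heights for BWC buffers
 * @w - buffer width in pixels
 * @h - buffer height in pixels
 * @fmt - format description
 * @ps - plane size structure to be filled out
 *
 * Fills the per-RAU strides, RAU heights and RAU count used when bandwidth
 * compression (BWC) is enabled. Returns -EINVAL for formats that are
 * neither YUV nor interleaved.
 */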
int sde_mdp_get_rau_strides(u32 w, u32 h,
	struct sde_mdp_format_params *fmt,
	struct sde_mdp_plane_sizes *ps)
{
	if (fmt->is_yuv) {
		ps->rau_cnt = DIV_ROUND_UP(w, 64);
		ps->ystride[0] = 64 * 4;
		ps->rau_h[0] = 4;
		ps->rau_h[1] = 2;
		if (fmt->chroma_sample == SDE_MDP_CHROMA_H1V2)
			ps->ystride[1] = 64 * 2;
		else if (fmt->chroma_sample == SDE_MDP_CHROMA_H2V1) {
			ps->ystride[1] = 32 * 4;
			ps->rau_h[1] = 4;
		} else
			ps->ystride[1] = 32 * 2;

		/* account for both chroma components */
		ps->ystride[1] <<= 1;
	} else if (fmt->fetch_planes == SDE_MDP_PLANE_INTERLEAVED) {
		ps->rau_cnt = DIV_ROUND_UP(w, 32);
		ps->ystride[0] = 32 * 4 * fmt->bpp;
		ps->ystride[1] = 0;
		ps->rau_h[0] = 4;
		ps->rau_h[1] = 0;
	} else {
		SDEROT_ERR("Invalid format=%d\n", fmt->format);
		return -EINVAL;
	}

	ps->ystride[0] *= ps->rau_cnt;
	ps->ystride[1] *= ps->rau_cnt;
	ps->num_planes = 2;

	SDEROT_DBG("BWC rau_cnt=%d strides={%d,%d} heights={%d,%d}\n",
		ps->rau_cnt, ps->ystride[0], ps->ystride[1],
		ps->rau_h[0], ps->rau_h[1]);

	return 0;
}
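
/*
 * sde_mdp_get_a5x_plane_size() - plane sizes for A5x tile/UBWC formats
 * @fmt - format description
 * @width - buffer width in pixels
 * @height - buffer height in pixels
 * @ps - plane size structure to be filled out
 *
 * Fills the bitstream plane strides and sizes for NV12 8-bit, P010, TP10
 * and RGB variants, plus the meta data planes when the format is UBWC
 * compressed. Returns -EINVAL for unsupported formats.
 */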
static int sde_mdp_get_a5x_plane_size(struct sde_mdp_format_params *fmt,
	u32 width, u32 height, struct sde_mdp_plane_sizes *ps)
{
	int rc = 0;

	if (sde_mdp_is_nv12_8b_format(fmt)) {
		ps->num_planes = 2;
		/* Y bitstream stride and plane size */
		ps->ystride[0] = ALIGN(width, 128);
		ps->plane_size[0] = ALIGN(ps->ystride[0] * ALIGN(height, 32),
					4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = ALIGN(width, 128);
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
			ALIGN(height / 2, 32), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(width, 32), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 8), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
			ALIGN(DIV_ROUND_UP(height / 2, 8), 16), 4096);
	} else if (sde_mdp_is_p010_format(fmt)) {
		ps->num_planes = 2;
		/* Y bitstream stride and plane size */
		ps->ystride[0] = ALIGN(width * 2, 256);
		ps->plane_size[0] = ALIGN(ps->ystride[0] * ALIGN(height, 16),
					4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = ALIGN(width * 2, 256);
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
			ALIGN(height / 2, 16), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(width, 32), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 4), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
			ALIGN(DIV_ROUND_UP(height / 2, 4), 16), 4096);
	} else if (sde_mdp_is_tp10_format(fmt)) {
		u32 yWidth = sde_mdp_general_align(width, 192);
		u32 yHeight = ALIGN(height, 16);
		u32 uvWidth = sde_mdp_general_align(width, 192);
		u32 uvHeight = ALIGN(height, 32);

		ps->num_planes = 2;

		/* Y bitstream stride and plane size */
		ps->ystride[0] = yWidth * TILEWIDTH_SIZE / Y_TILEWIDTH;
		ps->plane_size[0] = ALIGN(ps->ystride[0] *
				(yHeight * TILEHEIGHT_SIZE / Y_TILEHEIGHT),
				4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = uvWidth * TILEWIDTH_SIZE / UV_TILEWIDTH;
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
				(uvHeight * TILEHEIGHT_SIZE / UV_TILEHEIGHT),
				4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(yWidth / Y_TILEWIDTH, 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN((yHeight / Y_TILEHEIGHT), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(uvWidth / UV_TILEWIDTH, 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
			ALIGN((uvHeight / UV_TILEHEIGHT), 16), 4096);
	} else if (sde_mdp_is_rgb_format(fmt)) {
		uint32_t stride_alignment, bpp, aligned_bitstream_width;

		if (fmt->format == SDE_PIX_FMT_RGB_565_UBWC) {
			stride_alignment = 128;
			bpp = 2;
		} else {
			stride_alignment = 64;
			bpp = 4;
		}

		ps->num_planes = 1;

		/* RGB bitstream stride and plane size */
		aligned_bitstream_width = ALIGN(width, stride_alignment);
		ps->ystride[0] = aligned_bitstream_width * bpp;
		ps->plane_size[0] = ALIGN(bpp * aligned_bitstream_width *
			ALIGN(height, 16), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 1;

		/* RGB meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(aligned_bitstream_width,
			16), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 4), 16), 4096);
	} else {
		SDEROT_ERR("%s: UBWC format not supported for fmt:%d\n",
			__func__, fmt->format);
		rc = -EINVAL;
	}
done:
	return rc;
}
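
/*
 * sde_mdp_get_plane_sizes() - compute per-plane strides and sizes
 * @fmt - format description
 * @w - buffer width in pixels
 * @h - buffer height in pixels
 * @ps - plane size structure to be filled out
 * @bwc_mode - non-zero to compute the BWC (RAU based) layout
 * @rotation - true if the buffer is used for rotation
 *
 * On success ps->total_size holds the sum of all plane sizes. Returns
 * -ERANGE when the dimensions exceed SDE_ROT_MAX_IMG_WIDTH/HEIGHT.
 *
 * Illustrative call (a sketch only, not taken from this driver; assumes
 * "fmt" was returned by sde_get_format_params()):
 *
 *	struct sde_mdp_plane_sizes ps;
 *
 *	if (!sde_mdp_get_plane_sizes(fmt, 1920, 1080, &ps, 0, false))
 *		pr_debug("need %u bytes across %d plane(s)\n",
 *			 ps.total_size, ps.num_planes);
 */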
int sde_mdp_get_plane_sizes(struct sde_mdp_format_params *fmt, u32 w, u32 h,
	struct sde_mdp_plane_sizes *ps, u32 bwc_mode, bool rotation)
{
	int i, rc = 0;
	u32 bpp;

	if (ps == NULL)
		return -EINVAL;

	if ((w > SDE_ROT_MAX_IMG_WIDTH) || (h > SDE_ROT_MAX_IMG_HEIGHT))
		return -ERANGE;

	bpp = fmt->bpp;
	memset(ps, 0, sizeof(struct sde_mdp_plane_sizes));

	if (sde_mdp_is_tilea5x_format(fmt)) {
		rc = sde_mdp_get_a5x_plane_size(fmt, w, h, ps);
	} else if (bwc_mode) {
		u32 height, meta_size;

		rc = sde_mdp_get_rau_strides(w, h, fmt, ps);
		if (rc)
			return rc;

		height = DIV_ROUND_UP(h, ps->rau_h[0]);
		meta_size = DIV_ROUND_UP(ps->rau_cnt, 8);
		ps->ystride[1] += meta_size;
		ps->ystride[0] += ps->ystride[1] + meta_size;
		ps->plane_size[0] = ps->ystride[0] * height;

		ps->ystride[1] = 2;
		ps->plane_size[1] = 2 * ps->rau_cnt * height;

		SDEROT_DBG("BWC data stride=%d size=%d meta size=%d\n",
			ps->ystride[0], ps->plane_size[0], ps->plane_size[1]);
	} else {
		if (fmt->fetch_planes == SDE_MDP_PLANE_INTERLEAVED) {
			ps->num_planes = 1;
			ps->plane_size[0] = w * h * bpp;
			ps->ystride[0] = w * bpp;
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_VENUS ||
			fmt->format == SDE_PIX_FMT_Y_CRCB_H2V2_VENUS ||
			fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_P010_VENUS) {
			int cf;

			switch (fmt->format) {
			case SDE_PIX_FMT_Y_CBCR_H2V2_VENUS:
				cf = MMM_COLOR_FMT_NV12;
				break;
			case SDE_PIX_FMT_Y_CRCB_H2V2_VENUS:
				cf = MMM_COLOR_FMT_NV21;
				break;
			case SDE_PIX_FMT_Y_CBCR_H2V2_P010_VENUS:
				cf = MMM_COLOR_FMT_P010;
				break;
			default:
				SDEROT_ERR("unknown color format %d\n",
					fmt->format);
				return -EINVAL;
			}

			ps->num_planes = 2;
			ps->ystride[0] = MMM_COLOR_FMT_Y_STRIDE(cf, w);
			ps->ystride[1] = MMM_COLOR_FMT_UV_STRIDE(cf, w);
			ps->plane_size[0] = MMM_COLOR_FMT_Y_SCANLINES(cf, h) *
				ps->ystride[0];
			ps->plane_size[1] = MMM_COLOR_FMT_UV_SCANLINES(cf, h) *
				ps->ystride[1];
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_P010) {
			/*
			 * |<---Y1--->000000<---Y0--->000000| Plane0
			 * |rrrrrrrrrr000000bbbbbbbbbb000000| Plane1
			 * |--------------------------------|
			 *  33222222222211111111110000000000  Bit
			 *  10987654321098765432109876543210  Location
			 */
			ps->num_planes = 2;
			ps->ystride[0] = w * 2;
			ps->ystride[1] = w * 2;
			ps->plane_size[0] = ps->ystride[0] * h;
			ps->plane_size[1] = ps->ystride[1] * h / 2;
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_TP10) {
			u32 yWidth = sde_mdp_general_align(w, 192);
			u32 yHeight = ALIGN(h, 16);
			u32 uvWidth = sde_mdp_general_align(w, 192);
			u32 uvHeight = (ALIGN(h, 32)) / 2;

			ps->num_planes = 2;

			ps->ystride[0] = (yWidth / 3) * 4;
			ps->ystride[1] = (uvWidth / 3) * 4;
			ps->plane_size[0] = ALIGN(ps->ystride[0] * yHeight,
				4096);
			ps->plane_size[1] = ALIGN(ps->ystride[1] * uvHeight,
				4096);
		} else {
			u8 v_subsample, h_subsample, stride_align, height_align;
			u32 chroma_samp;

			chroma_samp = fmt->chroma_sample;

			sde_mdp_get_v_h_subsample_rate(chroma_samp,
				&v_subsample, &h_subsample);

			switch (fmt->format) {
			case SDE_PIX_FMT_Y_CR_CB_GH2V2:
				stride_align = 16;
				height_align = 1;
				break;
			default:
				stride_align = 1;
				height_align = 1;
				break;
			}

			ps->ystride[0] = ALIGN(w, stride_align);
			ps->ystride[1] = ALIGN(w / h_subsample, stride_align);
			ps->plane_size[0] = ps->ystride[0] *
				ALIGN(h, height_align);
			ps->plane_size[1] = ps->ystride[1] * (h / v_subsample);

			if (fmt->fetch_planes == SDE_MDP_PLANE_PSEUDO_PLANAR) {
				ps->num_planes = 2;
				ps->plane_size[1] *= 2;
				ps->ystride[1] *= 2;
			} else { /* planar */
				ps->num_planes = 3;
				ps->plane_size[2] = ps->plane_size[1];
				ps->ystride[2] = ps->ystride[1];
			}
		}
	}

	/* Safe to use MAX_PLANES as ps is memset at start of function */
	for (i = 0; i < SDE_ROT_MAX_PLANES; i++)
		ps->total_size += ps->plane_size[i];

	return rc;
}
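
/*
 * sde_mdp_a5x_data_check() - split one contiguous buffer into A5x planes
 * @data - buffer data with plane 0 covering the whole allocation
 * @ps - expected plane sizes for this format
 * @fmt - format description
 *
 * When userspace hands in a single contiguous buffer, derive the per-plane
 * addresses and lengths according to the UBWC/tile buffer layout, then
 * verify that every configured plane length matches the expected size.
 */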
static int sde_mdp_a5x_data_check(struct sde_mdp_data *data,
	struct sde_mdp_plane_sizes *ps,
	struct sde_mdp_format_params *fmt)
{
	int i, inc;
	unsigned long data_size = 0;
	dma_addr_t base_addr;

	if (data->p[0].len == ps->plane_size[0])
		goto end;

	/* From this point, assumption is plane 0 is to be divided */
	data_size = data->p[0].len;
	if (data_size < ps->total_size) {
		SDEROT_ERR(
			"insufficient current mem len=%lu required mem len=%u\n",
			data_size, ps->total_size);
		return -ENOMEM;
	}

	base_addr = data->p[0].addr;

	if (sde_mdp_is_yuv_format(fmt)) {
		/************************************************/
		/*      UBWC            **                      */
		/*      buffer          **      MDP PLANE       */
		/*      format          **                      */
		/************************************************/
		/* -------------------  ** -------------------- */
		/* |      Y meta     |  ** |   Y bitstream    | */
		/* |      data       |  ** |      plane       | */
		/* -------------------  ** -------------------- */
		/* |  Y bitstream    |  ** |  CbCr bitstream  | */
		/* |      data       |  ** |      plane       | */
		/* -------------------  ** -------------------- */
		/* |  Cbcr metadata  |  ** |      Y meta      | */
		/* |      data       |  ** |      plane       | */
		/* -------------------  ** -------------------- */
		/* |  CbCr bitstream |  ** |    CbCr meta     | */
		/* |      data       |  ** |      plane       | */
		/* -------------------  ** -------------------- */
		/************************************************/

		/* configure Y bitstream plane */
		data->p[0].addr = base_addr + ps->plane_size[2];
		data->p[0].len = ps->plane_size[0];

		/* configure CbCr bitstream plane */
		data->p[1].addr = base_addr + ps->plane_size[0]
			+ ps->plane_size[2] + ps->plane_size[3];
		data->p[1].len = ps->plane_size[1];

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		/* configure Y metadata plane */
		data->p[2].addr = base_addr;
		data->p[2].len = ps->plane_size[2];

		/* configure CbCr metadata plane */
		data->p[3].addr = base_addr + ps->plane_size[0]
			+ ps->plane_size[2];
		data->p[3].len = ps->plane_size[3];
	} else {
		/************************************************/
		/*      UBWC            **                      */
		/*      buffer          **      MDP PLANE       */
		/*      format          **                      */
		/************************************************/
		/* -------------------  ** -------------------- */
		/* |    RGB meta     |  ** |  RGB bitstream   | */
		/* |      data       |  ** |      plane       | */
		/* -------------------  ** -------------------- */
		/* |  RGB bitstream  |  ** |      NONE        | */
		/* |      data       |  ** |                  | */
		/* -------------------  ** -------------------- */
		/*                      ** |     RGB meta     | */
		/*                      ** |      plane       | */
		/*                      ** -------------------- */
		/************************************************/

		/* configure RGB bitstream plane */
		data->p[0].addr = base_addr + ps->plane_size[2];
		data->p[0].len = ps->plane_size[0];

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		/* configure RGB metadata plane */
		data->p[2].addr = base_addr;
		data->p[2].len = ps->plane_size[2];
	}
done:
	data->num_planes = ps->num_planes;

end:
	if (data->num_planes != ps->num_planes) {
		SDEROT_ERR("num_planes don't match: fmt:%d, data:%d, ps:%d\n",
			fmt->format, data->num_planes, ps->num_planes);
		return -EINVAL;
	}

	inc = (sde_mdp_is_yuv_format(fmt) ? 1 : 2);
	for (i = 0; i < SDE_ROT_MAX_PLANES; i += inc) {
		if (data->p[i].len != ps->plane_size[i]) {
			SDEROT_ERR(
				"plane:%d fmt:%d, len does not match: data:%lu, ps:%d\n",
				i, fmt->format, data->p[i].len,
				ps->plane_size[i]);
			return -EINVAL;
		}
	}

	return 0;
}
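
/*
 * sde_mdp_data_check() - validate buffer planes against expected sizes
 * @data - buffer data to validate (plane addresses/lengths may be adjusted)
 * @ps - expected plane sizes, may be NULL to skip the check
 * @fmt - format description
 *
 * For A5x tile formats this defers to sde_mdp_a5x_data_check(). Otherwise
 * missing planes are carved out of the previous plane and each plane length
 * is checked against the expected size.
 */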
int sde_mdp_data_check(struct sde_mdp_data *data,
	struct sde_mdp_plane_sizes *ps,
	struct sde_mdp_format_params *fmt)
{
	struct sde_mdp_img_data *prev, *curr;
	int i;

	if (!ps)
		return 0;

	if (!data || data->num_planes == 0)
		return -ENOMEM;

	if (sde_mdp_is_tilea5x_format(fmt))
		return sde_mdp_a5x_data_check(data, ps, fmt);

	SDEROT_DBG("srcp0=%pa len=%lu frame_size=%u\n", &data->p[0].addr,
		data->p[0].len, ps->total_size);

	for (i = 0; i < ps->num_planes; i++) {
		curr = &data->p[i];
		if (i >= data->num_planes) {
			u32 psize = ps->plane_size[i-1];

			prev = &data->p[i-1];
			if (prev->len > psize) {
				curr->len = prev->len - psize;
				prev->len = psize;
			}
			curr->addr = prev->addr + psize;
		}
		if (curr->len < ps->plane_size[i]) {
			SDEROT_ERR("insufficient mem=%lu p=%d len=%u\n",
				curr->len, i, ps->plane_size[i]);
			return -ENOMEM;
		}
		SDEROT_DBG("plane[%d] addr=%pa len=%lu\n", i,
			&curr->addr, curr->len);
	}
	data->num_planes = ps->num_planes;

	return 0;
}
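
/*
 * sde_validate_offset_for_ubwc_format() - check a ROI offset for UBWC
 * @fmt - format description
 * @x - horizontal offset in pixels
 * @y - vertical offset in pixels
 *
 * Both offsets must be multiples of the UBWC micro tile dimension times
 * UBWC_META_MACRO_W_H. Returns -EINVAL if either offset is misaligned or
 * the micro tile dimensions cannot be determined.
 */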
int sde_validate_offset_for_ubwc_format(
	struct sde_mdp_format_params *fmt, u16 x, u16 y)
{
	int ret;
	u16 micro_w = 0, micro_h = 0;

	ret = sde_rot_get_ubwc_micro_dim(fmt->format, &micro_w, &micro_h);
	if (ret || !micro_w || !micro_h) {
		SDEROT_ERR("Could not get valid micro tile dimensions\n");
		return -EINVAL;
	}

	if (x % (micro_w * UBWC_META_MACRO_W_H)) {
		SDEROT_ERR("x=%d does not align with meta width=%d\n", x,
			micro_w * UBWC_META_MACRO_W_H);
		return -EINVAL;
	}

	if (y % (micro_h * UBWC_META_MACRO_W_H)) {
		SDEROT_ERR("y=%d does not align with meta height=%d\n", y,
			micro_h * UBWC_META_MACRO_W_H);
		return -EINVAL;
	}

	return ret;
}
/* x and y are assumed to be valid, expected to line up with start of tiles */
void sde_rot_ubwc_data_calc_offset(struct sde_mdp_data *data, u16 x, u16 y,
	struct sde_mdp_plane_sizes *ps, struct sde_mdp_format_params *fmt)
{
	u16 macro_w, micro_w, micro_h;
	u32 offset = 0;
	int ret;

	ret = sde_rot_get_ubwc_micro_dim(fmt->format, &micro_w, &micro_h);
	if (ret || !micro_w || !micro_h) {
		SDEROT_ERR("Could not get valid micro tile dimensions\n");
		return;
	}
	macro_w = 4 * micro_w;

	if (sde_mdp_is_nv12_8b_format(fmt)) {
		u16 chroma_macro_w = macro_w / 2;
		u16 chroma_micro_w = micro_w / 2;

		/* plane 1 and 3 are chroma, with sub sample of 2 */
		offset = y * ps->ystride[0] +
			(x / macro_w) * 4096;
		if (offset < data->p[0].len) {
			data->p[0].addr += offset;
		} else {
			ret = 1;
			goto done;
		}

		offset = y / 2 * ps->ystride[1] +
			((x / 2) / chroma_macro_w) * 4096;
		if (offset < data->p[1].len) {
			data->p[1].addr += offset;
		} else {
			ret = 2;
			goto done;
		}

		offset = (y / micro_h) * ps->ystride[2] +
			((x / micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[2].len) {
			data->p[2].addr += offset;
		} else {
			ret = 3;
			goto done;
		}

		offset = ((y / 2) / micro_h) * ps->ystride[3] +
			(((x / 2) / chroma_micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[3].len) {
			data->p[3].addr += offset;
		} else {
			ret = 4;
			goto done;
		}
	} else if (sde_mdp_is_nv12_10b_format(fmt)) {
		/* TODO: */
		SDEROT_ERR("%c%c%c%c format not implemented yet\n",
			fmt->format >> 0, fmt->format >> 8,
			fmt->format >> 16, fmt->format >> 24);
		ret = 1;
		goto done;
	} else {
		offset = y * ps->ystride[0] +
			(x / macro_w) * 4096;
		if (offset < data->p[0].len) {
			data->p[0].addr += offset;
		} else {
			ret = 1;
			goto done;
		}

		offset = DIV_ROUND_UP(y, micro_h) * ps->ystride[2] +
			((x / micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[2].len) {
			data->p[2].addr += offset;
		} else {
			ret = 3;
			goto done;
		}
	}
done:
	if (ret) {
		WARN(1, "idx %d, offsets:%u too large for buflen %lu\n",
			(ret - 1), offset, data->p[(ret - 1)].len);
	}
}
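
/*
 * sde_rot_data_calc_offset() - advance plane addresses to an (x, y) offset
 * @data - buffer data whose plane addresses are adjusted in place
 * @x - horizontal offset in pixels
 * @y - vertical offset in pixels
 * @ps - plane strides for this buffer
 * @fmt - format description
 *
 * A5x tile formats are handed off to sde_rot_ubwc_data_calc_offset();
 * linear formats are offset using the plane strides and the chroma
 * subsampling factors.
 */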
void sde_rot_data_calc_offset(struct sde_mdp_data *data, u16 x, u16 y,
	struct sde_mdp_plane_sizes *ps, struct sde_mdp_format_params *fmt)
{
	if ((x == 0) && (y == 0))
		return;

	if (sde_mdp_is_tilea5x_format(fmt)) {
		sde_rot_ubwc_data_calc_offset(data, x, y, ps, fmt);
		return;
	}

	data->p[0].addr += y * ps->ystride[0];

	if (data->num_planes == 1) {
		data->p[0].addr += x * fmt->bpp;
	} else {
		u16 xoff, yoff;
		u8 v_subsample, h_subsample;

		sde_mdp_get_v_h_subsample_rate(fmt->chroma_sample,
			&v_subsample, &h_subsample);

		xoff = x / h_subsample;
		yoff = y / v_subsample;

		data->p[0].addr += x;
		data->p[1].addr += xoff + (yoff * ps->ystride[1]);
		if (data->num_planes == 2) /* pseudo planar */
			data->p[1].addr += xoff;
		else /* planar */
			data->p[2].addr += xoff + (yoff * ps->ystride[2]);
	}
}

static int sde_smmu_get_domain_type(u32 flags, bool rotator)
{
	int type;

	if (flags & SDE_SECURE_OVERLAY_SESSION)
		type = SDE_IOMMU_DOMAIN_ROT_SECURE;
	else
		type = SDE_IOMMU_DOMAIN_ROT_UNSECURE;

	return type;
}

static int sde_mdp_is_map_needed(struct sde_mdp_img_data *data)
{
	if (data->flags & SDE_SECURE_CAMERA_SESSION)
		return false;

	return true;
}
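
/*
 * sde_mdp_put_img() - release a previously acquired image buffer
 * @data - image buffer data to release
 * @rotator - true when the buffer belongs to the rotator domain
 * @dir - DMA data direction used for the mapping
 *
 * Client-mapped (SDE_ROT_EXT_IOVA) buffers are left untouched. Otherwise,
 * unless already detached, the dma-buf attachment is unmapped and detached,
 * and the dma-buf reference is dropped unless it was supplied via
 * SDE_ROT_EXT_DMA_BUF.
 */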
static int sde_mdp_put_img(struct sde_mdp_img_data *data, bool rotator,
	int dir)
{
	u32 domain;

	if (data->flags & SDE_ROT_EXT_IOVA) {
		SDEROT_DBG("buffer %pad/%lx is client mapped\n",
			&data->addr, data->len);
		return 0;
	}

	if (!IS_ERR_OR_NULL(data->srcp_dma_buf)) {
		SDEROT_DBG("ion hdl=%pK buf=0x%pa\n", data->srcp_dma_buf,
			&data->addr);
		if (sde_mdp_is_map_needed(data) && data->mapped) {
			domain = sde_smmu_get_domain_type(data->flags,
				rotator);
			data->mapped = false;
			SDEROT_DBG("unmap %pad/%lx d:%u f:%x\n", &data->addr,
				data->len, domain, data->flags);
		}
		if (!data->skip_detach) {
			data->srcp_attachment->dma_map_attrs |=
				DMA_ATTR_DELAYED_UNMAP;
			dma_buf_unmap_attachment(data->srcp_attachment,
				data->srcp_table, dir);
			dma_buf_detach(data->srcp_dma_buf,
				data->srcp_attachment);
			if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
				dma_buf_put(data->srcp_dma_buf);
				data->srcp_dma_buf = NULL;
			}
			data->skip_detach = true;
		}
	} else {
		return -ENOMEM;
	}

	return 0;
}
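
/*
 * sde_mdp_get_img() - acquire and attach one image buffer plane
 * @img - plane description from the frame buffer data
 * @data - image buffer data to populate
 * @dev - device used for the dma-buf attachment
 * @rotator - true when the buffer belongs to the rotator domain
 * @dir - DMA data direction for the later mapping
 *
 * Client-mapped (SDE_ROT_EXT_IOVA) buffers are used as-is. For dma-buf
 * backed planes the buffer is attached either through the rotator SMMU
 * domain or, for secure camera sessions, directly to the device; the
 * actual mapping is done later in sde_mdp_map_buffer().
 */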
static int sde_mdp_get_img(struct sde_fb_data *img,
	struct sde_mdp_img_data *data, struct device *dev,
	bool rotator, int dir)
{
	int ret = -EINVAL;
	u32 domain;

	data->flags |= img->flags;
	data->offset = img->offset;
	if (data->flags & SDE_ROT_EXT_DMA_BUF) {
		data->srcp_dma_buf = img->buffer;
	} else if (data->flags & SDE_ROT_EXT_IOVA) {
		data->addr = img->addr;
		data->len = img->len;
		SDEROT_DBG("use client %pad/%lx\n", &data->addr, data->len);
		return 0;
	} else if (IS_ERR(data->srcp_dma_buf)) {
		SDEROT_ERR("error on ion_import_fd\n");
		ret = PTR_ERR(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
		return ret;
	}

	if (sde_mdp_is_map_needed(data)) {
		domain = sde_smmu_get_domain_type(data->flags, rotator);

		SDEROT_DBG("%d domain=%d ihndl=%pK\n",
				__LINE__, domain, data->srcp_dma_buf);
		data->srcp_attachment =
			sde_smmu_dma_buf_attach(data->srcp_dma_buf, dev,
					domain);
		if (IS_ERR(data->srcp_attachment)) {
			SDEROT_ERR("%d Failed to attach dma buf\n", __LINE__);
			ret = PTR_ERR(data->srcp_attachment);
			goto err_put;
		}
	} else {
		data->srcp_attachment = dma_buf_attach(
				data->srcp_dma_buf, dev);
		if (IS_ERR(data->srcp_attachment)) {
			SDEROT_ERR(
				"Failed to attach dma buf for secure camera\n");
			ret = PTR_ERR(data->srcp_attachment);
			goto err_put;
		}
	}

	SDEROT_DBG("%d attach=%pK\n", __LINE__, data->srcp_attachment);
	data->addr = 0;
	data->len = 0;
	data->mapped = false;
	data->skip_detach = false;
	/* return early, mapping will be done later */

	return 0;
err_put:
	if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
		dma_buf_put(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
	}
	return ret;
}
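
/*
 * sde_mdp_map_buffer() - map an attached image buffer for DMA
 * @data - image buffer data previously set up by sde_mdp_get_img()
 * @rotator - true when the buffer belongs to the rotator domain
 * @dir - DMA data direction
 *
 * Maps the dma-buf attachment and records the IOVA, or the physical
 * address for secure camera buffers (which must be a single sg entry that
 * fits below 4GB). The plane offset is applied to the mapped address
 * before returning.
 */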
static int sde_mdp_map_buffer(struct sde_mdp_img_data *data, bool rotator,
	int dir)
{
	int ret = -EINVAL;
	struct scatterlist *sg;
	struct sg_table *sgt = NULL;
	unsigned int i;
	unsigned long flags = 0;

	if (data->addr && data->len)
		return 0;

	if (data->flags & SDE_ROT_EXT_IOVA) {
		SDEROT_DBG("buffer %pad/%lx is client mapped\n",
			&data->addr, data->len);
		return 0;
	}

	if (!IS_ERR_OR_NULL(data->srcp_dma_buf)) {
		/*
		 * dma_buf_map_attachment will call into
		 * dma_map_sg_attrs, and so all cache maintenance
		 * attribute and lazy unmap attribute will be all
		 * provided here.
		 */
		data->srcp_attachment->dma_map_attrs |=
			DMA_ATTR_DELAYED_UNMAP;

		if (data->srcp_dma_buf && data->srcp_dma_buf->ops &&
			data->srcp_dma_buf->ops->get_flags) {
			if (data->srcp_dma_buf->ops->get_flags(
					data->srcp_dma_buf,
					&flags) == 0) {
				if ((flags & ION_FLAG_CACHED) == 0) {
					SDEROT_DBG("dmabuf is uncached type\n");
					data->srcp_attachment->dma_map_attrs |=
						DMA_ATTR_SKIP_CPU_SYNC;
				}
			}
		}

		sgt = dma_buf_map_attachment(
				data->srcp_attachment, dir);
		if (IS_ERR_OR_NULL(sgt) ||
				IS_ERR_OR_NULL(sgt->sgl)) {
			SDEROT_ERR("Failed to map attachment\n");
			ret = PTR_ERR(sgt);
			goto err_detach;
		}
		data->srcp_table = sgt;

		data->len = 0;
		for_each_sgtable_sg(sgt, sg, i) {
			data->len += sg->length;
		}

		if (sde_mdp_is_map_needed(data)) {
			data->addr = data->srcp_table->sgl->dma_address;
			SDEROT_DBG("map %pad/%lx f:%x\n",
					&data->addr,
					data->len,
					data->flags);
			data->mapped = true;
			ret = 0;
		} else {
			if (sgt->nents != 1) {
				SDEROT_ERR(
					"Fail ion buffer mapping for secure camera\n");
				ret = -EINVAL;
				goto err_unmap;
			}

			if (((uint64_t)sg_dma_address(sgt->sgl) >=
					PHY_ADDR_4G - sgt->sgl->length)) {
				SDEROT_ERR(
					"ion buffer mapped size invalid, size=%d\n",
					sgt->sgl->length);
				ret = -EINVAL;
				goto err_unmap;
			}

			data->addr = sg_phys(data->srcp_table->sgl);
			ret = 0;
		}
	}

	if (!data->addr) {
		SDEROT_ERR("start address is zero!\n");
		sde_mdp_put_img(data, rotator, dir);
		return -ENOMEM;
	}

	if (!ret && (data->offset < data->len)) {
		data->addr += data->offset;
		data->len -= data->offset;

		SDEROT_DBG("ihdl=%pK buf=0x%pa len=0x%lx\n",
			data->srcp_dma_buf, &data->addr, data->len);
	} else {
		sde_mdp_put_img(data, rotator, dir);
		return ret ? : -EOVERFLOW;
	}

	return ret;
err_unmap:
	dma_buf_unmap_attachment(data->srcp_attachment, data->srcp_table, dir);
err_detach:
	dma_buf_detach(data->srcp_dma_buf, data->srcp_attachment);
	if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
		dma_buf_put(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
	}
	return ret;
}

static int sde_mdp_data_get(struct sde_mdp_data *data,
	struct sde_fb_data *planes, int num_planes, u32 flags,
	struct device *dev, bool rotator, int dir)
{
	int i, rc = 0;

	if ((num_planes <= 0) || (num_planes > SDE_ROT_MAX_PLANES))
		return -EINVAL;

	for (i = 0; i < num_planes; i++) {
		data->p[i].flags = flags;
		rc = sde_mdp_get_img(&planes[i], &data->p[i], dev, rotator,
			dir);
		if (rc) {
			SDEROT_ERR("failed to get buf p=%d flags=%x\n",
				i, flags);
			while (i > 0) {
				i--;
				sde_mdp_put_img(&data->p[i], rotator, dir);
			}
			break;
		}
	}

	data->num_planes = i;

	return rc;
}

int sde_mdp_data_map(struct sde_mdp_data *data, bool rotator, int dir)
{
	int i, rc = 0;

	if (!data || !data->num_planes || data->num_planes > SDE_ROT_MAX_PLANES)
		return -EINVAL;

	for (i = 0; i < data->num_planes; i++) {
		rc = sde_mdp_map_buffer(&data->p[i], rotator, dir);
		if (rc) {
			SDEROT_ERR("failed to map buf p=%d\n", i);
			while (i > 0) {
				i--;
				sde_mdp_put_img(&data->p[i], rotator, dir);
			}
			break;
		}
	}
	SDEROT_EVTLOG(data->num_planes, dir, data->p[0].addr, data->p[0].len,
			data->p[0].mapped);

	return rc;
}

void sde_mdp_data_free(struct sde_mdp_data *data, bool rotator, int dir)
{
	int i;

	sde_smmu_ctrl(1);
	for (i = 0; i < data->num_planes && data->p[i].len; i++)
		sde_mdp_put_img(&data->p[i], rotator, dir);
	sde_smmu_ctrl(0);

	data->num_planes = 0;
}
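
/*
 * sde_mdp_data_get_and_validate_size() - acquire planes and check buffer size
 * @data - buffer data to populate
 * @planes - per-plane descriptions from the frame buffer data
 * @num_planes - number of planes supplied
 * @flags - buffer flags applied to every plane
 * @dev - device used for the dma-buf attachments
 * @rotator - true when the buffer belongs to the rotator domain
 * @dir - DMA data direction
 * @buffer - layer buffer carrying format, width and height
 *
 * After acquiring the planes, the combined length (minus the per-plane
 * offsets) is checked against the size required by the format; on any
 * failure the already-acquired planes are released again.
 */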
int sde_mdp_data_get_and_validate_size(struct sde_mdp_data *data,
	struct sde_fb_data *planes, int num_planes, u32 flags,
	struct device *dev, bool rotator, int dir,
	struct sde_layer_buffer *buffer)
{
	struct sde_mdp_format_params *fmt;
	struct sde_mdp_plane_sizes ps;
	int ret, i;
	unsigned long total_buf_len = 0;

	fmt = sde_get_format_params(buffer->format);
	if (!fmt) {
		SDEROT_ERR("Format %d not supported\n", buffer->format);
		return -EINVAL;
	}

	ret = sde_mdp_data_get(data, planes, num_planes,
			flags, dev, rotator, dir);
	if (ret)
		return ret;

	sde_mdp_get_plane_sizes(fmt, buffer->width, buffer->height, &ps, 0, 0);

	for (i = 0; i < num_planes; i++) {
		unsigned long plane_len = (data->p[i].srcp_dma_buf) ?
				data->p[i].srcp_dma_buf->size : data->p[i].len;

		if (plane_len < planes[i].offset) {
			SDEROT_ERR("Offset=%d larger than buffer size=%lu\n",
					planes[i].offset, plane_len);
			ret = -EINVAL;
			goto buf_too_small;
		}
		total_buf_len += plane_len - planes[i].offset;
	}

	if (total_buf_len < ps.total_size) {
		SDEROT_ERR("Buffer size=%lu, expected size=%d\n",
				total_buf_len,
				ps.total_size);
		ret = -EINVAL;
		goto buf_too_small;
	}
	return 0;

buf_too_small:
	sde_mdp_data_free(data, rotator, dir);
	return ret;
}
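
/*
 * sde_rot_dmabuf_map_tiny() - map_dma_buf op for the rotator's tiny exporter
 * @attach - dma-buf attachment being mapped
 * @dir - DMA data direction
 *
 * Backs the exported buffer with a single freshly allocated page, so only
 * buffers up to PAGE_SIZE are supported. Returns NULL on any failure.
 */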
static struct sg_table *sde_rot_dmabuf_map_tiny(
		struct dma_buf_attachment *attach, enum dma_data_direction dir)
{
	struct sde_mdp_img_data *data = attach->dmabuf->priv;
	struct sg_table *sgt;
	unsigned int order;
	struct page *p;

	if (!data) {
		SDEROT_ERR("NULL img data\n");
		return NULL;
	}

	if (data->len > PAGE_SIZE) {
		SDEROT_ERR("DMA buffer size is larger than %ld, bufsize:%ld\n",
				PAGE_SIZE, data->len);
		return NULL;
	}

	order = get_order(data->len);
	p = alloc_pages(GFP_KERNEL, order);
	if (!p) {
		SDEROT_ERR("Fail allocating page for datasize:%ld\n",
				data->len);
		return NULL;
	}

	sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		goto free_alloc_pages;

	/* only alloc a single page */
	if (sg_alloc_table(sgt, 1, GFP_KERNEL)) {
		SDEROT_ERR("fail sg_alloc_table\n");
		goto free_sgt;
	}

	sg_set_page(sgt->sgl, p, data->len, 0);

	if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) {
		SDEROT_ERR("fail dma_map_sg\n");
		goto free_table;
	}

	SDEROT_DBG("Successful generate sg_table:%pK datalen:%ld\n",
			sgt, data->len);
	return sgt;

free_table:
	sg_free_table(sgt);
free_sgt:
	kfree(sgt);
free_alloc_pages:
	__free_pages(p, order);
	return NULL;
}

static void sde_rot_dmabuf_unmap(struct dma_buf_attachment *attach,
		struct sg_table *sgt, enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int i;

	SDEROT_DBG("DMABUF unmap, sgt:%pK\n", sgt);
	dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir);
	for_each_sg(sgt->sgl, sg, sgt->nents, i) {
		put_page(sg_page(sg));
		__free_page(sg_page(sg));
	}
	sg_free_table(sgt);
	kfree(sgt);
}

static void sde_rot_dmabuf_release(struct dma_buf *buf)
{
	SDEROT_DBG("Release dmabuf:%pK\n", buf);
}

static int sde_rot_dmabuf_no_mmap(struct dma_buf *buf,
		struct vm_area_struct *vma)
{
	SDEROT_WARN("NOT SUPPORTING dmabuf mmap\n");
	return -EINVAL;
}

static const struct dma_buf_ops sde_rot_dmabuf_ops = {
	.map_dma_buf	= sde_rot_dmabuf_map_tiny,
	.unmap_dma_buf	= sde_rot_dmabuf_unmap,
	.release	= sde_rot_dmabuf_release,
	.mmap		= sde_rot_dmabuf_no_mmap,
};
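
/*
 * sde_rot_get_dmabuf() - export an image buffer as a dma-buf
 * @data - image buffer data used as the exporter's private context
 *
 * Wraps @data in a dma-buf backed by the tiny exporter ops above, so the
 * buffer must fit in a single page. Returns the handle from
 * dma_buf_export(), which is an ERR_PTR() on failure.
 *
 * Illustrative use (a sketch only, not taken from this driver):
 *
 *	struct dma_buf *buf = sde_rot_get_dmabuf(data);
 *
 *	if (IS_ERR(buf))
 *		return PTR_ERR(buf);
 */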
struct dma_buf *sde_rot_get_dmabuf(struct sde_mdp_img_data *data)
{
	DEFINE_DMA_BUF_EXPORT_INFO(exp_info);

	exp_info.ops = &sde_rot_dmabuf_ops;
	exp_info.size = (size_t)data->len;
	exp_info.flags = O_RDWR;
	exp_info.priv = data;

	return dma_buf_export(&exp_info);
}

#if (LINUX_VERSION_CODE >= KERNEL_VERSION(5, 19, 0))
MODULE_IMPORT_NS(DMA_BUF);
#endif