sde_rotator_util.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2012, 2015-2019, The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt)	"%s: " fmt, __func__

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/file.h>
#include <linux/spinlock.h>
#include <linux/types.h>
#include <linux/major.h>
#include <linux/dma-buf.h>
#include <linux/debugfs.h>
#include <linux/delay.h>
#include <linux/wait.h>
#include <linux/clk.h>
#include <linux/slab.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/regulator/consumer.h>
#include <media/msm_media_info.h>
#include <linux/videodev2.h>
#include <linux/ion.h>

#include "sde_rotator_util.h"
#include "sde_rotator_smmu.h"
#include "sde_rotator_debug.h"

#define Y_TILEWIDTH	48
#define Y_TILEHEIGHT	4
#define UV_TILEWIDTH	48
#define UV_TILEHEIGHT	8
#define TILEWIDTH_SIZE	64
#define TILEHEIGHT_SIZE	4

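/*
 * sde_mdp_get_v_h_subsample_rate() - map a chroma sampling enum to the
 * vertical and horizontal subsampling factors used by the plane size and
 * offset calculations below.
 */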
void sde_mdp_get_v_h_subsample_rate(u8 chroma_sample,
		u8 *v_sample, u8 *h_sample)
{
	switch (chroma_sample) {
	case SDE_MDP_CHROMA_H2V1:
		*v_sample = 1;
		*h_sample = 2;
		break;
	case SDE_MDP_CHROMA_H1V2:
		*v_sample = 2;
		*h_sample = 1;
		break;
	case SDE_MDP_CHROMA_420:
		*v_sample = 2;
		*h_sample = 2;
		break;
	default:
		*v_sample = 1;
		*h_sample = 1;
		break;
	}
}

void sde_rot_intersect_rect(struct sde_rect *res_rect,
	const struct sde_rect *dst_rect,
	const struct sde_rect *sci_rect)
{
	int l = max(dst_rect->x, sci_rect->x);
	int t = max(dst_rect->y, sci_rect->y);
	int r = min((dst_rect->x + dst_rect->w), (sci_rect->x + sci_rect->w));
	int b = min((dst_rect->y + dst_rect->h), (sci_rect->y + sci_rect->h));

	if (r < l || b < t)
		*res_rect = (struct sde_rect){0, 0, 0, 0};
	else
		*res_rect = (struct sde_rect){l, t, (r - l), (b - t)};
}

void sde_rot_crop_rect(struct sde_rect *src_rect,
	struct sde_rect *dst_rect,
	const struct sde_rect *sci_rect)
{
	struct sde_rect res;

	sde_rot_intersect_rect(&res, dst_rect, sci_rect);

	if (res.w && res.h) {
		if ((res.w != dst_rect->w) || (res.h != dst_rect->h)) {
			src_rect->x = src_rect->x + (res.x - dst_rect->x);
			src_rect->y = src_rect->y + (res.y - dst_rect->y);
			src_rect->w = res.w;
			src_rect->h = res.h;
		}
		*dst_rect = (struct sde_rect)
			{(res.x - sci_rect->x), (res.y - sci_rect->y),
			res.w, res.h};
	}
}

/*
 * sde_rect_cmp() - compares two rects
 * @rect1 - rect value to compare
 * @rect2 - rect value to compare
 *
 * Returns 1 if the rects are same, 0 otherwise.
 */
int sde_rect_cmp(struct sde_rect *rect1, struct sde_rect *rect2)
{
	return rect1->x == rect2->x && rect1->y == rect2->y &&
	       rect1->w == rect2->w && rect1->h == rect2->h;
}

/*
 * sde_rect_overlap_check() - compare two rects and check if they overlap
 * @rect1 - rect value to compare
 * @rect2 - rect value to compare
 *
 * Returns true if rects overlap, false otherwise.
 */
bool sde_rect_overlap_check(struct sde_rect *rect1, struct sde_rect *rect2)
{
	u32 rect1_left = rect1->x, rect1_right = rect1->x + rect1->w;
	u32 rect1_top = rect1->y, rect1_bottom = rect1->y + rect1->h;
	u32 rect2_left = rect2->x, rect2_right = rect2->x + rect2->w;
	u32 rect2_top = rect2->y, rect2_bottom = rect2->y + rect2->h;

	if ((rect1_right <= rect2_left) ||
	    (rect1_left >= rect2_right) ||
	    (rect1_bottom <= rect2_top) ||
	    (rect1_top >= rect2_bottom))
		return false;

	return true;
}

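/*
 * sde_mdp_get_rau_strides() - compute per-RAU strides and heights for BWC
 * (bandwidth compressed) buffers. YUV formats use 64-pixel wide RAUs,
 * interleaved RGB formats use 32-pixel wide RAUs.
 */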
int sde_mdp_get_rau_strides(u32 w, u32 h,
	struct sde_mdp_format_params *fmt,
	struct sde_mdp_plane_sizes *ps)
{
	if (fmt->is_yuv) {
		ps->rau_cnt = DIV_ROUND_UP(w, 64);
		ps->ystride[0] = 64 * 4;
		ps->rau_h[0] = 4;
		ps->rau_h[1] = 2;
		if (fmt->chroma_sample == SDE_MDP_CHROMA_H1V2)
			ps->ystride[1] = 64 * 2;
		else if (fmt->chroma_sample == SDE_MDP_CHROMA_H2V1) {
			ps->ystride[1] = 32 * 4;
			ps->rau_h[1] = 4;
		} else
			ps->ystride[1] = 32 * 2;

		/* account for both chroma components */
		ps->ystride[1] <<= 1;
	} else if (fmt->fetch_planes == SDE_MDP_PLANE_INTERLEAVED) {
		ps->rau_cnt = DIV_ROUND_UP(w, 32);
		ps->ystride[0] = 32 * 4 * fmt->bpp;
		ps->ystride[1] = 0;
		ps->rau_h[0] = 4;
		ps->rau_h[1] = 0;
	} else {
		SDEROT_ERR("Invalid format=%d\n", fmt->format);
		return -EINVAL;
	}

	ps->ystride[0] *= ps->rau_cnt;
	ps->ystride[1] *= ps->rau_cnt;
	ps->num_planes = 2;

	SDEROT_DBG("BWC rau_cnt=%d strides={%d,%d} heights={%d,%d}\n",
		ps->rau_cnt, ps->ystride[0], ps->ystride[1],
		ps->rau_h[0], ps->rau_h[1]);

	return 0;
}

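/*
 * sde_mdp_get_a5x_plane_size() - compute strides and plane sizes for macrotile
 * (A5X) layouts: NV12 8-bit, P010, TP10 and RGB variants. For UBWC formats,
 * additional metadata planes (two for YUV, one for RGB) are accounted for
 * after the bitstream planes.
 */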
static int sde_mdp_get_a5x_plane_size(struct sde_mdp_format_params *fmt,
	u32 width, u32 height, struct sde_mdp_plane_sizes *ps)
{
	int rc = 0;

	if (sde_mdp_is_nv12_8b_format(fmt)) {
		ps->num_planes = 2;
		/* Y bitstream stride and plane size */
		ps->ystride[0] = ALIGN(width, 128);
		ps->plane_size[0] = ALIGN(ps->ystride[0] * ALIGN(height, 32),
					4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = ALIGN(width, 128);
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
			ALIGN(height / 2, 32), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(width, 32), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 8), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
			ALIGN(DIV_ROUND_UP(height / 2, 8), 16), 4096);
	} else if (sde_mdp_is_p010_format(fmt)) {
		ps->num_planes = 2;
		/* Y bitstream stride and plane size */
		ps->ystride[0] = ALIGN(width * 2, 256);
		ps->plane_size[0] = ALIGN(ps->ystride[0] * ALIGN(height, 16),
					4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = ALIGN(width * 2, 256);
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
			ALIGN(height / 2, 16), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(width, 32), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 4), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
			ALIGN(DIV_ROUND_UP(height / 2, 4), 16), 4096);
	} else if (sde_mdp_is_tp10_format(fmt)) {
		u32 yWidth   = sde_mdp_general_align(width, 192);
		u32 yHeight  = ALIGN(height, 16);
		u32 uvWidth  = sde_mdp_general_align(width, 192);
		u32 uvHeight = ALIGN(height, 32);

		ps->num_planes = 2;

		/* Y bitstream stride and plane size */
		ps->ystride[0] = yWidth * TILEWIDTH_SIZE / Y_TILEWIDTH;
		ps->plane_size[0] = ALIGN(ps->ystride[0] *
				(yHeight * TILEHEIGHT_SIZE / Y_TILEHEIGHT),
				4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = uvWidth * TILEWIDTH_SIZE / UV_TILEWIDTH;
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
				(uvHeight * TILEHEIGHT_SIZE / UV_TILEHEIGHT),
				4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(yWidth / Y_TILEWIDTH, 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
				ALIGN((yHeight / Y_TILEHEIGHT), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(uvWidth / UV_TILEWIDTH, 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
				ALIGN((uvHeight / UV_TILEHEIGHT), 16), 4096);
	} else if (sde_mdp_is_rgb_format(fmt)) {
		uint32_t stride_alignment, bpp, aligned_bitstream_width;

		if (fmt->format == SDE_PIX_FMT_RGB_565_UBWC) {
			stride_alignment = 128;
			bpp = 2;
		} else {
			stride_alignment = 64;
			bpp = 4;
		}

		ps->num_planes = 1;

		/* RGB bitstream stride and plane size */
		aligned_bitstream_width = ALIGN(width, stride_alignment);
		ps->ystride[0] = aligned_bitstream_width * bpp;
		ps->plane_size[0] = ALIGN(bpp * aligned_bitstream_width *
			ALIGN(height, 16), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 1;

		/* RGB meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(aligned_bitstream_width,
			16), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 4), 16), 4096);
	} else {
		SDEROT_ERR("%s: UBWC format not supported for fmt:%d\n",
			__func__, fmt->format);
		rc = -EINVAL;
	}
done:
	return rc;
}

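/*
 * sde_mdp_get_plane_sizes() - fill @ps with per-plane strides, plane sizes
 * and the total buffer size for the given format and dimensions. Handles
 * macrotile (A5X), BWC, Venus, P010, TP10, pseudo planar and planar layouts.
 */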
int sde_mdp_get_plane_sizes(struct sde_mdp_format_params *fmt, u32 w, u32 h,
	struct sde_mdp_plane_sizes *ps, u32 bwc_mode, bool rotation)
{
	int i, rc = 0;
	u32 bpp;

	if (ps == NULL)
		return -EINVAL;

	if ((w > SDE_ROT_MAX_IMG_WIDTH) || (h > SDE_ROT_MAX_IMG_HEIGHT))
		return -ERANGE;

	bpp = fmt->bpp;
	memset(ps, 0, sizeof(struct sde_mdp_plane_sizes));

	if (sde_mdp_is_tilea5x_format(fmt)) {
		rc = sde_mdp_get_a5x_plane_size(fmt, w, h, ps);
	} else if (bwc_mode) {
		u32 height, meta_size;

		rc = sde_mdp_get_rau_strides(w, h, fmt, ps);
		if (rc)
			return rc;

		height = DIV_ROUND_UP(h, ps->rau_h[0]);
		meta_size = DIV_ROUND_UP(ps->rau_cnt, 8);
		ps->ystride[1] += meta_size;
		ps->ystride[0] += ps->ystride[1] + meta_size;
		ps->plane_size[0] = ps->ystride[0] * height;

		ps->ystride[1] = 2;
		ps->plane_size[1] = 2 * ps->rau_cnt * height;

		SDEROT_DBG("BWC data stride=%d size=%d meta size=%d\n",
			ps->ystride[0], ps->plane_size[0], ps->plane_size[1]);
	} else {
		if (fmt->fetch_planes == SDE_MDP_PLANE_INTERLEAVED) {
			ps->num_planes = 1;
			ps->plane_size[0] = w * h * bpp;
			ps->ystride[0] = w * bpp;
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_VENUS ||
			fmt->format == SDE_PIX_FMT_Y_CRCB_H2V2_VENUS ||
			fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_P010_VENUS) {
			int cf;

			switch (fmt->format) {
			case SDE_PIX_FMT_Y_CBCR_H2V2_VENUS:
				cf = COLOR_FMT_NV12;
				break;
			case SDE_PIX_FMT_Y_CRCB_H2V2_VENUS:
				cf = COLOR_FMT_NV21;
				break;
			case SDE_PIX_FMT_Y_CBCR_H2V2_P010_VENUS:
				cf = COLOR_FMT_P010;
				break;
			default:
				SDEROT_ERR("unknown color format %d\n",
					fmt->format);
				return -EINVAL;
			}

			ps->num_planes = 2;
			ps->ystride[0] = VENUS_Y_STRIDE(cf, w);
			ps->ystride[1] = VENUS_UV_STRIDE(cf, w);
			ps->plane_size[0] = VENUS_Y_SCANLINES(cf, h) *
				ps->ystride[0];
			ps->plane_size[1] = VENUS_UV_SCANLINES(cf, h) *
				ps->ystride[1];
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_P010) {
			/*
			 * |<---Y1--->000000<---Y0--->000000| Plane0
			 * |rrrrrrrrrr000000bbbbbbbbbb000000| Plane1
			 * |--------------------------------|
			 *  33222222222211111111110000000000  Bit
			 *  10987654321098765432109876543210  Location
			 */
			ps->num_planes = 2;
			ps->ystride[0] = w * 2;
			ps->ystride[1] = w * 2;
			ps->plane_size[0] = ps->ystride[0] * h;
			ps->plane_size[1] = ps->ystride[1] * h / 2;
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_TP10) {
			u32 yWidth   = sde_mdp_general_align(w, 192);
			u32 yHeight  = ALIGN(h, 16);
			u32 uvWidth  = sde_mdp_general_align(w, 192);
			u32 uvHeight = (ALIGN(h, 32)) / 2;

			ps->num_planes = 2;

			ps->ystride[0] = (yWidth / 3) * 4;
			ps->ystride[1] = (uvWidth / 3) * 4;
			ps->plane_size[0] = ALIGN(ps->ystride[0] * yHeight,
				4096);
			ps->plane_size[1] = ALIGN(ps->ystride[1] * uvHeight,
				4096);
		} else {
			u8 v_subsample, h_subsample, stride_align, height_align;
			u32 chroma_samp;

			chroma_samp = fmt->chroma_sample;

			sde_mdp_get_v_h_subsample_rate(chroma_samp,
				&v_subsample, &h_subsample);

			switch (fmt->format) {
			case SDE_PIX_FMT_Y_CR_CB_GH2V2:
				stride_align = 16;
				height_align = 1;
				break;
			default:
				stride_align = 1;
				height_align = 1;
				break;
			}

			ps->ystride[0] = ALIGN(w, stride_align);
			ps->ystride[1] = ALIGN(w / h_subsample, stride_align);
			ps->plane_size[0] = ps->ystride[0] *
				ALIGN(h, height_align);
			ps->plane_size[1] = ps->ystride[1] * (h / v_subsample);

			if (fmt->fetch_planes == SDE_MDP_PLANE_PSEUDO_PLANAR) {
				ps->num_planes = 2;
				ps->plane_size[1] *= 2;
				ps->ystride[1] *= 2;
			} else { /* planar */
				ps->num_planes = 3;
				ps->plane_size[2] = ps->plane_size[1];
				ps->ystride[2] = ps->ystride[1];
			}
		}
	}

	/* Safe to use MAX_PLANES as ps is memset at start of function */
	for (i = 0; i < SDE_ROT_MAX_PLANES; i++)
		ps->total_size += ps->plane_size[i];

	return rc;
}

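/*
 * sde_mdp_a5x_data_check() - split a single imported buffer into the
 * bitstream and metadata planes expected by the hardware for macrotile/UBWC
 * formats, and verify each resulting plane length against the computed
 * plane sizes.
 */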
static int sde_mdp_a5x_data_check(struct sde_mdp_data *data,
	struct sde_mdp_plane_sizes *ps,
	struct sde_mdp_format_params *fmt)
{
	int i, inc;
	unsigned long data_size = 0;
	dma_addr_t base_addr;

	if (data->p[0].len == ps->plane_size[0])
		goto end;

	/* From this point, assumption is plane 0 is to be divided */
	data_size = data->p[0].len;
	if (data_size < ps->total_size) {
		SDEROT_ERR(
			"insufficient current mem len=%lu required mem len=%u\n",
			data_size, ps->total_size);
		return -ENOMEM;
	}

	base_addr = data->p[0].addr;

	if (sde_mdp_is_yuv_format(fmt)) {
		/************************************************/
		/*      UBWC            **                      */
		/*      buffer          **      MDP PLANE       */
		/*      format          **                      */
		/************************************************/
		/* -------------------  ** -------------------- */
		/* |      Y meta     |  ** |    Y bitstream   | */
		/* |       data      |  ** |       plane      | */
		/* -------------------  ** -------------------- */
		/* |    Y bitstream  |  ** |  CbCr bitstream  | */
		/* |       data      |  ** |       plane      | */
		/* -------------------  ** -------------------- */
		/* |   Cbcr metadata |  ** |      Y meta      | */
		/* |       data      |  ** |       plane      | */
		/* -------------------  ** -------------------- */
		/* |  CbCr bitstream |  ** |     CbCr meta    | */
		/* |       data      |  ** |       plane      | */
		/* -------------------  ** -------------------- */
		/************************************************/

		/* configure Y bitstream plane */
		data->p[0].addr = base_addr + ps->plane_size[2];
		data->p[0].len = ps->plane_size[0];

		/* configure CbCr bitstream plane */
		data->p[1].addr = base_addr + ps->plane_size[0]
			+ ps->plane_size[2] + ps->plane_size[3];
		data->p[1].len = ps->plane_size[1];

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		/* configure Y metadata plane */
		data->p[2].addr = base_addr;
		data->p[2].len = ps->plane_size[2];

		/* configure CbCr metadata plane */
		data->p[3].addr = base_addr + ps->plane_size[0]
			+ ps->plane_size[2];
		data->p[3].len = ps->plane_size[3];
	} else {
		/************************************************/
		/*      UBWC            **                      */
		/*      buffer          **      MDP PLANE       */
		/*      format          **                      */
		/************************************************/
		/* -------------------  ** -------------------- */
		/* |     RGB meta    |  ** |   RGB bitstream  | */
		/* |       data      |  ** |       plane      | */
		/* -------------------  ** -------------------- */
		/* |  RGB bitstream  |  ** |       NONE       | */
		/* |       data      |  ** |                  | */
		/* -------------------  ** -------------------- */
		/*                      ** |     RGB meta     | */
		/*                      ** |       plane      | */
		/*                      ** -------------------- */
		/************************************************/

		/* configure RGB bitstream plane */
		data->p[0].addr = base_addr + ps->plane_size[2];
		data->p[0].len = ps->plane_size[0];

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		/* configure RGB metadata plane */
		data->p[2].addr = base_addr;
		data->p[2].len = ps->plane_size[2];
	}
done:
	data->num_planes = ps->num_planes;

end:
	if (data->num_planes != ps->num_planes) {
		SDEROT_ERR("num_planes don't match: fmt:%d, data:%d, ps:%d\n",
			fmt->format, data->num_planes, ps->num_planes);
		return -EINVAL;
	}

	inc = (sde_mdp_is_yuv_format(fmt) ? 1 : 2);
	for (i = 0; i < SDE_ROT_MAX_PLANES; i += inc) {
		if (data->p[i].len != ps->plane_size[i]) {
			SDEROT_ERR(
				"plane:%d fmt:%d, len does not match: data:%lu, ps:%d\n",
				i, fmt->format, data->p[i].len,
				ps->plane_size[i]);
			return -EINVAL;
		}
	}

	return 0;
}

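/*
 * sde_mdp_data_check() - validate that the supplied buffer planes are large
 * enough for the computed plane sizes, deriving missing plane addresses from
 * the previous plane when fewer planes were provided than the format needs.
 */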
int sde_mdp_data_check(struct sde_mdp_data *data,
	struct sde_mdp_plane_sizes *ps,
	struct sde_mdp_format_params *fmt)
{
	struct sde_mdp_img_data *prev, *curr;
	int i;

	if (!ps)
		return 0;

	if (!data || data->num_planes == 0)
		return -ENOMEM;

	if (sde_mdp_is_tilea5x_format(fmt))
		return sde_mdp_a5x_data_check(data, ps, fmt);

	SDEROT_DBG("srcp0=%pa len=%lu frame_size=%u\n", &data->p[0].addr,
		data->p[0].len, ps->total_size);

	for (i = 0; i < ps->num_planes; i++) {
		curr = &data->p[i];
		if (i >= data->num_planes) {
			u32 psize = ps->plane_size[i - 1];

			prev = &data->p[i - 1];
			if (prev->len > psize) {
				curr->len = prev->len - psize;
				prev->len = psize;
			}
			curr->addr = prev->addr + psize;
		}
		if (curr->len < ps->plane_size[i]) {
			SDEROT_ERR("insufficient mem=%lu p=%d len=%u\n",
				curr->len, i, ps->plane_size[i]);
			return -ENOMEM;
		}
		SDEROT_DBG("plane[%d] addr=%pa len=%lu\n", i,
			&curr->addr, curr->len);
	}
	data->num_planes = ps->num_planes;

	return 0;
}

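/*
 * sde_validate_offset_for_ubwc_format() - ensure the (x, y) offset is aligned
 * to the UBWC meta tile dimensions so that offset-based addressing stays on a
 * metadata macro block boundary.
 */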
int sde_validate_offset_for_ubwc_format(
	struct sde_mdp_format_params *fmt, u16 x, u16 y)
{
	int ret;
	u16 micro_w = 0, micro_h = 0;

	ret = sde_rot_get_ubwc_micro_dim(fmt->format, &micro_w, &micro_h);
	if (ret || !micro_w || !micro_h) {
		SDEROT_ERR("Could not get valid micro tile dimensions\n");
		return -EINVAL;
	}

	if (x % (micro_w * UBWC_META_MACRO_W_H)) {
		SDEROT_ERR("x=%d does not align with meta width=%d\n", x,
			micro_w * UBWC_META_MACRO_W_H);
		return -EINVAL;
	}

	if (y % (micro_h * UBWC_META_MACRO_W_H)) {
		SDEROT_ERR("y=%d does not align with meta height=%d\n", y,
			micro_h * UBWC_META_MACRO_W_H);
		return -EINVAL;
	}

	return ret;
}

/* x and y are assumed to be valid, expected to line up with start of tiles */
void sde_rot_ubwc_data_calc_offset(struct sde_mdp_data *data, u16 x, u16 y,
	struct sde_mdp_plane_sizes *ps, struct sde_mdp_format_params *fmt)
{
	u16 macro_w, micro_w, micro_h;
	u32 offset = 0;
	int ret;

	ret = sde_rot_get_ubwc_micro_dim(fmt->format, &micro_w, &micro_h);
	if (ret || !micro_w || !micro_h) {
		SDEROT_ERR("Could not get valid micro tile dimensions\n");
		return;
	}

	macro_w = 4 * micro_w;

	if (sde_mdp_is_nv12_8b_format(fmt)) {
		u16 chroma_macro_w = macro_w / 2;
		u16 chroma_micro_w = micro_w / 2;

		/* plane 1 and 3 are chroma, with sub sample of 2 */
		offset = y * ps->ystride[0] +
			(x / macro_w) * 4096;
		if (offset < data->p[0].len) {
			data->p[0].addr += offset;
		} else {
			ret = 1;
			goto done;
		}

		offset = y / 2 * ps->ystride[1] +
			((x / 2) / chroma_macro_w) * 4096;
		if (offset < data->p[1].len) {
			data->p[1].addr += offset;
		} else {
			ret = 2;
			goto done;
		}

		offset = (y / micro_h) * ps->ystride[2] +
			((x / micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[2].len) {
			data->p[2].addr += offset;
		} else {
			ret = 3;
			goto done;
		}

		offset = ((y / 2) / micro_h) * ps->ystride[3] +
			(((x / 2) / chroma_micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[3].len) {
			data->p[3].addr += offset;
		} else {
			ret = 4;
			goto done;
		}
	} else if (sde_mdp_is_nv12_10b_format(fmt)) {
		/* TODO: */
		SDEROT_ERR("%c%c%c%c format not implemented yet",
			fmt->format >> 0, fmt->format >> 8,
			fmt->format >> 16, fmt->format >> 24);
		ret = 1;
		goto done;
	} else {
		offset = y * ps->ystride[0] +
			(x / macro_w) * 4096;
		if (offset < data->p[0].len) {
			data->p[0].addr += offset;
		} else {
			ret = 1;
			goto done;
		}

		offset = DIV_ROUND_UP(y, micro_h) * ps->ystride[2] +
			((x / micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[2].len) {
			data->p[2].addr += offset;
		} else {
			ret = 3;
			goto done;
		}
	}
done:
	if (ret) {
		WARN(1, "idx %d, offsets:%u too large for buflen%lu\n",
			(ret - 1), offset, data->p[(ret - 1)].len);
	}
}

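/*
 * sde_rot_data_calc_offset() - advance the per-plane base addresses to the
 * (x, y) offset within the surface, dispatching to the UBWC helper for
 * macrotile formats and applying chroma subsampling for multi-plane YUV.
 */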
void sde_rot_data_calc_offset(struct sde_mdp_data *data, u16 x, u16 y,
	struct sde_mdp_plane_sizes *ps, struct sde_mdp_format_params *fmt)
{
	if ((x == 0) && (y == 0))
		return;

	if (sde_mdp_is_tilea5x_format(fmt)) {
		sde_rot_ubwc_data_calc_offset(data, x, y, ps, fmt);
		return;
	}

	data->p[0].addr += y * ps->ystride[0];

	if (data->num_planes == 1) {
		data->p[0].addr += x * fmt->bpp;
	} else {
		u16 xoff, yoff;
		u8 v_subsample, h_subsample;

		sde_mdp_get_v_h_subsample_rate(fmt->chroma_sample,
			&v_subsample, &h_subsample);

		xoff = x / h_subsample;
		yoff = y / v_subsample;

		data->p[0].addr += x;
		data->p[1].addr += xoff + (yoff * ps->ystride[1]);
		if (data->num_planes == 2) /* pseudo planar */
			data->p[1].addr += xoff;
		else /* planar */
			data->p[2].addr += xoff + (yoff * ps->ystride[2]);
	}
}

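/* Select the rotator SMMU domain for a buffer based on its secure flags. */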
static int sde_smmu_get_domain_type(u32 flags, bool rotator)
{
	int type;

	if (flags & SDE_SECURE_OVERLAY_SESSION)
		type = SDE_IOMMU_DOMAIN_ROT_SECURE;
	else
		type = SDE_IOMMU_DOMAIN_ROT_UNSECURE;

	return type;
}

static int sde_mdp_is_map_needed(struct sde_mdp_img_data *data)
{
	if (data->flags & SDE_SECURE_CAMERA_SESSION)
		return false;
	return true;
}

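/*
 * sde_mdp_put_img() - release a previously imported buffer: unmap and detach
 * the dma-buf attachment unless the buffer was mapped by the client
 * (SDE_ROT_EXT_IOVA) or the detach has already been done.
 */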
static int sde_mdp_put_img(struct sde_mdp_img_data *data, bool rotator,
	int dir)
{
	u32 domain;

	if (data->flags & SDE_ROT_EXT_IOVA) {
		SDEROT_DBG("buffer %pad/%lx is client mapped\n",
			&data->addr, data->len);
		return 0;
	}

	if (!IS_ERR_OR_NULL(data->srcp_dma_buf)) {
		SDEROT_DBG("ion hdl=%pK buf=0x%pa\n", data->srcp_dma_buf,
			&data->addr);
		if (sde_mdp_is_map_needed(data) && data->mapped) {
			domain = sde_smmu_get_domain_type(data->flags,
				rotator);
			data->mapped = false;
			SDEROT_DBG("unmap %pad/%lx d:%u f:%x\n", &data->addr,
				data->len, domain, data->flags);
		}
		if (!data->skip_detach) {
			data->srcp_attachment->dma_map_attrs |=
				DMA_ATTR_DELAYED_UNMAP;
			dma_buf_unmap_attachment(data->srcp_attachment,
				data->srcp_table, dir);
			dma_buf_detach(data->srcp_dma_buf,
				data->srcp_attachment);
			if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
				dma_buf_put(data->srcp_dma_buf);
				data->srcp_dma_buf = NULL;
			}
			data->skip_detach = true;
		}
	} else {
		return -ENOMEM;
	}

	return 0;
}

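/*
 * sde_mdp_get_img() - import a plane described by @img and attach its dma-buf
 * to the rotator SMMU domain (or directly to the device for secure camera
 * buffers). The actual mapping is deferred to sde_mdp_map_buffer().
 */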
static int sde_mdp_get_img(struct sde_fb_data *img,
	struct sde_mdp_img_data *data, struct device *dev,
	bool rotator, int dir)
{
	int ret = -EINVAL;
	u32 domain;

	data->flags |= img->flags;
	data->offset = img->offset;
	if (data->flags & SDE_ROT_EXT_DMA_BUF) {
		data->srcp_dma_buf = img->buffer;
	} else if (data->flags & SDE_ROT_EXT_IOVA) {
		data->addr = img->addr;
		data->len = img->len;
		SDEROT_DBG("use client %pad/%lx\n", &data->addr, data->len);
		return 0;
	} else if (IS_ERR(data->srcp_dma_buf)) {
		SDEROT_ERR("error on ion_import_fd\n");
		ret = PTR_ERR(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
		return ret;
	}

	if (sde_mdp_is_map_needed(data)) {
		domain = sde_smmu_get_domain_type(data->flags, rotator);

		SDEROT_DBG("%d domain=%d ihndl=%pK\n",
			__LINE__, domain, data->srcp_dma_buf);
		data->srcp_attachment =
			sde_smmu_dma_buf_attach(data->srcp_dma_buf, dev,
				domain);
		if (IS_ERR(data->srcp_attachment)) {
			SDEROT_ERR("%d Failed to attach dma buf\n", __LINE__);
			ret = PTR_ERR(data->srcp_attachment);
			goto err_put;
		}
	} else {
		data->srcp_attachment = dma_buf_attach(
			data->srcp_dma_buf, dev);
		if (IS_ERR(data->srcp_attachment)) {
			SDEROT_ERR(
				"Failed to attach dma buf for secure camera\n");
			ret = PTR_ERR(data->srcp_attachment);
			goto err_put;
		}
	}

	SDEROT_DBG("%d attach=%pK\n", __LINE__, data->srcp_attachment);
	data->addr = 0;
	data->len = 0;
	data->mapped = false;
	data->skip_detach = false;
	/* return early, mapping will be done later */

	return 0;
err_put:
	if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
		dma_buf_put(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
	}
	return ret;
}

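/*
 * sde_mdp_map_buffer() - map an attached dma-buf and record its device
 * address and length. Secure camera buffers are not mapped through the SMMU
 * and must be a single contiguous region that ends below the 4GB boundary.
 */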
static int sde_mdp_map_buffer(struct sde_mdp_img_data *data, bool rotator,
	int dir)
{
	int ret = -EINVAL;
	struct scatterlist *sg;
	struct sg_table *sgt = NULL;
	unsigned int i;
	unsigned long flags = 0;

	if (data->addr && data->len)
		return 0;

	if (data->flags & SDE_ROT_EXT_IOVA) {
		SDEROT_DBG("buffer %pad/%lx is client mapped\n",
			&data->addr, data->len);
		return 0;
	}

	if (!IS_ERR_OR_NULL(data->srcp_dma_buf)) {
		/*
		 * dma_buf_map_attachment will call into
		 * dma_map_sg_attrs, and so all cache maintenance
		 * attribute and lazy unmap attribute will be all
		 * provided here.
		 */
		data->srcp_attachment->dma_map_attrs |=
			DMA_ATTR_DELAYED_UNMAP;

		if (data->srcp_dma_buf && data->srcp_dma_buf->ops &&
			data->srcp_dma_buf->ops->get_flags) {
			if (data->srcp_dma_buf->ops->get_flags(
					data->srcp_dma_buf,
					&flags) == 0) {
				if ((flags & ION_FLAG_CACHED) == 0) {
					SDEROT_DBG("dmabuf is uncached type\n");
					data->srcp_attachment->dma_map_attrs |=
						DMA_ATTR_SKIP_CPU_SYNC;
				}
			}
		}

		sgt = dma_buf_map_attachment(
			data->srcp_attachment, dir);
		if (IS_ERR_OR_NULL(sgt) ||
			IS_ERR_OR_NULL(sgt->sgl)) {
			SDEROT_ERR("Failed to map attachment\n");
			ret = PTR_ERR(sgt);
			goto err_detach;
		}
		data->srcp_table = sgt;

		data->len = 0;
		for_each_sg(sgt->sgl, sg, sgt->nents, i) {
			data->len += sg->length;
		}

		if (sde_mdp_is_map_needed(data)) {
			data->addr = data->srcp_table->sgl->dma_address;
			SDEROT_DBG("map %pad/%lx f:%x\n",
				&data->addr,
				data->len,
				data->flags);
			data->mapped = true;
			ret = 0;
		} else {
			if (sgt->nents != 1) {
				SDEROT_ERR(
					"Fail ion buffer mapping for secure camera\n");
				ret = -EINVAL;
				goto err_unmap;
			}

			if (((uint64_t)sg_dma_address(sgt->sgl) >=
					PHY_ADDR_4G - sgt->sgl->length)) {
				SDEROT_ERR(
					"ion buffer mapped size invalid, size=%d\n",
					sgt->sgl->length);
				ret = -EINVAL;
				goto err_unmap;
			}

			data->addr = sg_phys(data->srcp_table->sgl);
			ret = 0;
		}
	}

	if (!data->addr) {
		SDEROT_ERR("start address is zero!\n");
		sde_mdp_put_img(data, rotator, dir);
		return -ENOMEM;
	}

	if (!ret && (data->offset < data->len)) {
		data->addr += data->offset;
		data->len -= data->offset;

		SDEROT_DBG("ihdl=%pK buf=0x%pa len=0x%lx\n",
			data->srcp_dma_buf, &data->addr, data->len);
	} else {
		sde_mdp_put_img(data, rotator, dir);
		return ret ? : -EOVERFLOW;
	}

	return ret;

err_unmap:
	dma_buf_unmap_attachment(data->srcp_attachment, data->srcp_table, dir);
err_detach:
	dma_buf_detach(data->srcp_dma_buf, data->srcp_attachment);
	if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
		dma_buf_put(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
	}
	return ret;
}

static int sde_mdp_data_get(struct sde_mdp_data *data,
	struct sde_fb_data *planes, int num_planes, u32 flags,
	struct device *dev, bool rotator, int dir)
{
	int i, rc = 0;

	if ((num_planes <= 0) || (num_planes > SDE_ROT_MAX_PLANES))
		return -EINVAL;

	for (i = 0; i < num_planes; i++) {
		data->p[i].flags = flags;
		rc = sde_mdp_get_img(&planes[i], &data->p[i], dev, rotator,
			dir);
		if (rc) {
			SDEROT_ERR("failed to get buf p=%d flags=%x\n",
				i, flags);
			while (i > 0) {
				i--;
				sde_mdp_put_img(&data->p[i], rotator, dir);
			}
			break;
		}
	}

	data->num_planes = i;

	return rc;
}

int sde_mdp_data_map(struct sde_mdp_data *data, bool rotator, int dir)
{
	int i, rc = 0;

	if (!data || !data->num_planes || data->num_planes > SDE_ROT_MAX_PLANES)
		return -EINVAL;

	for (i = 0; i < data->num_planes; i++) {
		rc = sde_mdp_map_buffer(&data->p[i], rotator, dir);
		if (rc) {
			SDEROT_ERR("failed to map buf p=%d\n", i);
			while (i > 0) {
				i--;
				sde_mdp_put_img(&data->p[i], rotator, dir);
			}
			break;
		}
	}
	SDEROT_EVTLOG(data->num_planes, dir, data->p[0].addr, data->p[0].len,
		data->p[0].mapped);

	return rc;
}

void sde_mdp_data_free(struct sde_mdp_data *data, bool rotator, int dir)
{
	int i;

	sde_smmu_ctrl(1);
	for (i = 0; i < data->num_planes && data->p[i].len; i++)
		sde_mdp_put_img(&data->p[i], rotator, dir);
	sde_smmu_ctrl(0);

	data->num_planes = 0;
}

int sde_mdp_data_get_and_validate_size(struct sde_mdp_data *data,
	struct sde_fb_data *planes, int num_planes, u32 flags,
	struct device *dev, bool rotator, int dir,
	struct sde_layer_buffer *buffer)
{
	struct sde_mdp_format_params *fmt;
	struct sde_mdp_plane_sizes ps;
	int ret, i;
	unsigned long total_buf_len = 0;

	fmt = sde_get_format_params(buffer->format);
	if (!fmt) {
		SDEROT_ERR("Format %d not supported\n", buffer->format);
		return -EINVAL;
	}

	ret = sde_mdp_data_get(data, planes, num_planes,
		flags, dev, rotator, dir);
	if (ret)
		return ret;

	sde_mdp_get_plane_sizes(fmt, buffer->width, buffer->height, &ps, 0, 0);

	for (i = 0; i < num_planes; i++) {
		unsigned long plane_len = (data->p[i].srcp_dma_buf) ?
			data->p[i].srcp_dma_buf->size : data->p[i].len;

		if (plane_len < planes[i].offset) {
			SDEROT_ERR("Offset=%d larger than buffer size=%lu\n",
				planes[i].offset, plane_len);
			ret = -EINVAL;
			goto buf_too_small;
		}
		total_buf_len += plane_len - planes[i].offset;
	}

	if (total_buf_len < ps.total_size) {
		SDEROT_ERR("Buffer size=%lu, expected size=%d\n",
			total_buf_len,
			ps.total_size);
		ret = -EINVAL;
		goto buf_too_small;
	}
	return 0;

buf_too_small:
	sde_mdp_data_free(data, rotator, dir);
	return ret;
}

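/*
 * sde_rot_dmabuf_map_tiny() - map_dma_buf callback for the dma-buf exported
 * by sde_rot_get_dmabuf(): backs the buffer with a freshly allocated page
 * (the payload must fit within PAGE_SIZE) and maps it for DMA.
 */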
static struct sg_table *sde_rot_dmabuf_map_tiny(
	struct dma_buf_attachment *attach, enum dma_data_direction dir)
{
	struct sde_mdp_img_data *data = attach->dmabuf->priv;
	struct sg_table *sgt;
	unsigned int order;
	struct page *p;

	if (!data) {
		SDEROT_ERR("NULL img data\n");
		return NULL;
	}

	if (data->len > PAGE_SIZE) {
		SDEROT_ERR("DMA buffer size is larger than %ld, bufsize:%ld\n",
			PAGE_SIZE, data->len);
		return NULL;
	}

	order = get_order(data->len);
	p = alloc_pages(GFP_KERNEL, order);
	if (!p) {
		SDEROT_ERR("Fail allocating page for datasize:%ld\n",
			data->len);
		return NULL;
	}

	sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		goto free_alloc_pages;

	/* only alloc a single page */
	if (sg_alloc_table(sgt, 1, GFP_KERNEL)) {
		SDEROT_ERR("fail sg_alloc_table\n");
		goto free_sgt;
	}

	sg_set_page(sgt->sgl, p, data->len, 0);

	if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) {
		SDEROT_ERR("fail dma_map_sg\n");
		goto free_table;
	}

	SDEROT_DBG("Successful generate sg_table:%pK datalen:%ld\n",
		sgt, data->len);
	return sgt;

free_table:
	sg_free_table(sgt);
free_sgt:
	kfree(sgt);
free_alloc_pages:
	__free_pages(p, order);
	return NULL;
}

static void sde_rot_dmabuf_unmap(struct dma_buf_attachment *attach,
	struct sg_table *sgt, enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int i;

	SDEROT_DBG("DMABUF unmap, sgt:%pK\n", sgt);
	dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir);

	for_each_sg(sgt->sgl, sg, sgt->nents, i) {
		put_page(sg_page(sg));
		__free_page(sg_page(sg));
	}
	sg_free_table(sgt);
	kfree(sgt);
}

static void *sde_rot_dmabuf_no_map(struct dma_buf *buf, unsigned long n)
{
	SDEROT_WARN("NOT SUPPORTING dmabuf map\n");
	return NULL;
}

static void sde_rot_dmabuf_no_unmap(struct dma_buf *buf, unsigned long n,
	void *addr)
{
	SDEROT_WARN("NOT SUPPORTING dmabuf unmap\n");
}

static void sde_rot_dmabuf_release(struct dma_buf *buf)
{
	SDEROT_DBG("Release dmabuf:%pK\n", buf);
}

static int sde_rot_dmabuf_no_mmap(struct dma_buf *buf,
	struct vm_area_struct *vma)
{
	SDEROT_WARN("NOT SUPPORTING dmabuf mmap\n");
	return -EINVAL;
}

static const struct dma_buf_ops sde_rot_dmabuf_ops = {
	.map_dma_buf	= sde_rot_dmabuf_map_tiny,
	.unmap_dma_buf	= sde_rot_dmabuf_unmap,
	.release	= sde_rot_dmabuf_release,
	.map		= sde_rot_dmabuf_no_map,
	.unmap		= sde_rot_dmabuf_no_unmap,
	.mmap		= sde_rot_dmabuf_no_mmap,
};

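/*
 * sde_rot_get_dmabuf() - export a dma-buf of size data->len backed by
 * sde_rot_dmabuf_ops; the buffer is only backed by an allocated page once it
 * is mapped through sde_rot_dmabuf_map_tiny().
 */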
struct dma_buf *sde_rot_get_dmabuf(struct sde_mdp_img_data *data)
{
	DEFINE_DMA_BUF_EXPORT_INFO(exp_info);

	exp_info.ops = &sde_rot_dmabuf_ops;
	exp_info.size = (size_t)data->len;
	exp_info.flags = O_RDWR;
	exp_info.priv = data;

	return dma_buf_export(&exp_info);
}