sde_rotator_util.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2022-2024 Qualcomm Innovation Center, Inc. All rights reserved.
 * Copyright (c) 2012, 2015-2019, 2021, The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt) "%s: " fmt, __func__

#include <linux/qcom-dma-mapping.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/file.h>
#include <linux/spinlock.h>
#include <linux/types.h>
#include <linux/major.h>
#include <linux/dma-buf.h>
#include <linux/debugfs.h>
#include <linux/delay.h>
#include <linux/wait.h>
#include <linux/clk.h>
#include <linux/slab.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/regulator/consumer.h>
#include <linux/module.h>
#include <media/mmm_color_fmt.h>
#include <linux/videodev2.h>
#include <linux/ion.h>

#include "sde_rotator_util.h"
#include "sde_rotator_smmu.h"
#include "sde_rotator_debug.h"

#define Y_TILEWIDTH	48
#define Y_TILEHEIGHT	4
#define UV_TILEWIDTH	48
#define UV_TILEHEIGHT	8
#define TILEWIDTH_SIZE	64
#define TILEHEIGHT_SIZE	4

void sde_mdp_get_v_h_subsample_rate(u8 chroma_sample,
		u8 *v_sample, u8 *h_sample)
{
	switch (chroma_sample) {
	case SDE_MDP_CHROMA_H2V1:
		*v_sample = 1;
		*h_sample = 2;
		break;
	case SDE_MDP_CHROMA_H1V2:
		*v_sample = 2;
		*h_sample = 1;
		break;
	case SDE_MDP_CHROMA_420:
		*v_sample = 2;
		*h_sample = 2;
		break;
	default:
		*v_sample = 1;
		*h_sample = 1;
		break;
	}
}
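
/*
 * sde_rot_intersect_rect() - intersect two rectangles
 * @res_rect - result of the intersection
 * @dst_rect - first rect to intersect
 * @sci_rect - second rect to intersect
 *
 * Writes the overlapping region of @dst_rect and @sci_rect to @res_rect,
 * or an all-zero rect if the two do not overlap.
 */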
void sde_rot_intersect_rect(struct sde_rect *res_rect,
		const struct sde_rect *dst_rect,
		const struct sde_rect *sci_rect)
{
	int l = max(dst_rect->x, sci_rect->x);
	int t = max(dst_rect->y, sci_rect->y);
	int r = min((dst_rect->x + dst_rect->w), (sci_rect->x + sci_rect->w));
	int b = min((dst_rect->y + dst_rect->h), (sci_rect->y + sci_rect->h));

	if (r < l || b < t)
		*res_rect = (struct sde_rect){0, 0, 0, 0};
	else
		*res_rect = (struct sde_rect){l, t, (r-l), (b-t)};
}
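
/*
 * sde_rot_crop_rect() - crop a source/destination rect pair to a scissor rect
 * @src_rect - source rect, adjusted in place by the amount cropped from dst
 * @dst_rect - destination rect, clipped against @sci_rect in place
 * @sci_rect - scissor (clip) rect
 *
 * If the clipped destination is non-empty, the destination is rewritten
 * relative to the scissor origin and the source is shifted and shrunk by the
 * same amount.
 */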
void sde_rot_crop_rect(struct sde_rect *src_rect,
		struct sde_rect *dst_rect,
		const struct sde_rect *sci_rect)
{
	struct sde_rect res;

	sde_rot_intersect_rect(&res, dst_rect, sci_rect);

	if (res.w && res.h) {
		if ((res.w != dst_rect->w) || (res.h != dst_rect->h)) {
			src_rect->x = src_rect->x + (res.x - dst_rect->x);
			src_rect->y = src_rect->y + (res.y - dst_rect->y);
			src_rect->w = res.w;
			src_rect->h = res.h;
		}
		*dst_rect = (struct sde_rect)
			{(res.x - sci_rect->x), (res.y - sci_rect->y),
			res.w, res.h};
	}
}

/*
 * sde_rect_cmp() - compares two rects
 * @rect1 - rect value to compare
 * @rect2 - rect value to compare
 *
 * Returns 1 if the rects are the same, 0 otherwise.
 */
int sde_rect_cmp(struct sde_rect *rect1, struct sde_rect *rect2)
{
	return rect1->x == rect2->x && rect1->y == rect2->y &&
	       rect1->w == rect2->w && rect1->h == rect2->h;
}

/*
 * sde_rect_overlap_check() - compare two rects and check if they overlap
 * @rect1 - rect value to compare
 * @rect2 - rect value to compare
 *
 * Returns true if rects overlap, false otherwise.
 */
bool sde_rect_overlap_check(struct sde_rect *rect1, struct sde_rect *rect2)
{
	u32 rect1_left = rect1->x, rect1_right = rect1->x + rect1->w;
	u32 rect1_top = rect1->y, rect1_bottom = rect1->y + rect1->h;
	u32 rect2_left = rect2->x, rect2_right = rect2->x + rect2->w;
	u32 rect2_top = rect2->y, rect2_bottom = rect2->y + rect2->h;

	if ((rect1_right <= rect2_left) ||
	    (rect1_left >= rect2_right) ||
	    (rect1_bottom <= rect2_top) ||
	    (rect1_top >= rect2_bottom))
		return false;

	return true;
}
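
/*
 * sde_mdp_get_rau_strides() - compute per-plane RAU strides for BWC
 * @w - frame width in pixels
 * @h - frame height in lines
 * @fmt - format description of the frame
 * @ps - plane size structure to populate
 *
 * Fills in the RAU count, per-plane strides and RAU heights used by
 * bandwidth-compressed (BWC) fetches. Only YUV and interleaved RGB formats
 * are accepted; anything else returns -EINVAL.
 */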
int sde_mdp_get_rau_strides(u32 w, u32 h,
		struct sde_mdp_format_params *fmt,
		struct sde_mdp_plane_sizes *ps)
{
	if (fmt->is_yuv) {
		ps->rau_cnt = DIV_ROUND_UP(w, 64);
		ps->ystride[0] = 64 * 4;
		ps->rau_h[0] = 4;
		ps->rau_h[1] = 2;
		if (fmt->chroma_sample == SDE_MDP_CHROMA_H1V2)
			ps->ystride[1] = 64 * 2;
		else if (fmt->chroma_sample == SDE_MDP_CHROMA_H2V1) {
			ps->ystride[1] = 32 * 4;
			ps->rau_h[1] = 4;
		} else
			ps->ystride[1] = 32 * 2;

		/* account for both chroma components */
		ps->ystride[1] <<= 1;
	} else if (fmt->fetch_planes == SDE_MDP_PLANE_INTERLEAVED) {
		ps->rau_cnt = DIV_ROUND_UP(w, 32);
		ps->ystride[0] = 32 * 4 * fmt->bpp;
		ps->ystride[1] = 0;
		ps->rau_h[0] = 4;
		ps->rau_h[1] = 0;
	} else {
		SDEROT_ERR("Invalid format=%d\n", fmt->format);
		return -EINVAL;
	}

	ps->ystride[0] *= ps->rau_cnt;
	ps->ystride[1] *= ps->rau_cnt;
	ps->num_planes = 2;

	SDEROT_DBG("BWC rau_cnt=%d strides={%d,%d} heights={%d,%d}\n",
		ps->rau_cnt, ps->ystride[0], ps->ystride[1],
		ps->rau_h[0], ps->rau_h[1]);

	return 0;
}
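
/*
 * sde_mdp_get_a5x_plane_size() - plane strides/sizes for A5X tile formats
 * @fmt - format description of the frame
 * @width - frame width in pixels
 * @height - frame height in lines
 * @ps - plane size structure to populate
 *
 * Handles the macro-tile layouts: NV12 8-bit, P010, TP10 and RGB variants.
 * Bitstream planes are always filled in; when the format is UBWC, the Y/CbCr
 * (or RGB) metadata planes are appended as additional planes.
 */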
static int sde_mdp_get_a5x_plane_size(struct sde_mdp_format_params *fmt,
	u32 width, u32 height, struct sde_mdp_plane_sizes *ps)
{
	int rc = 0;

	if (sde_mdp_is_nv12_8b_format(fmt)) {
		ps->num_planes = 2;
		/* Y bitstream stride and plane size */
		ps->ystride[0] = ALIGN(width, 128);
		ps->plane_size[0] = ALIGN(ps->ystride[0] * ALIGN(height, 32),
					4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = ALIGN(width, 128);
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
			ALIGN(height / 2, 32), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(width, 32), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 8), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
			ALIGN(DIV_ROUND_UP(height / 2, 8), 16), 4096);
	} else if (sde_mdp_is_p010_format(fmt)) {
		ps->num_planes = 2;
		/* Y bitstream stride and plane size */
		ps->ystride[0] = ALIGN(width * 2, 256);
		ps->plane_size[0] = ALIGN(ps->ystride[0] * ALIGN(height, 16),
					4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = ALIGN(width * 2, 256);
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
			ALIGN(height / 2, 16), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(width, 32), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 4), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
			ALIGN(DIV_ROUND_UP(height / 2, 4), 16), 4096);
	} else if (sde_mdp_is_tp10_format(fmt)) {
		u32 yWidth = sde_mdp_general_align(width, 192);
		u32 yHeight = ALIGN(height, 16);
		u32 uvWidth = sde_mdp_general_align(width, 192);
		u32 uvHeight = ALIGN(height, 32);

		ps->num_planes = 2;

		/* Y bitstream stride and plane size */
		ps->ystride[0] = yWidth * TILEWIDTH_SIZE / Y_TILEWIDTH;
		ps->plane_size[0] = ALIGN(ps->ystride[0] *
				(yHeight * TILEHEIGHT_SIZE / Y_TILEHEIGHT),
				4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = uvWidth * TILEWIDTH_SIZE / UV_TILEWIDTH;
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
				(uvHeight * TILEHEIGHT_SIZE / UV_TILEHEIGHT),
				4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(yWidth / Y_TILEWIDTH, 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
				ALIGN((yHeight / Y_TILEHEIGHT), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(uvWidth / UV_TILEWIDTH, 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
				ALIGN((uvHeight / UV_TILEHEIGHT), 16), 4096);
	} else if (sde_mdp_is_rgb_format(fmt)) {
		uint32_t stride_alignment, bpp, aligned_bitstream_width;

		if (fmt->format == SDE_PIX_FMT_RGB_565_UBWC) {
			stride_alignment = 128;
			bpp = 2;
		} else {
			stride_alignment = 64;
			bpp = 4;
		}

		ps->num_planes = 1;

		/* RGB bitstream stride and plane size */
		aligned_bitstream_width = ALIGN(width, stride_alignment);
		ps->ystride[0] = aligned_bitstream_width * bpp;
		ps->plane_size[0] = ALIGN(bpp * aligned_bitstream_width *
			ALIGN(height, 16), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 1;

		/* RGB meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(aligned_bitstream_width,
			16), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 4), 16), 4096);
	} else {
		SDEROT_ERR("%s: UBWC format not supported for fmt:%d\n",
			__func__, fmt->format);
		rc = -EINVAL;
	}
done:
	return rc;
}
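
/*
 * sde_mdp_get_plane_sizes() - calculate plane strides, sizes and total size
 * @fmt - format description of the frame
 * @w - frame width in pixels
 * @h - frame height in lines
 * @ps - plane size structure to populate
 * @bwc_mode - non-zero when bandwidth compression is enabled
 * @rotation - true when sizing for rotation (not referenced in this
 *             implementation)
 *
 * Dispatches to the A5X tile helper for macro-tile formats, to the RAU helper
 * for BWC, and otherwise derives linear strides from the format's chroma
 * subsampling and alignment rules. ps->total_size is the sum of all plane
 * sizes.
 *
 * Illustrative call (caller-side variable names are hypothetical):
 *	struct sde_mdp_plane_sizes ps;
 *	int rc = sde_mdp_get_plane_sizes(fmt, width, height, &ps, 0, false);
 */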
int sde_mdp_get_plane_sizes(struct sde_mdp_format_params *fmt, u32 w, u32 h,
	struct sde_mdp_plane_sizes *ps, u32 bwc_mode, bool rotation)
{
	int i, rc = 0;
	u32 bpp;

	if (ps == NULL)
		return -EINVAL;

	if ((w > SDE_ROT_MAX_IMG_WIDTH) || (h > SDE_ROT_MAX_IMG_HEIGHT))
		return -ERANGE;

	bpp = fmt->bpp;
	memset(ps, 0, sizeof(struct sde_mdp_plane_sizes));

	if (sde_mdp_is_tilea5x_format(fmt)) {
		rc = sde_mdp_get_a5x_plane_size(fmt, w, h, ps);
	} else if (bwc_mode) {
		u32 height, meta_size;

		rc = sde_mdp_get_rau_strides(w, h, fmt, ps);
		if (rc)
			return rc;

		height = DIV_ROUND_UP(h, ps->rau_h[0]);
		meta_size = DIV_ROUND_UP(ps->rau_cnt, 8);
		ps->ystride[1] += meta_size;
		ps->ystride[0] += ps->ystride[1] + meta_size;
		ps->plane_size[0] = ps->ystride[0] * height;

		ps->ystride[1] = 2;
		ps->plane_size[1] = 2 * ps->rau_cnt * height;

		SDEROT_DBG("BWC data stride=%d size=%d meta size=%d\n",
			ps->ystride[0], ps->plane_size[0], ps->plane_size[1]);
	} else {
		if (fmt->fetch_planes == SDE_MDP_PLANE_INTERLEAVED) {
			ps->num_planes = 1;
			ps->plane_size[0] = w * h * bpp;
			ps->ystride[0] = w * bpp;
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_VENUS ||
			fmt->format == SDE_PIX_FMT_Y_CRCB_H2V2_VENUS ||
			fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_P010_VENUS) {
			int cf;

			switch (fmt->format) {
			case SDE_PIX_FMT_Y_CBCR_H2V2_VENUS:
				cf = MMM_COLOR_FMT_NV12;
				break;
			case SDE_PIX_FMT_Y_CRCB_H2V2_VENUS:
				cf = MMM_COLOR_FMT_NV21;
				break;
			case SDE_PIX_FMT_Y_CBCR_H2V2_P010_VENUS:
				cf = MMM_COLOR_FMT_P010;
				break;
			default:
				SDEROT_ERR("unknown color format %d\n",
					fmt->format);
				return -EINVAL;
			}

			ps->num_planes = 2;
			ps->ystride[0] = MMM_COLOR_FMT_Y_STRIDE(cf, w);
			ps->ystride[1] = MMM_COLOR_FMT_UV_STRIDE(cf, w);
			ps->plane_size[0] = MMM_COLOR_FMT_Y_SCANLINES(cf, h) *
				ps->ystride[0];
			ps->plane_size[1] = MMM_COLOR_FMT_UV_SCANLINES(cf, h) *
				ps->ystride[1];
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_P010) {
			/*
			 * |<---Y1--->000000<---Y0--->000000| Plane0
			 * |rrrrrrrrrr000000bbbbbbbbbb000000| Plane1
			 * |--------------------------------|
			 *  33222222222211111111110000000000  Bit
			 *  10987654321098765432109876543210  Location
			 */
			ps->num_planes = 2;
			ps->ystride[0] = w * 2;
			ps->ystride[1] = w * 2;
			ps->plane_size[0] = ps->ystride[0] * h;
			ps->plane_size[1] = ps->ystride[1] * h / 2;
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_TP10) {
			u32 yWidth = sde_mdp_general_align(w, 192);
			u32 yHeight = ALIGN(h, 16);
			u32 uvWidth = sde_mdp_general_align(w, 192);
			u32 uvHeight = (ALIGN(h, 32)) / 2;

			ps->num_planes = 2;

			ps->ystride[0] = (yWidth / 3) * 4;
			ps->ystride[1] = (uvWidth / 3) * 4;
			ps->plane_size[0] = ALIGN(ps->ystride[0] * yHeight,
					4096);
			ps->plane_size[1] = ALIGN(ps->ystride[1] * uvHeight,
					4096);
		} else {
			u8 v_subsample, h_subsample, stride_align, height_align;
			u32 chroma_samp;

			chroma_samp = fmt->chroma_sample;

			sde_mdp_get_v_h_subsample_rate(chroma_samp,
				&v_subsample, &h_subsample);

			switch (fmt->format) {
			case SDE_PIX_FMT_Y_CR_CB_GH2V2:
				stride_align = 16;
				height_align = 1;
				break;
			default:
				stride_align = 1;
				height_align = 1;
				break;
			}

			ps->ystride[0] = ALIGN(w, stride_align);
			ps->ystride[1] = ALIGN(w / h_subsample, stride_align);
			ps->plane_size[0] = ps->ystride[0] *
				ALIGN(h, height_align);
			ps->plane_size[1] = ps->ystride[1] * (h / v_subsample);

			if (fmt->fetch_planes == SDE_MDP_PLANE_PSEUDO_PLANAR) {
				ps->num_planes = 2;
				ps->plane_size[1] *= 2;
				ps->ystride[1] *= 2;
			} else { /* planar */
				ps->num_planes = 3;
				ps->plane_size[2] = ps->plane_size[1];
				ps->ystride[2] = ps->ystride[1];
			}
		}
	}

	/* Safe to use MAX_PLANES as ps is memset at start of function */
	for (i = 0; i < SDE_ROT_MAX_PLANES; i++)
		ps->total_size += ps->plane_size[i];

	return rc;
}
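
/*
 * sde_mdp_a5x_data_check() - split a single A5X/UBWC allocation into planes
 * @data - buffer descriptor whose plane 0 holds the whole allocation
 * @ps - expected plane sizes for the frame
 * @fmt - format description of the frame
 *
 * UBWC buffers are handed in as one contiguous allocation; this derives the
 * per-plane addresses and lengths in MDP plane order (bitstream planes first,
 * metadata planes after) and then verifies each plane length against @ps.
 */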
static int sde_mdp_a5x_data_check(struct sde_mdp_data *data,
		struct sde_mdp_plane_sizes *ps,
		struct sde_mdp_format_params *fmt)
{
	int i, inc;
	unsigned long data_size = 0;
	dma_addr_t base_addr;

	if (data->p[0].len == ps->plane_size[0])
		goto end;

	/* From this point, assumption is plane 0 is to be divided */
	data_size = data->p[0].len;
	if (data_size < ps->total_size) {
		SDEROT_ERR(
			"insufficient current mem len=%lu required mem len=%u\n",
			data_size, ps->total_size);
		return -ENOMEM;
	}

	base_addr = data->p[0].addr;

	if (sde_mdp_is_yuv_format(fmt)) {
		/************************************************/
		/*      UBWC            **                      */
		/*      buffer          **      MDP PLANE       */
		/*      format          **                      */
		/************************************************/
		/* -------------------  ** -------------------- */
		/* |      Y meta     |  ** |   Y bitstream    | */
		/* |       data      |  ** |      plane       | */
		/* -------------------  ** -------------------- */
		/* |   Y bitstream   |  ** |  CbCr bitstream  | */
		/* |       data      |  ** |      plane       | */
		/* -------------------  ** -------------------- */
		/* |  CbCr metadata  |  ** |      Y meta      | */
		/* |       data      |  ** |      plane       | */
		/* -------------------  ** -------------------- */
		/* |  CbCr bitstream |  ** |    CbCr meta     | */
		/* |       data      |  ** |      plane       | */
		/* -------------------  ** -------------------- */
		/************************************************/

		/* configure Y bitstream plane */
		data->p[0].addr = base_addr + ps->plane_size[2];
		data->p[0].len = ps->plane_size[0];

		/* configure CbCr bitstream plane */
		data->p[1].addr = base_addr + ps->plane_size[0]
			+ ps->plane_size[2] + ps->plane_size[3];
		data->p[1].len = ps->plane_size[1];

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		/* configure Y metadata plane */
		data->p[2].addr = base_addr;
		data->p[2].len = ps->plane_size[2];

		/* configure CbCr metadata plane */
		data->p[3].addr = base_addr + ps->plane_size[0]
			+ ps->plane_size[2];
		data->p[3].len = ps->plane_size[3];
	} else {
		/************************************************/
		/*      UBWC            **                      */
		/*      buffer          **      MDP PLANE       */
		/*      format          **                      */
		/************************************************/
		/* -------------------  ** -------------------- */
		/* |     RGB meta    |  ** |  RGB bitstream   | */
		/* |       data      |  ** |      plane       | */
		/* -------------------  ** -------------------- */
		/* |  RGB bitstream  |  ** |       NONE       | */
		/* |       data      |  ** |                  | */
		/* -------------------  ** -------------------- */
		/*                      ** |     RGB meta     | */
		/*                      ** |      plane       | */
		/*                      ** -------------------- */
		/************************************************/

		/* configure RGB bitstream plane */
		data->p[0].addr = base_addr + ps->plane_size[2];
		data->p[0].len = ps->plane_size[0];

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		/* configure RGB metadata plane */
		data->p[2].addr = base_addr;
		data->p[2].len = ps->plane_size[2];
	}
done:
	data->num_planes = ps->num_planes;
end:
	if (data->num_planes != ps->num_planes) {
		SDEROT_ERR("num_planes don't match: fmt:%d, data:%d, ps:%d\n",
			fmt->format, data->num_planes, ps->num_planes);
		return -EINVAL;
	}

	inc = (sde_mdp_is_yuv_format(fmt) ? 1 : 2);
	for (i = 0; i < SDE_ROT_MAX_PLANES; i += inc) {
		if (data->p[i].len != ps->plane_size[i]) {
			SDEROT_ERR(
				"plane:%d fmt:%d, len does not match: data:%lu, ps:%d\n",
				i, fmt->format, data->p[i].len,
				ps->plane_size[i]);
			return -EINVAL;
		}
	}

	return 0;
}
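
/*
 * sde_mdp_data_check() - validate buffer planes against expected sizes
 * @data - buffer descriptor to validate (plane lengths may be adjusted)
 * @ps - expected plane sizes, or NULL to skip the check
 * @fmt - format description of the frame
 *
 * For A5X tile formats this defers to sde_mdp_a5x_data_check(). Otherwise,
 * planes supplied as one larger allocation are split up and each plane length
 * is checked against @ps; returns -ENOMEM on a short buffer.
 */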
int sde_mdp_data_check(struct sde_mdp_data *data,
			struct sde_mdp_plane_sizes *ps,
			struct sde_mdp_format_params *fmt)
{
	struct sde_mdp_img_data *prev, *curr;
	int i;

	if (!ps)
		return 0;

	if (!data || data->num_planes == 0)
		return -ENOMEM;

	if (sde_mdp_is_tilea5x_format(fmt))
		return sde_mdp_a5x_data_check(data, ps, fmt);

	SDEROT_DBG("srcp0=%pa len=%lu frame_size=%u\n", &data->p[0].addr,
		data->p[0].len, ps->total_size);

	for (i = 0; i < ps->num_planes; i++) {
		curr = &data->p[i];
		if (i >= data->num_planes) {
			u32 psize = ps->plane_size[i-1];

			prev = &data->p[i-1];
			if (prev->len > psize) {
				curr->len = prev->len - psize;
				prev->len = psize;
			}
			curr->addr = prev->addr + psize;
		}
		if (curr->len < ps->plane_size[i]) {
			SDEROT_ERR("insufficient mem=%lu p=%d len=%u\n",
				curr->len, i, ps->plane_size[i]);
			return -ENOMEM;
		}
		SDEROT_DBG("plane[%d] addr=%pa len=%lu\n", i,
				&curr->addr, curr->len);
	}
	data->num_planes = ps->num_planes;

	return 0;
}
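
/*
 * sde_validate_offset_for_ubwc_format() - check x/y alignment for UBWC
 * @fmt - UBWC format description
 * @x - horizontal start offset in pixels
 * @y - vertical start offset in lines
 *
 * UBWC fetches must start on a meta-tile boundary, i.e. on a multiple of the
 * micro-tile dimensions scaled by UBWC_META_MACRO_W_H; returns -EINVAL when
 * either coordinate is misaligned.
 */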
int sde_validate_offset_for_ubwc_format(
	struct sde_mdp_format_params *fmt, u16 x, u16 y)
{
	int ret;
	u16 micro_w = 0, micro_h = 0;

	ret = sde_rot_get_ubwc_micro_dim(fmt->format, &micro_w, &micro_h);
	if (ret || !micro_w || !micro_h) {
		SDEROT_ERR("Could not get valid micro tile dimensions\n");
		return -EINVAL;
	}

	if (x % (micro_w * UBWC_META_MACRO_W_H)) {
		SDEROT_ERR("x=%d does not align with meta width=%d\n", x,
			micro_w * UBWC_META_MACRO_W_H);
		return -EINVAL;
	}

	if (y % (micro_h * UBWC_META_MACRO_W_H)) {
		SDEROT_ERR("y=%d does not align with meta height=%d\n", y,
			micro_h * UBWC_META_MACRO_W_H);
		return -EINVAL;
	}

	return ret;
}

/* x and y are assumed to be valid, expected to line up with start of tiles */
void sde_rot_ubwc_data_calc_offset(struct sde_mdp_data *data, u16 x, u16 y,
	struct sde_mdp_plane_sizes *ps, struct sde_mdp_format_params *fmt)
{
	u16 macro_w, micro_w, micro_h;
	u32 offset = 0;
	int ret;

	ret = sde_rot_get_ubwc_micro_dim(fmt->format, &micro_w, &micro_h);
	if (ret || !micro_w || !micro_h) {
		SDEROT_ERR("Could not get valid micro tile dimensions\n");
		return;
	}

	macro_w = 4 * micro_w;

	if (sde_mdp_is_nv12_8b_format(fmt)) {
		u16 chroma_macro_w = macro_w / 2;
		u16 chroma_micro_w = micro_w / 2;

		/* plane 1 and 3 are chroma, with sub sample of 2 */
		offset = y * ps->ystride[0] +
			(x / macro_w) * 4096;
		if (offset < data->p[0].len) {
			data->p[0].addr += offset;
		} else {
			ret = 1;
			goto done;
		}

		offset = y / 2 * ps->ystride[1] +
			((x / 2) / chroma_macro_w) * 4096;
		if (offset < data->p[1].len) {
			data->p[1].addr += offset;
		} else {
			ret = 2;
			goto done;
		}

		offset = (y / micro_h) * ps->ystride[2] +
			((x / micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[2].len) {
			data->p[2].addr += offset;
		} else {
			ret = 3;
			goto done;
		}

		offset = ((y / 2) / micro_h) * ps->ystride[3] +
			(((x / 2) / chroma_micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[3].len) {
			data->p[3].addr += offset;
		} else {
			ret = 4;
			goto done;
		}
	} else if (sde_mdp_is_nv12_10b_format(fmt)) {
		/* TODO: */
		SDEROT_ERR("%c%c%c%c format not implemented yet\n",
			fmt->format >> 0, fmt->format >> 8,
			fmt->format >> 16, fmt->format >> 24);
		ret = 1;
		goto done;
	} else {
		offset = y * ps->ystride[0] +
			(x / macro_w) * 4096;
		if (offset < data->p[0].len) {
			data->p[0].addr += offset;
		} else {
			ret = 1;
			goto done;
		}

		offset = DIV_ROUND_UP(y, micro_h) * ps->ystride[2] +
			((x / micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[2].len) {
			data->p[2].addr += offset;
		} else {
			ret = 3;
			goto done;
		}
	}
done:
	if (ret) {
		WARN(1, "idx %d, offsets:%u too large for buflen %lu\n",
			(ret - 1), offset, data->p[(ret - 1)].len);
	}
}
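
/*
 * sde_rot_data_calc_offset() - advance plane addresses to an (x, y) offset
 * @data - buffer descriptor whose plane addresses are adjusted in place
 * @x - horizontal offset in pixels
 * @y - vertical offset in lines
 * @ps - plane strides for the frame
 * @fmt - format description of the frame
 *
 * Tile/UBWC formats are delegated to sde_rot_ubwc_data_calc_offset(); linear
 * formats advance each plane using the stride math appropriate to its chroma
 * subsampling.
 */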
void sde_rot_data_calc_offset(struct sde_mdp_data *data, u16 x, u16 y,
	struct sde_mdp_plane_sizes *ps, struct sde_mdp_format_params *fmt)
{
	if ((x == 0) && (y == 0))
		return;

	if (sde_mdp_is_tilea5x_format(fmt)) {
		sde_rot_ubwc_data_calc_offset(data, x, y, ps, fmt);
		return;
	}

	data->p[0].addr += y * ps->ystride[0];

	if (data->num_planes == 1) {
		data->p[0].addr += x * fmt->bpp;
	} else {
		u16 xoff, yoff;
		u8 v_subsample, h_subsample;

		sde_mdp_get_v_h_subsample_rate(fmt->chroma_sample,
			&v_subsample, &h_subsample);

		xoff = x / h_subsample;
		yoff = y / v_subsample;

		data->p[0].addr += x;
		data->p[1].addr += xoff + (yoff * ps->ystride[1]);
		if (data->num_planes == 2) /* pseudo planar */
			data->p[1].addr += xoff;
		else /* planar */
			data->p[2].addr += xoff + (yoff * ps->ystride[2]);
	}
}

static int sde_smmu_get_domain_type(u32 flags, bool rotator)
{
	int type;

	if (flags & SDE_SECURE_OVERLAY_SESSION)
		type = SDE_IOMMU_DOMAIN_ROT_SECURE;
	else
		type = SDE_IOMMU_DOMAIN_ROT_UNSECURE;

	return type;
}

static int sde_mdp_is_map_needed(struct sde_mdp_img_data *data)
{
	if (data->flags & SDE_SECURE_CAMERA_SESSION)
		return false;
	return true;
}

static int sde_mdp_put_img(struct sde_mdp_img_data *data, bool rotator,
		int dir)
{
	u32 domain;

	if (data->flags & SDE_ROT_EXT_IOVA) {
		SDEROT_DBG("buffer %pad/%lx is client mapped\n",
				&data->addr, data->len);
		return 0;
	}

	if (!IS_ERR_OR_NULL(data->srcp_dma_buf)) {
		SDEROT_DBG("ion hdl=%pK buf=0x%pa\n", data->srcp_dma_buf,
				&data->addr);
		if (sde_mdp_is_map_needed(data) && data->mapped) {
			domain = sde_smmu_get_domain_type(data->flags,
				rotator);
			data->mapped = false;
			SDEROT_DBG("unmap %pad/%lx d:%u f:%x\n", &data->addr,
					data->len, domain, data->flags);
		}
		if (!data->skip_detach) {
			data->srcp_attachment->dma_map_attrs |=
				DMA_ATTR_DELAYED_UNMAP;
			dma_buf_unmap_attachment(data->srcp_attachment,
				data->srcp_table, dir);
			dma_buf_detach(data->srcp_dma_buf,
					data->srcp_attachment);
			if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
				dma_buf_put(data->srcp_dma_buf);
				data->srcp_dma_buf = NULL;
			}
			data->skip_detach = true;
		}
	} else {
		return -ENOMEM;
	}

	return 0;
}
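
/*
 * sde_mdp_get_img() - take a reference to one plane of a layer buffer
 * @img - plane description supplied by the client
 * @data - driver-side plane descriptor to populate
 * @dev - device used for the dma-buf attachment
 * @rotator - true when attaching for the rotator hardware
 * @dir - DMA data direction for the later mapping
 *
 * Records flags/offset, adopts a client-supplied dma-buf or IOVA when the
 * corresponding SDE_ROT_EXT_* flag is set, and otherwise attaches the dma-buf
 * to the appropriate SMMU domain. The actual mapping is deferred to
 * sde_mdp_map_buffer().
 */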
static int sde_mdp_get_img(struct sde_fb_data *img,
		struct sde_mdp_img_data *data, struct device *dev,
		bool rotator, int dir)
{
	int ret = -EINVAL;
	u32 domain;

	data->flags |= img->flags;
	data->offset = img->offset;
	if (data->flags & SDE_ROT_EXT_DMA_BUF) {
		data->srcp_dma_buf = img->buffer;
	} else if (data->flags & SDE_ROT_EXT_IOVA) {
		data->addr = img->addr;
		data->len = img->len;
		SDEROT_DBG("use client %pad/%lx\n", &data->addr, data->len);
		return 0;
	} else if (IS_ERR(data->srcp_dma_buf)) {
		SDEROT_ERR("error on ion_import_fd\n");
		ret = PTR_ERR(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
		return ret;
	}

	if (sde_mdp_is_map_needed(data)) {
		domain = sde_smmu_get_domain_type(data->flags, rotator);

		SDEROT_DBG("%d domain=%d ihndl=%pK\n",
				__LINE__, domain, data->srcp_dma_buf);
		data->srcp_attachment =
			sde_smmu_dma_buf_attach(data->srcp_dma_buf, dev,
					domain);
		if (IS_ERR(data->srcp_attachment)) {
			SDEROT_ERR("%d Failed to attach dma buf\n", __LINE__);
			ret = PTR_ERR(data->srcp_attachment);
			goto err_put;
		}
	} else {
		data->srcp_attachment = dma_buf_attach(
				data->srcp_dma_buf, dev);
		if (IS_ERR(data->srcp_attachment)) {
			SDEROT_ERR(
				"Failed to attach dma buf for secure camera\n");
			ret = PTR_ERR(data->srcp_attachment);
			goto err_put;
		}
	}

	SDEROT_DBG("%d attach=%pK\n", __LINE__, data->srcp_attachment);
	data->addr = 0;
	data->len = 0;
	data->mapped = false;
	data->skip_detach = false;
	/* return early, mapping will be done later */

	return 0;
err_put:
	if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
		dma_buf_put(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
	}
	return ret;
}
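
/*
 * sde_mdp_map_buffer() - map an attached dma-buf and resolve its address
 * @data - plane descriptor previously filled by sde_mdp_get_img()
 * @rotator - true when mapping for the rotator hardware
 * @dir - DMA data direction
 *
 * Maps the attachment (skipping CPU sync for uncached dma-bufs), sums the
 * scatterlist lengths into data->len, and records either the IOVA or, for
 * secure camera buffers, the physical address. The plane offset is applied
 * before returning; failures unwind the mapping and attachment.
 */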
static int sde_mdp_map_buffer(struct sde_mdp_img_data *data, bool rotator,
		int dir)
{
	int ret = -EINVAL;
	struct scatterlist *sg;
	struct sg_table *sgt = NULL;
	unsigned int i;
	unsigned long flags = 0;

	if (data->addr && data->len)
		return 0;

	if (data->flags & SDE_ROT_EXT_IOVA) {
		SDEROT_DBG("buffer %pad/%lx is client mapped\n",
				&data->addr, data->len);
		return 0;
	}

	if (!IS_ERR_OR_NULL(data->srcp_dma_buf)) {
		/*
		 * dma_buf_map_attachment will call into
		 * dma_map_sg_attrs, and so all cache maintenance
		 * attribute and lazy unmap attribute will be all
		 * provided here.
		 */
		data->srcp_attachment->dma_map_attrs |=
			DMA_ATTR_DELAYED_UNMAP;

		if (data->srcp_dma_buf && data->srcp_dma_buf->ops &&
				data->srcp_dma_buf->ops->get_flags) {
			if (data->srcp_dma_buf->ops->get_flags(
						data->srcp_dma_buf,
						&flags) == 0) {
				if ((flags & ION_FLAG_CACHED) == 0) {
					SDEROT_DBG("dmabuf is uncached type\n");
					data->srcp_attachment->dma_map_attrs |=
						DMA_ATTR_SKIP_CPU_SYNC;
				}
			}
		}

		sgt = dma_buf_map_attachment(
				data->srcp_attachment, dir);
		if (IS_ERR_OR_NULL(sgt) ||
				IS_ERR_OR_NULL(sgt->sgl)) {
			SDEROT_ERR("Failed to map attachment\n");
			ret = PTR_ERR(sgt);
			goto err_detach;
		}
		data->srcp_table = sgt;

		data->len = 0;
		for_each_sgtable_sg(sgt, sg, i) {
			data->len += sg->length;
		}

		if (sde_mdp_is_map_needed(data)) {
			data->addr = data->srcp_table->sgl->dma_address;
			SDEROT_DBG("map %pad/%lx f:%x\n",
					&data->addr,
					data->len,
					data->flags);
			data->mapped = true;
			ret = 0;
		} else {
			if (sgt->nents != 1) {
				SDEROT_ERR(
					"Fail ion buffer mapping for secure camera\n");
				ret = -EINVAL;
				goto err_unmap;
			}

			if (((uint64_t)sg_dma_address(sgt->sgl) >=
					PHY_ADDR_4G - sgt->sgl->length)) {
				SDEROT_ERR(
					"ion buffer mapped size invalid, size=%d\n",
					sgt->sgl->length);
				ret = -EINVAL;
				goto err_unmap;
			}

			data->addr = sg_phys(data->srcp_table->sgl);
			ret = 0;
		}
	}

	if (!data->addr) {
		SDEROT_ERR("start address is zero!\n");
		sde_mdp_put_img(data, rotator, dir);
		return -ENOMEM;
	}

	if (!ret && (data->offset < data->len)) {
		data->addr += data->offset;
		data->len -= data->offset;

		SDEROT_DBG("ihdl=%pK buf=0x%pa len=0x%lx\n",
			data->srcp_dma_buf, &data->addr, data->len);
	} else {
		sde_mdp_put_img(data, rotator, dir);
		return ret ? : -EOVERFLOW;
	}

	return ret;

err_unmap:
	dma_buf_unmap_attachment(data->srcp_attachment, data->srcp_table, dir);
err_detach:
	dma_buf_detach(data->srcp_dma_buf, data->srcp_attachment);
	if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
		dma_buf_put(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
	}
	return ret;
}

static int sde_mdp_data_get(struct sde_mdp_data *data,
		struct sde_fb_data *planes, int num_planes, u32 flags,
		struct device *dev, bool rotator, int dir)
{
	int i, rc = 0;

	if ((num_planes <= 0) || (num_planes > SDE_ROT_MAX_PLANES))
		return -EINVAL;

	for (i = 0; i < num_planes; i++) {
		data->p[i].flags = flags;
		rc = sde_mdp_get_img(&planes[i], &data->p[i], dev, rotator,
				dir);
		if (rc) {
			SDEROT_ERR("failed to get buf p=%d flags=%x\n",
				i, flags);
			while (i > 0) {
				i--;
				sde_mdp_put_img(&data->p[i], rotator, dir);
			}
			break;
		}
	}

	data->num_planes = i;

	return rc;
}
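
/*
 * sde_mdp_data_map() - map every plane of a layer buffer for DMA
 * @data - buffer descriptor whose planes were acquired by sde_mdp_data_get()
 * @rotator - true when mapping for the rotator hardware
 * @dir - DMA data direction
 *
 * Maps each plane in turn; on failure the planes mapped so far are released
 * and the error is returned.
 */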
int sde_mdp_data_map(struct sde_mdp_data *data, bool rotator, int dir)
{
	int i, rc = 0;

	if (!data || !data->num_planes || data->num_planes > SDE_ROT_MAX_PLANES)
		return -EINVAL;

	for (i = 0; i < data->num_planes; i++) {
		rc = sde_mdp_map_buffer(&data->p[i], rotator, dir);
		if (rc) {
			SDEROT_ERR("failed to map buf p=%d\n", i);
			while (i > 0) {
				i--;
				sde_mdp_put_img(&data->p[i], rotator, dir);
			}
			break;
		}
	}
	SDEROT_EVTLOG(data->num_planes, dir, data->p[0].addr, data->p[0].len,
			data->p[0].mapped);

	return rc;
}

void sde_mdp_data_free(struct sde_mdp_data *data, bool rotator, int dir)
{
	int i;

	sde_smmu_ctrl(1);
	for (i = 0; i < data->num_planes && data->p[i].len; i++)
		sde_mdp_put_img(&data->p[i], rotator, dir);
	sde_smmu_ctrl(0);

	data->num_planes = 0;
}
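
/*
 * sde_mdp_data_get_and_validate_size() - acquire planes and sanity-check size
 * @data - buffer descriptor to populate
 * @planes - client-supplied plane array
 * @num_planes - number of entries in @planes
 * @flags - buffer flags applied to every plane
 * @dev - device used for dma-buf attachment
 * @rotator - true when acquiring for the rotator hardware
 * @dir - DMA data direction
 * @buffer - layer buffer describing format, width and height
 *
 * Acquires the planes, then compares the total usable length (dma-buf size or
 * client-supplied length, minus per-plane offsets) against the frame size
 * computed by sde_mdp_get_plane_sizes(); buffers that are too small are
 * released and rejected with -EINVAL.
 */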
int sde_mdp_data_get_and_validate_size(struct sde_mdp_data *data,
	struct sde_fb_data *planes, int num_planes, u32 flags,
	struct device *dev, bool rotator, int dir,
	struct sde_layer_buffer *buffer)
{
	struct sde_mdp_format_params *fmt;
	struct sde_mdp_plane_sizes ps;
	int ret, i;
	unsigned long total_buf_len = 0;

	fmt = sde_get_format_params(buffer->format);
	if (!fmt) {
		SDEROT_ERR("Format %d not supported\n", buffer->format);
		return -EINVAL;
	}

	ret = sde_mdp_data_get(data, planes, num_planes,
			flags, dev, rotator, dir);
	if (ret)
		return ret;

	sde_mdp_get_plane_sizes(fmt, buffer->width, buffer->height, &ps, 0, 0);

	for (i = 0; i < num_planes; i++) {
		unsigned long plane_len = (data->p[i].srcp_dma_buf) ?
			data->p[i].srcp_dma_buf->size : data->p[i].len;

		if (plane_len < planes[i].offset) {
			SDEROT_ERR("Offset=%d larger than buffer size=%lu\n",
				planes[i].offset, plane_len);
			ret = -EINVAL;
			goto buf_too_small;
		}
		total_buf_len += plane_len - planes[i].offset;
	}

	if (total_buf_len < ps.total_size) {
		SDEROT_ERR("Buffer size=%lu, expected size=%d\n",
			total_buf_len,
			ps.total_size);
		ret = -EINVAL;
		goto buf_too_small;
	}

	return 0;

buf_too_small:
	sde_mdp_data_free(data, rotator, dir);
	return ret;
}
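
/*
 * sde_rot_dmabuf_map_tiny() - map_dma_buf hook for small exported buffers
 * @attach - dma-buf attachment being mapped
 * @dir - DMA data direction
 *
 * Backing store for buffers exported via sde_rot_get_dmabuf(): allocates a
 * single page (buffers larger than PAGE_SIZE are rejected), wraps it in a
 * one-entry sg_table and DMA-maps it for the attaching device.
 */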
static struct sg_table *sde_rot_dmabuf_map_tiny(
		struct dma_buf_attachment *attach, enum dma_data_direction dir)
{
	struct sde_mdp_img_data *data = attach->dmabuf->priv;
	struct sg_table *sgt;
	unsigned int order;
	struct page *p;

	if (!data) {
		SDEROT_ERR("NULL img data\n");
		return NULL;
	}

	if (data->len > PAGE_SIZE) {
		SDEROT_ERR("DMA buffer size is larger than %ld, bufsize:%ld\n",
				PAGE_SIZE, data->len);
		return NULL;
	}

	order = get_order(data->len);
	p = alloc_pages(GFP_KERNEL, order);
	if (!p) {
		SDEROT_ERR("Fail allocating page for datasize:%ld\n",
				data->len);
		return NULL;
	}

	sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		goto free_alloc_pages;

	/* only alloc a single page */
	if (sg_alloc_table(sgt, 1, GFP_KERNEL)) {
		SDEROT_ERR("fail sg_alloc_table\n");
		goto free_sgt;
	}

	sg_set_page(sgt->sgl, p, data->len, 0);

	if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) {
		SDEROT_ERR("fail dma_map_sg\n");
		goto free_table;
	}

	SDEROT_DBG("Successful generate sg_table:%pK datalen:%ld\n",
			sgt, data->len);
	return sgt;

free_table:
	sg_free_table(sgt);
free_sgt:
	kfree(sgt);
free_alloc_pages:
	__free_pages(p, order);
	return NULL;
}

static void sde_rot_dmabuf_unmap(struct dma_buf_attachment *attach,
		struct sg_table *sgt, enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int i;

	SDEROT_DBG("DMABUF unmap, sgt:%pK\n", sgt);
	dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir);

	for_each_sg(sgt->sgl, sg, sgt->nents, i) {
		put_page(sg_page(sg));
		__free_page(sg_page(sg));
	}
	sg_free_table(sgt);
	kfree(sgt);
}

static void sde_rot_dmabuf_release(struct dma_buf *buf)
{
	SDEROT_DBG("Release dmabuf:%pK\n", buf);
}

static int sde_rot_dmabuf_no_mmap(struct dma_buf *buf,
		struct vm_area_struct *vma)
{
	SDEROT_WARN("NOT SUPPORTING dmabuf mmap\n");
	return -EINVAL;
}

static const struct dma_buf_ops sde_rot_dmabuf_ops = {
	.map_dma_buf	= sde_rot_dmabuf_map_tiny,
	.unmap_dma_buf	= sde_rot_dmabuf_unmap,
	.release	= sde_rot_dmabuf_release,
	.mmap		= sde_rot_dmabuf_no_mmap,
};
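
/*
 * sde_rot_get_dmabuf() - export an image data descriptor as a dma-buf
 * @data - plane descriptor providing the length and priv pointer
 *
 * Exports a read-write dma-buf backed by sde_rot_dmabuf_ops so the buffer can
 * be handed to other drivers; mapping is served by sde_rot_dmabuf_map_tiny()
 * above.
 */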
struct dma_buf *sde_rot_get_dmabuf(struct sde_mdp_img_data *data)
{
	DEFINE_DMA_BUF_EXPORT_INFO(exp_info);

	exp_info.ops = &sde_rot_dmabuf_ops;
	exp_info.size = (size_t)data->len;
	exp_info.flags = O_RDWR;
	exp_info.priv = data;

	return dma_buf_export(&exp_info);
}

#if (LINUX_VERSION_CODE >= KERNEL_VERSION(5, 19, 0))
MODULE_IMPORT_NS(DMA_BUF);
#endif