sde_rotator_util.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2012, 2015-2019, The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt)	"%s: " fmt, __func__

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/file.h>
#include <linux/spinlock.h>
#include <linux/types.h>
#include <linux/major.h>
#include <linux/dma-buf.h>
#include <linux/debugfs.h>
#include <linux/delay.h>
#include <linux/wait.h>
#include <linux/clk.h>
#include <linux/slab.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/msm-bus.h>
#include <linux/msm-bus-board.h>
#include <linux/regulator/consumer.h>
#include <media/msm_media_info.h>
#include <linux/videodev2.h>
#include <linux/ion.h>

#include "sde_rotator_util.h"
#include "sde_rotator_smmu.h"
#include "sde_rotator_debug.h"

#define Y_TILEWIDTH	48
#define Y_TILEHEIGHT	4
#define UV_TILEWIDTH	48
#define UV_TILEHEIGHT	8
#define TILEWIDTH_SIZE	64
#define TILEHEIGHT_SIZE	4
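
/*
 * sde_mdp_get_v_h_subsample_rate() - look up the vertical and horizontal
 * chroma subsampling factors for a chroma_sample type; unknown types fall
 * back to 1x1 (no subsampling).
 */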
void sde_mdp_get_v_h_subsample_rate(u8 chroma_sample,
		u8 *v_sample, u8 *h_sample)
{
	switch (chroma_sample) {
	case SDE_MDP_CHROMA_H2V1:
		*v_sample = 1;
		*h_sample = 2;
		break;
	case SDE_MDP_CHROMA_H1V2:
		*v_sample = 2;
		*h_sample = 1;
		break;
	case SDE_MDP_CHROMA_420:
		*v_sample = 2;
		*h_sample = 2;
		break;
	default:
		*v_sample = 1;
		*h_sample = 1;
		break;
	}
}

void sde_rot_intersect_rect(struct sde_rect *res_rect,
	const struct sde_rect *dst_rect,
	const struct sde_rect *sci_rect)
{
	int l = max(dst_rect->x, sci_rect->x);
	int t = max(dst_rect->y, sci_rect->y);
	int r = min((dst_rect->x + dst_rect->w), (sci_rect->x + sci_rect->w));
	int b = min((dst_rect->y + dst_rect->h), (sci_rect->y + sci_rect->h));

	if (r < l || b < t)
		*res_rect = (struct sde_rect){0, 0, 0, 0};
	else
		*res_rect = (struct sde_rect){l, t, (r-l), (b-t)};
}

void sde_rot_crop_rect(struct sde_rect *src_rect,
	struct sde_rect *dst_rect,
	const struct sde_rect *sci_rect)
{
	struct sde_rect res;

	sde_rot_intersect_rect(&res, dst_rect, sci_rect);

	if (res.w && res.h) {
		if ((res.w != dst_rect->w) || (res.h != dst_rect->h)) {
			src_rect->x = src_rect->x + (res.x - dst_rect->x);
			src_rect->y = src_rect->y + (res.y - dst_rect->y);
			src_rect->w = res.w;
			src_rect->h = res.h;
		}
		*dst_rect = (struct sde_rect)
			{(res.x - sci_rect->x), (res.y - sci_rect->y),
			res.w, res.h};
	}
}

/*
 * sde_rect_cmp() - compares two rects
 * @rect1 - rect value to compare
 * @rect2 - rect value to compare
 *
 * Returns 1 if the rects are the same, 0 otherwise.
 */
int sde_rect_cmp(struct sde_rect *rect1, struct sde_rect *rect2)
{
	return rect1->x == rect2->x && rect1->y == rect2->y &&
		rect1->w == rect2->w && rect1->h == rect2->h;
}

/*
 * sde_rect_overlap_check() - compare two rects and check if they overlap
 * @rect1 - rect value to compare
 * @rect2 - rect value to compare
 *
 * Returns true if rects overlap, false otherwise.
 */
bool sde_rect_overlap_check(struct sde_rect *rect1, struct sde_rect *rect2)
{
	u32 rect1_left = rect1->x, rect1_right = rect1->x + rect1->w;
	u32 rect1_top = rect1->y, rect1_bottom = rect1->y + rect1->h;
	u32 rect2_left = rect2->x, rect2_right = rect2->x + rect2->w;
	u32 rect2_top = rect2->y, rect2_bottom = rect2->y + rect2->h;

	if ((rect1_right <= rect2_left) ||
	    (rect1_left >= rect2_right) ||
	    (rect1_bottom <= rect2_top) ||
	    (rect1_top >= rect2_bottom))
		return false;

	return true;
}
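
/*
 * sde_mdp_get_rau_strides() - compute the RAU count and per-plane strides
 * used for BWC (bandwidth compressed) buffers of the given width and format.
 * Only YUV and interleaved RGB formats are supported.
 */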
int sde_mdp_get_rau_strides(u32 w, u32 h,
	struct sde_mdp_format_params *fmt,
	struct sde_mdp_plane_sizes *ps)
{
	if (fmt->is_yuv) {
		ps->rau_cnt = DIV_ROUND_UP(w, 64);
		ps->ystride[0] = 64 * 4;
		ps->rau_h[0] = 4;
		ps->rau_h[1] = 2;
		if (fmt->chroma_sample == SDE_MDP_CHROMA_H1V2)
			ps->ystride[1] = 64 * 2;
		else if (fmt->chroma_sample == SDE_MDP_CHROMA_H2V1) {
			ps->ystride[1] = 32 * 4;
			ps->rau_h[1] = 4;
		} else
			ps->ystride[1] = 32 * 2;

		/* account for both chroma components */
		ps->ystride[1] <<= 1;
	} else if (fmt->fetch_planes == SDE_MDP_PLANE_INTERLEAVED) {
		ps->rau_cnt = DIV_ROUND_UP(w, 32);
		ps->ystride[0] = 32 * 4 * fmt->bpp;
		ps->ystride[1] = 0;
		ps->rau_h[0] = 4;
		ps->rau_h[1] = 0;
	} else {
		SDEROT_ERR("Invalid format=%d\n", fmt->format);
		return -EINVAL;
	}

	ps->ystride[0] *= ps->rau_cnt;
	ps->ystride[1] *= ps->rau_cnt;
	ps->num_planes = 2;

	SDEROT_DBG("BWC rau_cnt=%d strides={%d,%d} heights={%d,%d}\n",
		ps->rau_cnt, ps->ystride[0], ps->ystride[1],
		ps->rau_h[0], ps->rau_h[1]);

	return 0;
}
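
/*
 * sde_mdp_get_a5x_plane_size() - fill in per-plane strides and sizes for
 * macro-tile (A5X) formats: NV12 8-bit, P010, TP10 and RGB variants. For
 * UBWC formats, two extra planes are added for the Y/CbCr (or RGB) metadata.
 */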
static int sde_mdp_get_a5x_plane_size(struct sde_mdp_format_params *fmt,
	u32 width, u32 height, struct sde_mdp_plane_sizes *ps)
{
	int rc = 0;

	if (sde_mdp_is_nv12_8b_format(fmt)) {
		ps->num_planes = 2;
		/* Y bitstream stride and plane size */
		ps->ystride[0] = ALIGN(width, 128);
		ps->plane_size[0] = ALIGN(ps->ystride[0] * ALIGN(height, 32),
					4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = ALIGN(width, 128);
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
			ALIGN(height / 2, 32), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(width, 32), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 8), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
			ALIGN(DIV_ROUND_UP(height / 2, 8), 16), 4096);
	} else if (sde_mdp_is_p010_format(fmt)) {
		ps->num_planes = 2;
		/* Y bitstream stride and plane size */
		ps->ystride[0] = ALIGN(width * 2, 256);
		ps->plane_size[0] = ALIGN(ps->ystride[0] * ALIGN(height, 16),
					4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = ALIGN(width * 2, 256);
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
			ALIGN(height / 2, 16), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(width, 32), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 4), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
			ALIGN(DIV_ROUND_UP(height / 2, 4), 16), 4096);
	} else if (sde_mdp_is_tp10_format(fmt)) {
		u32 yWidth = sde_mdp_general_align(width, 192);
		u32 yHeight = ALIGN(height, 16);
		u32 uvWidth = sde_mdp_general_align(width, 192);
		u32 uvHeight = ALIGN(height, 32);

		ps->num_planes = 2;

		/* Y bitstream stride and plane size */
		ps->ystride[0] = yWidth * TILEWIDTH_SIZE / Y_TILEWIDTH;
		ps->plane_size[0] = ALIGN(ps->ystride[0] *
				(yHeight * TILEHEIGHT_SIZE / Y_TILEHEIGHT),
				4096);

		/* CbCr bitstream stride and plane size */
		ps->ystride[1] = uvWidth * TILEWIDTH_SIZE / UV_TILEWIDTH;
		ps->plane_size[1] = ALIGN(ps->ystride[1] *
				(uvHeight * TILEHEIGHT_SIZE / UV_TILEHEIGHT),
				4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 2;

		/* Y meta data stride and plane size */
		ps->ystride[2] = ALIGN(yWidth / Y_TILEWIDTH, 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN((yHeight / Y_TILEHEIGHT), 16), 4096);

		/* CbCr meta data stride and plane size */
		ps->ystride[3] = ALIGN(uvWidth / UV_TILEWIDTH, 64);
		ps->plane_size[3] = ALIGN(ps->ystride[3] *
			ALIGN((uvHeight / UV_TILEHEIGHT), 16), 4096);
	} else if (sde_mdp_is_rgb_format(fmt)) {
		uint32_t stride_alignment, bpp, aligned_bitstream_width;

		if (fmt->format == SDE_PIX_FMT_RGB_565_UBWC) {
			stride_alignment = 128;
			bpp = 2;
		} else {
			stride_alignment = 64;
			bpp = 4;
		}

		ps->num_planes = 1;

		/* RGB bitstream stride and plane size */
		aligned_bitstream_width = ALIGN(width, stride_alignment);
		ps->ystride[0] = aligned_bitstream_width * bpp;
		ps->plane_size[0] = ALIGN(bpp * aligned_bitstream_width *
			ALIGN(height, 16), 4096);

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		ps->num_planes += 1;

		/* RGB meta data stride and plane size */
		ps->ystride[2] = ALIGN(DIV_ROUND_UP(aligned_bitstream_width,
			16), 64);
		ps->plane_size[2] = ALIGN(ps->ystride[2] *
			ALIGN(DIV_ROUND_UP(height, 4), 16), 4096);
	} else {
		SDEROT_ERR("%s: UBWC format not supported for fmt:%d\n",
			__func__, fmt->format);
		rc = -EINVAL;
	}
done:
	return rc;
}
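
/*
 * sde_mdp_get_plane_sizes() - populate ps with the stride, per-plane size
 * and total size of a w x h buffer in the given format, dispatching to the
 * tile-A5X or BWC helpers where applicable.
 */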
int sde_mdp_get_plane_sizes(struct sde_mdp_format_params *fmt, u32 w, u32 h,
	struct sde_mdp_plane_sizes *ps, u32 bwc_mode, bool rotation)
{
	int i, rc = 0;
	u32 bpp;

	if (ps == NULL)
		return -EINVAL;

	if ((w > SDE_ROT_MAX_IMG_WIDTH) || (h > SDE_ROT_MAX_IMG_HEIGHT))
		return -ERANGE;

	bpp = fmt->bpp;
	memset(ps, 0, sizeof(struct sde_mdp_plane_sizes));

	if (sde_mdp_is_tilea5x_format(fmt)) {
		rc = sde_mdp_get_a5x_plane_size(fmt, w, h, ps);
	} else if (bwc_mode) {
		u32 height, meta_size;

		rc = sde_mdp_get_rau_strides(w, h, fmt, ps);
		if (rc)
			return rc;

		height = DIV_ROUND_UP(h, ps->rau_h[0]);
		meta_size = DIV_ROUND_UP(ps->rau_cnt, 8);
		ps->ystride[1] += meta_size;
		ps->ystride[0] += ps->ystride[1] + meta_size;
		ps->plane_size[0] = ps->ystride[0] * height;

		ps->ystride[1] = 2;
		ps->plane_size[1] = 2 * ps->rau_cnt * height;

		SDEROT_DBG("BWC data stride=%d size=%d meta size=%d\n",
			ps->ystride[0], ps->plane_size[0], ps->plane_size[1]);
	} else {
		if (fmt->fetch_planes == SDE_MDP_PLANE_INTERLEAVED) {
			ps->num_planes = 1;
			ps->plane_size[0] = w * h * bpp;
			ps->ystride[0] = w * bpp;
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_VENUS ||
			fmt->format == SDE_PIX_FMT_Y_CRCB_H2V2_VENUS ||
			fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_P010_VENUS) {
			int cf;

			switch (fmt->format) {
			case SDE_PIX_FMT_Y_CBCR_H2V2_VENUS:
				cf = COLOR_FMT_NV12;
				break;
			case SDE_PIX_FMT_Y_CRCB_H2V2_VENUS:
				cf = COLOR_FMT_NV21;
				break;
			case SDE_PIX_FMT_Y_CBCR_H2V2_P010_VENUS:
				cf = COLOR_FMT_P010;
				break;
			default:
				SDEROT_ERR("unknown color format %d\n",
					fmt->format);
				return -EINVAL;
			}

			ps->num_planes = 2;
			ps->ystride[0] = VENUS_Y_STRIDE(cf, w);
			ps->ystride[1] = VENUS_UV_STRIDE(cf, w);
			ps->plane_size[0] = VENUS_Y_SCANLINES(cf, h) *
				ps->ystride[0];
			ps->plane_size[1] = VENUS_UV_SCANLINES(cf, h) *
				ps->ystride[1];
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_P010) {
			/*
			 * |<---Y1--->000000<---Y0--->000000|  Plane0
			 * |rrrrrrrrrr000000bbbbbbbbbb000000|  Plane1
			 * |--------------------------------|
			 *  33222222222211111111110000000000  Bit
			 *  10987654321098765432109876543210  Location
			 */
			ps->num_planes = 2;
			ps->ystride[0] = w * 2;
			ps->ystride[1] = w * 2;
			ps->plane_size[0] = ps->ystride[0] * h;
			ps->plane_size[1] = ps->ystride[1] * h / 2;
		} else if (fmt->format == SDE_PIX_FMT_Y_CBCR_H2V2_TP10) {
			u32 yWidth = sde_mdp_general_align(w, 192);
			u32 yHeight = ALIGN(h, 16);
			u32 uvWidth = sde_mdp_general_align(w, 192);
			u32 uvHeight = (ALIGN(h, 32)) / 2;

			ps->num_planes = 2;

			ps->ystride[0] = (yWidth / 3) * 4;
			ps->ystride[1] = (uvWidth / 3) * 4;
			ps->plane_size[0] = ALIGN(ps->ystride[0] * yHeight,
				4096);
			ps->plane_size[1] = ALIGN(ps->ystride[1] * uvHeight,
				4096);
		} else {
			u8 v_subsample, h_subsample, stride_align, height_align;
			u32 chroma_samp;

			chroma_samp = fmt->chroma_sample;

			sde_mdp_get_v_h_subsample_rate(chroma_samp,
				&v_subsample, &h_subsample);

			switch (fmt->format) {
			case SDE_PIX_FMT_Y_CR_CB_GH2V2:
				stride_align = 16;
				height_align = 1;
				break;
			default:
				stride_align = 1;
				height_align = 1;
				break;
			}

			ps->ystride[0] = ALIGN(w, stride_align);
			ps->ystride[1] = ALIGN(w / h_subsample, stride_align);
			ps->plane_size[0] = ps->ystride[0] *
				ALIGN(h, height_align);
			ps->plane_size[1] = ps->ystride[1] * (h / v_subsample);

			if (fmt->fetch_planes == SDE_MDP_PLANE_PSEUDO_PLANAR) {
				ps->num_planes = 2;
				ps->plane_size[1] *= 2;
				ps->ystride[1] *= 2;
			} else { /* planar */
				ps->num_planes = 3;
				ps->plane_size[2] = ps->plane_size[1];
				ps->ystride[2] = ps->ystride[1];
			}
		}
	}

	/* Safe to use MAX_PLANES as ps is memset at start of function */
	for (i = 0; i < SDE_ROT_MAX_PLANES; i++)
		ps->total_size += ps->plane_size[i];

	return rc;
}
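
/*
 * sde_mdp_a5x_data_check() - when a tile-A5X buffer is handed in as a single
 * contiguous allocation in plane 0, split it into the bitstream and metadata
 * planes laid out as shown in the diagrams below, then verify that each plane
 * length matches the expected plane size.
 */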
static int sde_mdp_a5x_data_check(struct sde_mdp_data *data,
	struct sde_mdp_plane_sizes *ps,
	struct sde_mdp_format_params *fmt)
{
	int i, inc;
	unsigned long data_size = 0;
	dma_addr_t base_addr;

	if (data->p[0].len == ps->plane_size[0])
		goto end;

	/* From this point, assumption is plane 0 is to be divided */
	data_size = data->p[0].len;
	if (data_size < ps->total_size) {
		SDEROT_ERR(
			"insufficient current mem len=%lu required mem len=%u\n",
			data_size, ps->total_size);
		return -ENOMEM;
	}

	base_addr = data->p[0].addr;

	if (sde_mdp_is_yuv_format(fmt)) {
		/************************************************/
		/*      UBWC             **                     */
		/*      buffer           **      MDP PLANE      */
		/*      format           **                     */
		/************************************************/
		/* -------------------   ** ------------------- */
		/* |      Y meta     |   ** |   Y bitstream   | */
		/* |       data      |   ** |      plane      | */
		/* -------------------   ** ------------------- */
		/* |   Y bitstream   |   ** | CbCr bitstream  | */
		/* |       data      |   ** |      plane      | */
		/* -------------------   ** ------------------- */
		/* |  Cbcr metadata  |   ** |     Y meta      | */
		/* |       data      |   ** |      plane      | */
		/* -------------------   ** ------------------- */
		/* | CbCr bitstream  |   ** |    CbCr meta    | */
		/* |       data      |   ** |      plane      | */
		/* -------------------   ** ------------------- */
		/************************************************/

		/* configure Y bitstream plane */
		data->p[0].addr = base_addr + ps->plane_size[2];
		data->p[0].len = ps->plane_size[0];

		/* configure CbCr bitstream plane */
		data->p[1].addr = base_addr + ps->plane_size[0]
			+ ps->plane_size[2] + ps->plane_size[3];
		data->p[1].len = ps->plane_size[1];

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		/* configure Y metadata plane */
		data->p[2].addr = base_addr;
		data->p[2].len = ps->plane_size[2];

		/* configure CbCr metadata plane */
		data->p[3].addr = base_addr + ps->plane_size[0]
			+ ps->plane_size[2];
		data->p[3].len = ps->plane_size[3];
	} else {
		/************************************************/
		/*      UBWC             **                     */
		/*      buffer           **      MDP PLANE      */
		/*      format           **                     */
		/************************************************/
		/* -------------------   ** ------------------- */
		/* |    RGB meta     |   ** |  RGB bitstream  | */
		/* |       data      |   ** |      plane      | */
		/* -------------------   ** ------------------- */
		/* |  RGB bitstream  |   ** |      NONE       | */
		/* |       data      |   ** |                 | */
		/* -------------------   ** ------------------- */
		/*                       ** |    RGB meta     | */
		/*                       ** |      plane      | */
		/*                       ** ------------------- */
		/************************************************/

		/* configure RGB bitstream plane */
		data->p[0].addr = base_addr + ps->plane_size[2];
		data->p[0].len = ps->plane_size[0];

		if (!sde_mdp_is_ubwc_format(fmt))
			goto done;

		/* configure RGB metadata plane */
		data->p[2].addr = base_addr;
		data->p[2].len = ps->plane_size[2];
	}
done:
	data->num_planes = ps->num_planes;
end:
	if (data->num_planes != ps->num_planes) {
		SDEROT_ERR("num_planes don't match: fmt:%d, data:%d, ps:%d\n",
			fmt->format, data->num_planes, ps->num_planes);
		return -EINVAL;
	}

	inc = (sde_mdp_is_yuv_format(fmt) ? 1 : 2);
	for (i = 0; i < SDE_ROT_MAX_PLANES; i += inc) {
		if (data->p[i].len != ps->plane_size[i]) {
			SDEROT_ERR(
				"plane:%d fmt:%d, len does not match: data:%lu, ps:%d\n",
				i, fmt->format, data->p[i].len,
				ps->plane_size[i]);
			return -EINVAL;
		}
	}

	return 0;
}

int sde_mdp_data_check(struct sde_mdp_data *data,
	struct sde_mdp_plane_sizes *ps,
	struct sde_mdp_format_params *fmt)
{
	struct sde_mdp_img_data *prev, *curr;
	int i;

	if (!ps)
		return 0;

	if (!data || data->num_planes == 0)
		return -ENOMEM;

	if (sde_mdp_is_tilea5x_format(fmt))
		return sde_mdp_a5x_data_check(data, ps, fmt);

	SDEROT_DBG("srcp0=%pa len=%lu frame_size=%u\n", &data->p[0].addr,
		data->p[0].len, ps->total_size);

	for (i = 0; i < ps->num_planes; i++) {
		curr = &data->p[i];
		if (i >= data->num_planes) {
			u32 psize = ps->plane_size[i-1];

			prev = &data->p[i-1];
			if (prev->len > psize) {
				curr->len = prev->len - psize;
				prev->len = psize;
			}
			curr->addr = prev->addr + psize;
		}
		if (curr->len < ps->plane_size[i]) {
			SDEROT_ERR("insufficient mem=%lu p=%d len=%u\n",
				curr->len, i, ps->plane_size[i]);
			return -ENOMEM;
		}
		SDEROT_DBG("plane[%d] addr=%pa len=%lu\n", i,
			&curr->addr, curr->len);
	}
	data->num_planes = ps->num_planes;

	return 0;
}
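
/*
 * sde_validate_offset_for_ubwc_format() - verify that the (x, y) offset is
 * aligned to the UBWC meta tile boundaries of the given format.
 */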
int sde_validate_offset_for_ubwc_format(
	struct sde_mdp_format_params *fmt, u16 x, u16 y)
{
	int ret;
	u16 micro_w = 0, micro_h = 0;

	ret = sde_rot_get_ubwc_micro_dim(fmt->format, &micro_w, &micro_h);
	if (ret || !micro_w || !micro_h) {
		SDEROT_ERR("Could not get valid micro tile dimensions\n");
		return -EINVAL;
	}

	if (x % (micro_w * UBWC_META_MACRO_W_H)) {
		SDEROT_ERR("x=%d does not align with meta width=%d\n", x,
			micro_w * UBWC_META_MACRO_W_H);
		return -EINVAL;
	}

	if (y % (micro_h * UBWC_META_MACRO_W_H)) {
		SDEROT_ERR("y=%d does not align with meta height=%d\n", y,
			micro_h * UBWC_META_MACRO_W_H);
		return -EINVAL;
	}

	return ret;
}

/* x and y are assumed to be valid, expected to line up with start of tiles */
void sde_rot_ubwc_data_calc_offset(struct sde_mdp_data *data, u16 x, u16 y,
	struct sde_mdp_plane_sizes *ps, struct sde_mdp_format_params *fmt)
{
	u16 macro_w, micro_w, micro_h;
	u32 offset = 0;
	int ret;

	ret = sde_rot_get_ubwc_micro_dim(fmt->format, &micro_w, &micro_h);
	if (ret || !micro_w || !micro_h) {
		SDEROT_ERR("Could not get valid micro tile dimensions\n");
		return;
	}

	macro_w = 4 * micro_w;

	if (sde_mdp_is_nv12_8b_format(fmt)) {
		u16 chroma_macro_w = macro_w / 2;
		u16 chroma_micro_w = micro_w / 2;

		/* plane 1 and 3 are chroma, with sub sample of 2 */
		offset = y * ps->ystride[0] +
			(x / macro_w) * 4096;
		if (offset < data->p[0].len) {
			data->p[0].addr += offset;
		} else {
			ret = 1;
			goto done;
		}

		offset = y / 2 * ps->ystride[1] +
			((x / 2) / chroma_macro_w) * 4096;
		if (offset < data->p[1].len) {
			data->p[1].addr += offset;
		} else {
			ret = 2;
			goto done;
		}

		offset = (y / micro_h) * ps->ystride[2] +
			((x / micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[2].len) {
			data->p[2].addr += offset;
		} else {
			ret = 3;
			goto done;
		}

		offset = ((y / 2) / micro_h) * ps->ystride[3] +
			(((x / 2) / chroma_micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[3].len) {
			data->p[3].addr += offset;
		} else {
			ret = 4;
			goto done;
		}
	} else if (sde_mdp_is_nv12_10b_format(fmt)) {
		/* TODO: */
		SDEROT_ERR("%c%c%c%c format not implemented yet",
			fmt->format >> 0, fmt->format >> 8,
			fmt->format >> 16, fmt->format >> 24);
		ret = 1;
		goto done;
	} else {
		offset = y * ps->ystride[0] +
			(x / macro_w) * 4096;
		if (offset < data->p[0].len) {
			data->p[0].addr += offset;
		} else {
			ret = 1;
			goto done;
		}

		offset = DIV_ROUND_UP(y, micro_h) * ps->ystride[2] +
			((x / micro_w) / UBWC_META_MACRO_W_H) *
			UBWC_META_BLOCK_SIZE;
		if (offset < data->p[2].len) {
			data->p[2].addr += offset;
		} else {
			ret = 3;
			goto done;
		}
	}
done:
	if (ret) {
		WARN(1, "idx %d, offsets:%u too large for buflen%lu\n",
			(ret - 1), offset, data->p[(ret - 1)].len);
	}
}
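
/*
 * sde_rot_data_calc_offset() - advance the per-plane buffer addresses so
 * they point at pixel (x, y), taking chroma subsampling into account;
 * tile-A5X formats are delegated to sde_rot_ubwc_data_calc_offset().
 */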
void sde_rot_data_calc_offset(struct sde_mdp_data *data, u16 x, u16 y,
	struct sde_mdp_plane_sizes *ps, struct sde_mdp_format_params *fmt)
{
	if ((x == 0) && (y == 0))
		return;

	if (sde_mdp_is_tilea5x_format(fmt)) {
		sde_rot_ubwc_data_calc_offset(data, x, y, ps, fmt);
		return;
	}

	data->p[0].addr += y * ps->ystride[0];

	if (data->num_planes == 1) {
		data->p[0].addr += x * fmt->bpp;
	} else {
		u16 xoff, yoff;
		u8 v_subsample, h_subsample;

		sde_mdp_get_v_h_subsample_rate(fmt->chroma_sample,
			&v_subsample, &h_subsample);

		xoff = x / h_subsample;
		yoff = y / v_subsample;

		data->p[0].addr += x;
		data->p[1].addr += xoff + (yoff * ps->ystride[1]);
		if (data->num_planes == 2) /* pseudo planar */
			data->p[1].addr += xoff;
		else /* planar */
			data->p[2].addr += xoff + (yoff * ps->ystride[2]);
	}
}

static int sde_smmu_get_domain_type(u32 flags, bool rotator)
{
	int type;

	if (flags & SDE_SECURE_OVERLAY_SESSION)
		type = SDE_IOMMU_DOMAIN_ROT_SECURE;
	else
		type = SDE_IOMMU_DOMAIN_ROT_UNSECURE;

	return type;
}

static int sde_mdp_is_map_needed(struct sde_mdp_img_data *data)
{
	if (data->flags & SDE_SECURE_CAMERA_SESSION)
		return false;
	return true;
}
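
/*
 * sde_mdp_put_img() - release a previously acquired image buffer: unmap the
 * dma-buf attachment, detach it and drop the dma-buf reference, unless the
 * buffer is client mapped (SDE_ROT_EXT_IOVA) or externally owned
 * (SDE_ROT_EXT_DMA_BUF).
 */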
static int sde_mdp_put_img(struct sde_mdp_img_data *data, bool rotator,
	int dir)
{
	u32 domain;

	if (data->flags & SDE_ROT_EXT_IOVA) {
		SDEROT_DBG("buffer %pad/%lx is client mapped\n",
			&data->addr, data->len);
		return 0;
	}

	if (!IS_ERR_OR_NULL(data->srcp_dma_buf)) {
		SDEROT_DBG("ion hdl=%pK buf=0x%pa\n", data->srcp_dma_buf,
			&data->addr);
		if (sde_mdp_is_map_needed(data) && data->mapped) {
			domain = sde_smmu_get_domain_type(data->flags,
				rotator);
			data->mapped = false;
			SDEROT_DBG("unmap %pad/%lx d:%u f:%x\n", &data->addr,
				data->len, domain, data->flags);
		}
		if (!data->skip_detach) {
			data->srcp_attachment->dma_map_attrs |=
				DMA_ATTR_DELAYED_UNMAP;
			dma_buf_unmap_attachment(data->srcp_attachment,
				data->srcp_table, dir);
			dma_buf_detach(data->srcp_dma_buf,
				data->srcp_attachment);
			if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
				dma_buf_put(data->srcp_dma_buf);
				data->srcp_dma_buf = NULL;
			}
			data->skip_detach = true;
		}
	} else {
		return -ENOMEM;
	}

	return 0;
}
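
/*
 * sde_mdp_get_img() - import a plane described by sde_fb_data and attach its
 * dma-buf to the rotator SMMU device (or directly to the device for secure
 * camera buffers); the actual mapping is deferred to sde_mdp_map_buffer().
 */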
static int sde_mdp_get_img(struct sde_fb_data *img,
	struct sde_mdp_img_data *data, struct device *dev,
	bool rotator, int dir)
{
	int ret = -EINVAL;
	u32 domain;

	data->flags |= img->flags;
	data->offset = img->offset;
	if (data->flags & SDE_ROT_EXT_DMA_BUF) {
		data->srcp_dma_buf = img->buffer;
	} else if (data->flags & SDE_ROT_EXT_IOVA) {
		data->addr = img->addr;
		data->len = img->len;
		SDEROT_DBG("use client %pad/%lx\n", &data->addr, data->len);
		return 0;
	} else if (IS_ERR(data->srcp_dma_buf)) {
		SDEROT_ERR("error on ion_import_fd\n");
		ret = PTR_ERR(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
		return ret;
	}

	if (sde_mdp_is_map_needed(data)) {
		domain = sde_smmu_get_domain_type(data->flags, rotator);

		SDEROT_DBG("%d domain=%d ihndl=%pK\n",
			__LINE__, domain, data->srcp_dma_buf);
		data->srcp_attachment =
			sde_smmu_dma_buf_attach(data->srcp_dma_buf, dev,
				domain);
		if (IS_ERR(data->srcp_attachment)) {
			SDEROT_ERR("%d Failed to attach dma buf\n", __LINE__);
			ret = PTR_ERR(data->srcp_attachment);
			goto err_put;
		}
	} else {
		data->srcp_attachment = dma_buf_attach(
			data->srcp_dma_buf, dev);
		if (IS_ERR(data->srcp_attachment)) {
			SDEROT_ERR(
				"Failed to attach dma buf for secure camera\n");
			ret = PTR_ERR(data->srcp_attachment);
			goto err_put;
		}
	}

	SDEROT_DBG("%d attach=%pK\n", __LINE__, data->srcp_attachment);
	data->addr = 0;
	data->len = 0;
	data->mapped = false;
	data->skip_detach = false;
	/* return early, mapping will be done later */

	return 0;
err_put:
	if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
		dma_buf_put(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
	}
	return ret;
}
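
/*
 * sde_mdp_map_buffer() - map the attached dma-buf and record its address and
 * length; secure camera buffers bypass the SMMU mapping and use the physical
 * address of their single scatterlist entry instead.
 */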
static int sde_mdp_map_buffer(struct sde_mdp_img_data *data, bool rotator,
	int dir)
{
	int ret = -EINVAL;
	struct scatterlist *sg;
	struct sg_table *sgt = NULL;
	unsigned int i;
	unsigned long flags = 0;

	if (data->addr && data->len)
		return 0;

	if (data->flags & SDE_ROT_EXT_IOVA) {
		SDEROT_DBG("buffer %pad/%lx is client mapped\n",
			&data->addr, data->len);
		return 0;
	}

	if (!IS_ERR_OR_NULL(data->srcp_dma_buf)) {
		/*
		 * dma_buf_map_attachment will call into
		 * dma_map_sg_attrs, and so all cache maintenance
		 * attribute and lazy unmap attribute will be all
		 * provided here.
		 */
		data->srcp_attachment->dma_map_attrs |=
			DMA_ATTR_DELAYED_UNMAP;

		if (data->srcp_dma_buf && data->srcp_dma_buf->ops &&
			data->srcp_dma_buf->ops->get_flags) {
			if (data->srcp_dma_buf->ops->get_flags(
					data->srcp_dma_buf,
					&flags) == 0) {
				if ((flags & ION_FLAG_CACHED) == 0) {
					SDEROT_DBG("dmabuf is uncached type\n");
					data->srcp_attachment->dma_map_attrs |=
						DMA_ATTR_SKIP_CPU_SYNC;
				}
			}
		}

		sgt = dma_buf_map_attachment(
			data->srcp_attachment, dir);
		if (IS_ERR_OR_NULL(sgt) ||
			IS_ERR_OR_NULL(sgt->sgl)) {
			SDEROT_ERR("Failed to map attachment\n");
			ret = PTR_ERR(sgt);
			goto err_detach;
		}
		data->srcp_table = sgt;

		data->len = 0;
		for_each_sg(sgt->sgl, sg, sgt->nents, i) {
			data->len += sg->length;
		}

		if (sde_mdp_is_map_needed(data)) {
			data->addr = data->srcp_table->sgl->dma_address;
			SDEROT_DBG("map %pad/%lx f:%x\n",
				&data->addr,
				data->len,
				data->flags);
			data->mapped = true;
			ret = 0;
		} else {
			if (sgt->nents != 1) {
				SDEROT_ERR(
					"Fail ion buffer mapping for secure camera\n");
				ret = -EINVAL;
				goto err_unmap;
			}

			if (((uint64_t)sg_dma_address(sgt->sgl) >=
					PHY_ADDR_4G - sgt->sgl->length)) {
				SDEROT_ERR(
					"ion buffer mapped size invalid, size=%d\n",
					sgt->sgl->length);
				ret = -EINVAL;
				goto err_unmap;
			}

			data->addr = sg_phys(data->srcp_table->sgl);
			ret = 0;
		}
	}

	if (!data->addr) {
		SDEROT_ERR("start address is zero!\n");
		sde_mdp_put_img(data, rotator, dir);
		return -ENOMEM;
	}

	if (!ret && (data->offset < data->len)) {
		data->addr += data->offset;
		data->len -= data->offset;

		SDEROT_DBG("ihdl=%pK buf=0x%pa len=0x%lx\n",
			data->srcp_dma_buf, &data->addr, data->len);
	} else {
		sde_mdp_put_img(data, rotator, dir);
		return ret ? : -EOVERFLOW;
	}

	return ret;

err_unmap:
	dma_buf_unmap_attachment(data->srcp_attachment, data->srcp_table, dir);
err_detach:
	dma_buf_detach(data->srcp_dma_buf, data->srcp_attachment);
	if (!(data->flags & SDE_ROT_EXT_DMA_BUF)) {
		dma_buf_put(data->srcp_dma_buf);
		data->srcp_dma_buf = NULL;
	}
	return ret;
}

static int sde_mdp_data_get(struct sde_mdp_data *data,
	struct sde_fb_data *planes, int num_planes, u32 flags,
	struct device *dev, bool rotator, int dir)
{
	int i, rc = 0;

	if ((num_planes <= 0) || (num_planes > SDE_ROT_MAX_PLANES))
		return -EINVAL;

	for (i = 0; i < num_planes; i++) {
		data->p[i].flags = flags;
		rc = sde_mdp_get_img(&planes[i], &data->p[i], dev, rotator,
			dir);
		if (rc) {
			SDEROT_ERR("failed to get buf p=%d flags=%x\n",
				i, flags);
			while (i > 0) {
				i--;
				sde_mdp_put_img(&data->p[i], rotator, dir);
			}
			break;
		}
	}

	data->num_planes = i;

	return rc;
}

int sde_mdp_data_map(struct sde_mdp_data *data, bool rotator, int dir)
{
	int i, rc = 0;

	if (!data || !data->num_planes || data->num_planes > SDE_ROT_MAX_PLANES)
		return -EINVAL;

	for (i = 0; i < data->num_planes; i++) {
		rc = sde_mdp_map_buffer(&data->p[i], rotator, dir);
		if (rc) {
			SDEROT_ERR("failed to map buf p=%d\n", i);
			while (i > 0) {
				i--;
				sde_mdp_put_img(&data->p[i], rotator, dir);
			}
			break;
		}
	}
	SDEROT_EVTLOG(data->num_planes, dir, data->p[0].addr, data->p[0].len,
		data->p[0].mapped);

	return rc;
}

void sde_mdp_data_free(struct sde_mdp_data *data, bool rotator, int dir)
{
	int i;

	sde_smmu_ctrl(1);
	for (i = 0; i < data->num_planes && data->p[i].len; i++)
		sde_mdp_put_img(&data->p[i], rotator, dir);
	sde_smmu_ctrl(0);

	data->num_planes = 0;
}
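
/*
 * sde_mdp_data_get_and_validate_size() - import all planes of a layer buffer
 * and check that the total buffer length (minus plane offsets) is at least
 * the size required by the buffer's format and dimensions.
 */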
int sde_mdp_data_get_and_validate_size(struct sde_mdp_data *data,
	struct sde_fb_data *planes, int num_planes, u32 flags,
	struct device *dev, bool rotator, int dir,
	struct sde_layer_buffer *buffer)
{
	struct sde_mdp_format_params *fmt;
	struct sde_mdp_plane_sizes ps;
	int ret, i;
	unsigned long total_buf_len = 0;

	fmt = sde_get_format_params(buffer->format);
	if (!fmt) {
		SDEROT_ERR("Format %d not supported\n", buffer->format);
		return -EINVAL;
	}

	ret = sde_mdp_data_get(data, planes, num_planes,
		flags, dev, rotator, dir);
	if (ret)
		return ret;

	sde_mdp_get_plane_sizes(fmt, buffer->width, buffer->height, &ps, 0, 0);

	for (i = 0; i < num_planes; i++) {
		unsigned long plane_len = (data->p[i].srcp_dma_buf) ?
			data->p[i].srcp_dma_buf->size : data->p[i].len;

		if (plane_len < planes[i].offset) {
			SDEROT_ERR("Offset=%d larger than buffer size=%lu\n",
				planes[i].offset, plane_len);
			ret = -EINVAL;
			goto buf_too_small;
		}
		total_buf_len += plane_len - planes[i].offset;
	}

	if (total_buf_len < ps.total_size) {
		SDEROT_ERR("Buffer size=%lu, expected size=%d\n",
			total_buf_len,
			ps.total_size);
		ret = -EINVAL;
		goto buf_too_small;
	}
	return 0;

buf_too_small:
	sde_mdp_data_free(data, rotator, dir);
	return ret;
}
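
/*
 * sde_rot_dmabuf_map_tiny() - map_dma_buf handler for the rotator's exported
 * dma-buf: backs a buffer of at most PAGE_SIZE with a freshly allocated page
 * and returns a single-entry sg_table mapped for the attaching device.
 */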
static struct sg_table *sde_rot_dmabuf_map_tiny(
	struct dma_buf_attachment *attach, enum dma_data_direction dir)
{
	struct sde_mdp_img_data *data = attach->dmabuf->priv;
	struct sg_table *sgt;
	unsigned int order;
	struct page *p;

	if (!data) {
		SDEROT_ERR("NULL img data\n");
		return NULL;
	}

	if (data->len > PAGE_SIZE) {
		SDEROT_ERR("DMA buffer size is larger than %ld, bufsize:%ld\n",
			PAGE_SIZE, data->len);
		return NULL;
	}

	order = get_order(data->len);
	p = alloc_pages(GFP_KERNEL, order);
	if (!p) {
		SDEROT_ERR("Fail allocating page for datasize:%ld\n",
			data->len);
		return NULL;
	}

	sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		goto free_alloc_pages;

	/* only alloc a single page */
	if (sg_alloc_table(sgt, 1, GFP_KERNEL)) {
		SDEROT_ERR("fail sg_alloc_table\n");
		goto free_sgt;
	}

	sg_set_page(sgt->sgl, p, data->len, 0);

	if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) {
		SDEROT_ERR("fail dma_map_sg\n");
		goto free_table;
	}

	SDEROT_DBG("Successful generate sg_table:%pK datalen:%ld\n",
		sgt, data->len);
	return sgt;

free_table:
	sg_free_table(sgt);
free_sgt:
	kfree(sgt);
free_alloc_pages:
	__free_pages(p, order);
	return NULL;
}

static void sde_rot_dmabuf_unmap(struct dma_buf_attachment *attach,
	struct sg_table *sgt, enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int i;

	SDEROT_DBG("DMABUF unmap, sgt:%pK\n", sgt);
	dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir);

	for_each_sg(sgt->sgl, sg, sgt->nents, i) {
		put_page(sg_page(sg));
		__free_page(sg_page(sg));
	}
	sg_free_table(sgt);
	kfree(sgt);
}

static void *sde_rot_dmabuf_no_map(struct dma_buf *buf, unsigned long n)
{
	SDEROT_WARN("NOT SUPPORTING dmabuf map\n");
	return NULL;
}

static void sde_rot_dmabuf_no_unmap(struct dma_buf *buf, unsigned long n,
	void *addr)
{
	SDEROT_WARN("NOT SUPPORTING dmabuf unmap\n");
}

static void sde_rot_dmabuf_release(struct dma_buf *buf)
{
	SDEROT_DBG("Release dmabuf:%pK\n", buf);
}

static int sde_rot_dmabuf_no_mmap(struct dma_buf *buf,
	struct vm_area_struct *vma)
{
	SDEROT_WARN("NOT SUPPORTING dmabuf mmap\n");
	return -EINVAL;
}

static const struct dma_buf_ops sde_rot_dmabuf_ops = {
	.map_dma_buf	= sde_rot_dmabuf_map_tiny,
	.unmap_dma_buf	= sde_rot_dmabuf_unmap,
	.release	= sde_rot_dmabuf_release,
	.map		= sde_rot_dmabuf_no_map,
	.unmap		= sde_rot_dmabuf_no_unmap,
	.mmap		= sde_rot_dmabuf_no_mmap,
};
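
/*
 * sde_rot_get_dmabuf() - export the given image data as a dma-buf backed by
 * sde_rot_dmabuf_ops.
 */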
struct dma_buf *sde_rot_get_dmabuf(struct sde_mdp_img_data *data)
{
	DEFINE_DMA_BUF_EXPORT_INFO(exp_info);

	exp_info.ops = &sde_rot_dmabuf_ops;
	exp_info.size = (size_t)data->len;
	exp_info.flags = O_RDWR;
	exp_info.priv = data;

	return dma_buf_export(&exp_info);
}