// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2021-2022 Qualcomm Innovation Center, Inc. All rights reserved.
 * Copyright (c) 2015-2021, The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__

#include <drm/sde_drm.h>
#include "msm_drv.h"
#include "sde_kms.h"
#include "sde_hw_mdss.h"
#include "sde_hw_util.h"

/* file-scope static variable for debugfs access */
static u32 sde_hw_util_log_mask = SDE_DBG_MASK_NONE;

/* SDE_SCALER_QSEED3 */
#define QSEED3_HW_VERSION		0x00
#define QSEED3_OP_MODE			0x04
#define QSEED3_RGB2Y_COEFF		0x08
#define QSEED3_PHASE_INIT		0x0C
#define QSEED3_PHASE_STEP_Y_H		0x10
#define QSEED3_PHASE_STEP_Y_V		0x14
#define QSEED3_PHASE_STEP_UV_H		0x18
#define QSEED3_PHASE_STEP_UV_V		0x1C
#define QSEED3_PRELOAD			0x20
#define QSEED3_DE_SHARPEN		0x24
#define QSEED3_DE_SHARPEN_CTL		0x28
#define QSEED3_DE_SHAPE_CTL		0x2C
#define QSEED3_DE_THRESHOLD		0x30
#define QSEED3_DE_ADJUST_DATA_0		0x34
#define QSEED3_DE_ADJUST_DATA_1		0x38
#define QSEED3_DE_ADJUST_DATA_2		0x3C
#define QSEED3_DE_LPF_BLEND		0x64
#define QSEED3_SRC_SIZE_Y_RGB_A		0x40
#define QSEED3_SRC_SIZE_UV		0x44
#define QSEED3_DST_SIZE			0x48
#define QSEED3_COEF_LUT_CTRL		0x4C
#define QSEED3_COEF_LUT_SWAP_BIT	0
#define QSEED3_BUFFER_CTRL		0x50
#define QSEED3_CLK_CTRL0		0x54
#define QSEED3_CLK_CTRL1		0x58
#define QSEED3_CLK_STATUS		0x5C
#define QSEED3_MISR_CTRL		0x70
#define QSEED3_MISR_SIGNATURE_0		0x74
#define QSEED3_MISR_SIGNATURE_1		0x78
#define QSEED3_PHASE_INIT_Y_H		0x90
#define QSEED3_PHASE_INIT_Y_V		0x94
#define QSEED3_PHASE_INIT_UV_H		0x98
#define QSEED3_PHASE_INIT_UV_V		0x9C
#define QSEED3_ENABLE			2
#define CSC_MATRIX_SHIFT		7

/* SDE_SCALER_QSEED3LITE */
#define QSEED3L_COEF_LUT_Y_SEP_BIT	4
#define QSEED3L_COEF_LUT_UV_SEP_BIT	5
#define QSEED3L_COEF_LUT_CTRL		0x4C
#define QSEED3L_COEF_LUT_SWAP_BIT	0
#define QSEED3L_DIR_FILTER_WEIGHT	0x60

#define QSEED3LITE_SCALER_VERSION	0x2004
#define QSEED4_SCALER_VERSION		0x3000

#define QSEED3_DEFAULT_PRELOAD_V	0x3
#define QSEED3_DEFAULT_PRELOAD_H	0x4
#define QSEED4_DEFAULT_PRELOAD_V	0x2
#define QSEED4_DEFAULT_PRELOAD_H	0x4

typedef void (*scaler_lut_type)(struct sde_hw_blk_reg_map *,
		struct sde_hw_scaler3_cfg *, u32);
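
/**
 * sde_reg_write - write a 32-bit value to an offset within a hardware block
 * @c: pointer to the block's register map
 * @reg_off: register offset relative to the block base
 * @val: value to be written
 * @name: block name used in the optional debug trace
 *
 * The write is traced via SDE_REG_LOG and, when the block's log mask is
 * enabled in sde_hw_util_log_mask, also printed through SDE_DEBUG_DRIVER.
 */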
void sde_reg_write(struct sde_hw_blk_reg_map *c,
		u32 reg_off,
		u32 val,
		const char *name)
{
	/* don't need to mutex protect this */
	if (c->log_mask & sde_hw_util_log_mask)
		SDE_DEBUG_DRIVER("[%s:0x%X] <= 0x%X\n",
				name, c->blk_off + reg_off, val);
	writel_relaxed(val, c->base_off + c->blk_off + reg_off);
	SDE_REG_LOG(c->log_mask ? ilog2(c->log_mask) + 1 : 0,
			val, c->blk_off + reg_off);
}
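
/**
 * sde_reg_read - read a 32-bit register from an offset within a hardware block
 * @c: pointer to the block's register map
 * @reg_off: register offset relative to the block base
 * Return: the register value
 */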
int sde_reg_read(struct sde_hw_blk_reg_map *c, u32 reg_off)
{
	return readl_relaxed(c->base_off + c->blk_off + reg_off);
}

u32 *sde_hw_util_get_log_mask_ptr(void)
{
	return &sde_hw_util_log_mask;
}
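
/**
 * sde_init_scaler_blk - set the scaler version and default preload values
 * @blk: scaler block catalog entry to initialize
 * @version: scaler hardware version
 *
 * The QSEED4 preload defaults are used unless the version is older than
 * QSEED4_SCALER_VERSION, in which case the QSEED3 defaults apply.
 */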
void sde_init_scaler_blk(struct sde_scaler_blk *blk, u32 version)
{
	if (!blk)
		return;

	blk->version = version;
	blk->v_preload = QSEED4_DEFAULT_PRELOAD_V;
	blk->h_preload = QSEED4_DEFAULT_PRELOAD_H;
	if (version < QSEED4_SCALER_VERSION) {
		blk->v_preload = QSEED3_DEFAULT_PRELOAD_V;
		blk->h_preload = QSEED3_DEFAULT_PRELOAD_H;
	}
}
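
/**
 * sde_set_scaler_v2 - populate a scaler3 configuration from the user-space
 *	sde_drm_scaler_v2 payload
 * @cfg: scaler3 configuration to fill
 * @scale_v2: scaler v2 data received from user space
 */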
void sde_set_scaler_v2(struct sde_hw_scaler3_cfg *cfg,
		const struct sde_drm_scaler_v2 *scale_v2)
{
	int i;

	cfg->enable = scale_v2->enable;
	cfg->dir_en = scale_v2->dir_en;
	for (i = 0; i < SDE_MAX_PLANES; i++) {
		cfg->init_phase_x[i] = scale_v2->init_phase_x[i];
		cfg->phase_step_x[i] = scale_v2->phase_step_x[i];
		cfg->init_phase_y[i] = scale_v2->init_phase_y[i];
		cfg->phase_step_y[i] = scale_v2->phase_step_y[i];
		cfg->preload_x[i] = scale_v2->preload_x[i];
		cfg->preload_y[i] = scale_v2->preload_y[i];
		cfg->src_width[i] = scale_v2->src_width[i];
		cfg->src_height[i] = scale_v2->src_height[i];
	}

	cfg->dst_width = scale_v2->dst_width;
	cfg->dst_height = scale_v2->dst_height;

	cfg->y_rgb_filter_cfg = scale_v2->y_rgb_filter_cfg;
	cfg->uv_filter_cfg = scale_v2->uv_filter_cfg;
	cfg->alpha_filter_cfg = scale_v2->alpha_filter_cfg;
	cfg->blend_cfg = scale_v2->blend_cfg;

	cfg->lut_flag = scale_v2->lut_flag;
	cfg->dir_lut_idx = scale_v2->dir_lut_idx;
	cfg->y_rgb_cir_lut_idx = scale_v2->y_rgb_cir_lut_idx;
	cfg->uv_cir_lut_idx = scale_v2->uv_cir_lut_idx;
	cfg->y_rgb_sep_lut_idx = scale_v2->y_rgb_sep_lut_idx;
	cfg->uv_sep_lut_idx = scale_v2->uv_sep_lut_idx;

	cfg->de.prec_shift = scale_v2->de.prec_shift;
	cfg->dir_weight = scale_v2->dir_weight;
	cfg->dyn_exp_disabled = (scale_v2->flags & SDE_DYN_EXP_DISABLE) ? 1 : 0;

	cfg->de.enable = scale_v2->de.enable;
	cfg->de.sharpen_level1 = scale_v2->de.sharpen_level1;
	cfg->de.sharpen_level2 = scale_v2->de.sharpen_level2;
	cfg->de.clip = scale_v2->de.clip;
	cfg->de.limit = scale_v2->de.limit;
	cfg->de.thr_quiet = scale_v2->de.thr_quiet;
	cfg->de.thr_dieout = scale_v2->de.thr_dieout;
	cfg->de.thr_low = scale_v2->de.thr_low;
	cfg->de.thr_high = scale_v2->de.thr_high;
	cfg->de.blend = scale_v2->de_blend;
	cfg->de_lpf_h = scale_v2->de_lpf_h;
	cfg->de_lpf_l = scale_v2->de_lpf_l;
	cfg->de_lpf_m = scale_v2->de_lpf_m;
	for (i = 0; i < SDE_MAX_DE_CURVES; i++) {
		cfg->de.adjust_a[i] = scale_v2->de.adjust_a[i];
		cfg->de.adjust_b[i] = scale_v2->de.adjust_b[i];
		cfg->de.adjust_c[i] = scale_v2->de.adjust_c[i];
	}
}
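
/*
 * Program the QSEED3 directional, circular and separable coefficient LUTs
 * that are flagged in lut_flag and pass their expected-length checks, then
 * request a LUT swap if QSEED3_COEF_LUT_SWAP_BIT is set. off_tbl lists the
 * (region length, region offset) pairs for each of the five filters.
 */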
static void _sde_hw_setup_scaler3_lut(struct sde_hw_blk_reg_map *c,
		struct sde_hw_scaler3_cfg *scaler3_cfg, u32 offset)
{
	int i, j, filter;
	int config_lut = 0x0;
	unsigned long lut_flags;
	u32 lut_addr, lut_offset, lut_len;
	u32 *lut[QSEED3_FILTERS] = {NULL, NULL, NULL, NULL, NULL};
	static const uint32_t off_tbl[QSEED3_FILTERS][QSEED3_LUT_REGIONS][2] = {
		{{18, 0x000}, {12, 0x120}, {12, 0x1E0}, {8, 0x2A0} },
		{{6, 0x320}, {3, 0x3E0}, {3, 0x440}, {3, 0x4A0} },
		{{6, 0x500}, {3, 0x5c0}, {3, 0x620}, {3, 0x680} },
		{{6, 0x380}, {3, 0x410}, {3, 0x470}, {3, 0x4d0} },
		{{6, 0x560}, {3, 0x5f0}, {3, 0x650}, {3, 0x6b0} },
	};

	lut_flags = (unsigned long) scaler3_cfg->lut_flag;
	if (test_bit(QSEED3_COEF_LUT_DIR_BIT, &lut_flags) &&
		(scaler3_cfg->dir_len == QSEED3_DIR_LUT_SIZE)) {
		lut[0] = scaler3_cfg->dir_lut;
		config_lut = 1;
	}
	if (test_bit(QSEED3_COEF_LUT_Y_CIR_BIT, &lut_flags) &&
		(scaler3_cfg->y_rgb_cir_lut_idx < QSEED3_CIRCULAR_LUTS) &&
		(scaler3_cfg->cir_len == QSEED3_CIR_LUT_SIZE)) {
		lut[1] = scaler3_cfg->cir_lut +
			scaler3_cfg->y_rgb_cir_lut_idx * QSEED3_LUT_SIZE;
		config_lut = 1;
	}
	if (test_bit(QSEED3_COEF_LUT_UV_CIR_BIT, &lut_flags) &&
		(scaler3_cfg->uv_cir_lut_idx < QSEED3_CIRCULAR_LUTS) &&
		(scaler3_cfg->cir_len == QSEED3_CIR_LUT_SIZE)) {
		lut[2] = scaler3_cfg->cir_lut +
			scaler3_cfg->uv_cir_lut_idx * QSEED3_LUT_SIZE;
		config_lut = 1;
	}
	if (test_bit(QSEED3_COEF_LUT_Y_SEP_BIT, &lut_flags) &&
		(scaler3_cfg->y_rgb_sep_lut_idx < QSEED3_SEPARABLE_LUTS) &&
		(scaler3_cfg->sep_len == QSEED3_SEP_LUT_SIZE)) {
		lut[3] = scaler3_cfg->sep_lut +
			scaler3_cfg->y_rgb_sep_lut_idx * QSEED3_LUT_SIZE;
		config_lut = 1;
	}
	if (test_bit(QSEED3_COEF_LUT_UV_SEP_BIT, &lut_flags) &&
		(scaler3_cfg->uv_sep_lut_idx < QSEED3_SEPARABLE_LUTS) &&
		(scaler3_cfg->sep_len == QSEED3_SEP_LUT_SIZE)) {
		lut[4] = scaler3_cfg->sep_lut +
			scaler3_cfg->uv_sep_lut_idx * QSEED3_LUT_SIZE;
		config_lut = 1;
	}

	if (config_lut) {
		for (filter = 0; filter < QSEED3_FILTERS; filter++) {
			if (!lut[filter])
				continue;
			lut_offset = 0;
			for (i = 0; i < QSEED3_LUT_REGIONS; i++) {
				lut_addr = QSEED3_COEF_LUT_OFF + offset
					+ off_tbl[filter][i][1];
				lut_len = off_tbl[filter][i][0] << 2;
				for (j = 0; j < lut_len; j++) {
					SDE_REG_WRITE(c,
						lut_addr,
						(lut[filter])[lut_offset++]);
					lut_addr += 4;
				}
			}
		}
	}

	if (test_bit(QSEED3_COEF_LUT_SWAP_BIT, &lut_flags))
		SDE_REG_WRITE(c, QSEED3_COEF_LUT_CTRL + offset, BIT(0));
}
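
/*
 * Program the QSEED3LITE directional filter weight and, when separable LUTs
 * are provided, the Y/RGB and UV separable coefficient LUTs, followed by an
 * optional LUT swap request.
 */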
static void _sde_hw_setup_scaler3lite_lut(struct sde_hw_blk_reg_map *c,
		struct sde_hw_scaler3_cfg *scaler3_cfg, u32 offset)
{
	int i, filter;
	int config_lut = 0x0;
	unsigned long lut_flags;
	u32 lut_addr, lut_offset;
	u32 *lut[QSEED3LITE_FILTERS] = {NULL, NULL};
	static const uint32_t off_tbl[QSEED3LITE_FILTERS] = {0x000, 0x200};

	SDE_REG_WRITE(c, QSEED3L_DIR_FILTER_WEIGHT + offset,
			scaler3_cfg->dir_weight & 0xFF);

	/* destination scaler case */
	if (!scaler3_cfg->sep_lut)
		return;

	lut_flags = (unsigned long) scaler3_cfg->lut_flag;
	if (test_bit(QSEED3L_COEF_LUT_Y_SEP_BIT, &lut_flags) &&
		(scaler3_cfg->y_rgb_sep_lut_idx < QSEED3L_SEPARABLE_LUTS) &&
		(scaler3_cfg->sep_len == QSEED3L_SEP_LUT_SIZE)) {
		lut[0] = scaler3_cfg->sep_lut +
			scaler3_cfg->y_rgb_sep_lut_idx * QSEED3L_LUT_SIZE;
		config_lut = 1;
	}
	if (test_bit(QSEED3L_COEF_LUT_UV_SEP_BIT, &lut_flags) &&
		(scaler3_cfg->uv_sep_lut_idx < QSEED3L_SEPARABLE_LUTS) &&
		(scaler3_cfg->sep_len == QSEED3L_SEP_LUT_SIZE)) {
		lut[1] = scaler3_cfg->sep_lut +
			scaler3_cfg->uv_sep_lut_idx * QSEED3L_LUT_SIZE;
		config_lut = 1;
	}

	if (config_lut) {
		for (filter = 0; filter < QSEED3LITE_FILTERS; filter++) {
			if (!lut[filter])
				continue;
			lut_offset = 0;
			lut_addr = QSEED3L_COEF_LUT_OFF + offset +
				off_tbl[filter];
			for (i = 0; i < QSEED3L_LUT_SIZE; i++) {
				SDE_REG_WRITE(c, lut_addr,
					(lut[filter])[lut_offset++]);
				lut_addr += 4;
			}
		}
	}

	if (test_bit(QSEED3L_COEF_LUT_SWAP_BIT, &lut_flags))
		SDE_REG_WRITE(c, QSEED3L_COEF_LUT_CTRL + offset, BIT(0));
}
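
/*
 * Pack and program the detail-enhancer (DE) sharpen, shape, threshold and
 * adjustment-curve registers; a no-op when the DE block is not enabled.
 */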
static void _sde_hw_setup_scaler3_de(struct sde_hw_blk_reg_map *c,
		struct sde_hw_scaler3_de_cfg *de_cfg, u32 offset)
{
	u32 sharp_lvl, sharp_ctl, shape_ctl, de_thr;
	u32 adjust_a, adjust_b, adjust_c;

	if (!de_cfg->enable)
		return;

	sharp_lvl = (de_cfg->sharpen_level1 & 0x1FF) |
		((de_cfg->sharpen_level2 & 0x1FF) << 16);

	sharp_ctl = ((de_cfg->limit & 0xF) << 9) |
		((de_cfg->prec_shift & 0x7) << 13) |
		((de_cfg->clip & 0x7) << 16) |
		((de_cfg->blend & 0xF) << 20);

	shape_ctl = (de_cfg->thr_quiet & 0xFF) |
		((de_cfg->thr_dieout & 0x3FF) << 16);

	de_thr = (de_cfg->thr_low & 0x3FF) |
		((de_cfg->thr_high & 0x3FF) << 16);

	adjust_a = (de_cfg->adjust_a[0] & 0x3FF) |
		((de_cfg->adjust_a[1] & 0x3FF) << 10) |
		((de_cfg->adjust_a[2] & 0x3FF) << 20);

	adjust_b = (de_cfg->adjust_b[0] & 0x3FF) |
		((de_cfg->adjust_b[1] & 0x3FF) << 10) |
		((de_cfg->adjust_b[2] & 0x3FF) << 20);

	adjust_c = (de_cfg->adjust_c[0] & 0x3FF) |
		((de_cfg->adjust_c[1] & 0x3FF) << 10) |
		((de_cfg->adjust_c[2] & 0x3FF) << 20);

	SDE_REG_WRITE(c, QSEED3_DE_SHARPEN + offset, sharp_lvl);
	SDE_REG_WRITE(c, QSEED3_DE_SHARPEN_CTL + offset, sharp_ctl);
	SDE_REG_WRITE(c, QSEED3_DE_SHAPE_CTL + offset, shape_ctl);
	SDE_REG_WRITE(c, QSEED3_DE_THRESHOLD + offset, de_thr);
	SDE_REG_WRITE(c, QSEED3_DE_ADJUST_DATA_0 + offset, adjust_a);
	SDE_REG_WRITE(c, QSEED3_DE_ADJUST_DATA_1 + offset, adjust_b);
	SDE_REG_WRITE(c, QSEED3_DE_ADJUST_DATA_2 + offset, adjust_c);
}
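
/*
 * Select the LUT programming routine for the given scaler version: versions
 * below QSEED3LITE use the full QSEED3 LUT setup, QSEED3LITE/QSEED4 use the
 * lite variant. Returns NULL when no LUT flags are set.
 */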
static inline scaler_lut_type get_scaler_lut(
		struct sde_hw_scaler3_cfg *scaler3_cfg, u32 scaler_version)
{
	scaler_lut_type lut_ptr = _sde_hw_setup_scaler3lite_lut;

	if (!(scaler3_cfg->lut_flag))
		return NULL;

	if (scaler_version < QSEED3LITE_SCALER_VERSION)
		lut_ptr = _sde_hw_setup_scaler3_lut;

	return lut_ptr;
}
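
/**
 * sde_hw_setup_scaler3 - program the QSEED3/QSEED3LITE/QSEED4 scaler block
 * @c: pointer to the block's register map
 * @scaler3_cfg: scaler configuration
 * @scaler_version: scaler hardware version
 * @scaler_offset: scaler register offset within the block
 * @format: SDE format of the surface being scaled
 * @de_lpf: true if the detail-enhancer LPF blend feature is present
 *
 * Builds the OP_MODE word (scaler enable, filter selects, YUV/alpha/DE bits),
 * programs phase init/step, preload, source/destination sizes and the
 * coefficient LUTs, and finally writes OP_MODE. When the scaler is disabled,
 * only the format-dependent OP_MODE bits are written.
 */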
void sde_hw_setup_scaler3(struct sde_hw_blk_reg_map *c,
		struct sde_hw_scaler3_cfg *scaler3_cfg, u32 scaler_version,
		u32 scaler_offset, const struct sde_format *format, bool de_lpf)
{
	u32 op_mode = 0;
	u32 phase_init, preload, src_y_rgb, src_uv, dst;
	scaler_lut_type setup_lut = NULL;
	u32 de_lpf_blend = 0;

	if (!scaler3_cfg->enable)
		goto end;

	op_mode |= BIT(0);
	op_mode |= (scaler3_cfg->y_rgb_filter_cfg & 0x3) << 16;

	if (format && SDE_FORMAT_IS_YUV(format)) {
		op_mode |= BIT(12);
		op_mode |= (scaler3_cfg->uv_filter_cfg & 0x3) << 24;
	}

	op_mode |= (scaler3_cfg->blend_cfg & 1) << 31;
	op_mode |= (scaler3_cfg->dir_en) ? BIT(4) : 0;
	op_mode |= (scaler3_cfg->dyn_exp_disabled) ? BIT(13) : 0;

	preload =
		((scaler3_cfg->preload_x[0] & 0x7F) << 0) |
		((scaler3_cfg->preload_y[0] & 0x7F) << 8) |
		((scaler3_cfg->preload_x[1] & 0x7F) << 16) |
		((scaler3_cfg->preload_y[1] & 0x7F) << 24);

	src_y_rgb = (scaler3_cfg->src_width[0] & 0x1FFFF) |
		((scaler3_cfg->src_height[0] & 0x1FFFF) << 16);

	src_uv = (scaler3_cfg->src_width[1] & 0x1FFFF) |
		((scaler3_cfg->src_height[1] & 0x1FFFF) << 16);

	dst = (scaler3_cfg->dst_width & 0x1FFFF) |
		((scaler3_cfg->dst_height & 0x1FFFF) << 16);

	if (scaler3_cfg->de.enable) {
		_sde_hw_setup_scaler3_de(c, &scaler3_cfg->de, scaler_offset);
		op_mode |= BIT(8);
	}

	setup_lut = get_scaler_lut(scaler3_cfg, scaler_version);
	if (setup_lut)
		setup_lut(c, scaler3_cfg, scaler_offset);

	if (scaler_version == 0x1002) {
		phase_init =
			((scaler3_cfg->init_phase_x[0] & 0x3F) << 0) |
			((scaler3_cfg->init_phase_y[0] & 0x3F) << 8) |
			((scaler3_cfg->init_phase_x[1] & 0x3F) << 16) |
			((scaler3_cfg->init_phase_y[1] & 0x3F) << 24);
		SDE_REG_WRITE(c, QSEED3_PHASE_INIT + scaler_offset, phase_init);
	} else {
		SDE_REG_WRITE(c, QSEED3_PHASE_INIT_Y_H + scaler_offset,
			scaler3_cfg->init_phase_x[0] & 0x1FFFFF);
		SDE_REG_WRITE(c, QSEED3_PHASE_INIT_Y_V + scaler_offset,
			scaler3_cfg->init_phase_y[0] & 0x1FFFFF);
		SDE_REG_WRITE(c, QSEED3_PHASE_INIT_UV_H + scaler_offset,
			scaler3_cfg->init_phase_x[1] & 0x1FFFFF);
		SDE_REG_WRITE(c, QSEED3_PHASE_INIT_UV_V + scaler_offset,
			scaler3_cfg->init_phase_y[1] & 0x1FFFFF);
	}

	SDE_REG_WRITE(c, QSEED3_PHASE_STEP_Y_H + scaler_offset,
		scaler3_cfg->phase_step_x[0] & 0xFFFFFF);
	SDE_REG_WRITE(c, QSEED3_PHASE_STEP_Y_V + scaler_offset,
		scaler3_cfg->phase_step_y[0] & 0xFFFFFF);
	SDE_REG_WRITE(c, QSEED3_PHASE_STEP_UV_H + scaler_offset,
		scaler3_cfg->phase_step_x[1] & 0xFFFFFF);
	SDE_REG_WRITE(c, QSEED3_PHASE_STEP_UV_V + scaler_offset,
		scaler3_cfg->phase_step_y[1] & 0xFFFFFF);

	SDE_REG_WRITE(c, QSEED3_PRELOAD + scaler_offset, preload);
	SDE_REG_WRITE(c, QSEED3_SRC_SIZE_Y_RGB_A + scaler_offset, src_y_rgb);
	SDE_REG_WRITE(c, QSEED3_SRC_SIZE_UV + scaler_offset, src_uv);
	SDE_REG_WRITE(c, QSEED3_DST_SIZE + scaler_offset, dst);

	if (de_lpf && (scaler3_cfg->de_lpf_flags & SDE_DYN_EXP_DISABLE)) {
		de_lpf_blend = (scaler3_cfg->de_lpf_h & 0x3FF) |
			((scaler3_cfg->de_lpf_l & 0x3FF) << 10) |
			((scaler3_cfg->de_lpf_m & 0x3FF) << 20);
		SDE_REG_WRITE(c, QSEED3_DE_LPF_BLEND, de_lpf_blend);
	}

end:
	if (format && !SDE_FORMAT_IS_DX(format))
		op_mode |= BIT(14);

	if (format && format->alpha_enable) {
		op_mode |= BIT(10);
		if (scaler_version == 0x1002)
			op_mode |= (scaler3_cfg->alpha_filter_cfg & 0x1) << 30;
		else
			op_mode |= (scaler3_cfg->alpha_filter_cfg & 0x3) << 29;
	}

	SDE_REG_WRITE(c, QSEED3_OP_MODE + scaler_offset, op_mode);
}
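
/**
 * sde_hw_csc_matrix_coeff_setup - program the 3x3 CSC matrix coefficients
 * @c: pointer to the block's register map
 * @csc_reg_off: base offset of the CSC register set
 * @data: CSC configuration holding the coefficient matrix
 * @shift_bit: right-shift applied to each coefficient before programming
 *
 * Coefficients are packed two per 32-bit register (13 bits each), with the
 * ninth coefficient alone in the fifth register.
 */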
void sde_hw_csc_matrix_coeff_setup(struct sde_hw_blk_reg_map *c,
		u32 csc_reg_off, struct sde_csc_cfg *data,
		u32 shift_bit)
{
	u32 val;

	if (!c || !data)
		return;

	val = ((data->csc_mv[0] >> shift_bit) & 0x1FFF) |
		(((data->csc_mv[1] >> shift_bit) & 0x1FFF) << 16);
	SDE_REG_WRITE(c, csc_reg_off, val);
	val = ((data->csc_mv[2] >> shift_bit) & 0x1FFF) |
		(((data->csc_mv[3] >> shift_bit) & 0x1FFF) << 16);
	SDE_REG_WRITE(c, csc_reg_off + 0x4, val);
	val = ((data->csc_mv[4] >> shift_bit) & 0x1FFF) |
		(((data->csc_mv[5] >> shift_bit) & 0x1FFF) << 16);
	SDE_REG_WRITE(c, csc_reg_off + 0x8, val);
	val = ((data->csc_mv[6] >> shift_bit) & 0x1FFF) |
		(((data->csc_mv[7] >> shift_bit) & 0x1FFF) << 16);
	SDE_REG_WRITE(c, csc_reg_off + 0xc, val);
	val = (data->csc_mv[8] >> shift_bit) & 0x1FFF;
	SDE_REG_WRITE(c, csc_reg_off + 0x10, val);
}
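
/**
 * sde_hw_csc_setup - program the full CSC block: matrix coefficients,
 *	pre/post clamp ranges and pre/post bias values
 * @c: pointer to the block's register map
 * @csc_reg_off: base offset of the CSC register set
 * @data: CSC configuration
 * @csc10: true for 10-bit CSC (clamp pairs packed with a 16-bit shift),
 *	false for 8-bit
 */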
void sde_hw_csc_setup(struct sde_hw_blk_reg_map *c,
		u32 csc_reg_off,
		struct sde_csc_cfg *data, bool csc10)
{
	u32 clamp_shift = csc10 ? 16 : 8;
	u32 val;

	if (!c || !data)
		return;

	/* matrix coeff - convert S15.16 to S4.9 */
	sde_hw_csc_matrix_coeff_setup(c, csc_reg_off, data, CSC_MATRIX_SHIFT);

	/* Pre clamp */
	val = (data->csc_pre_lv[0] << clamp_shift) | data->csc_pre_lv[1];
	SDE_REG_WRITE(c, csc_reg_off + 0x14, val);
	val = (data->csc_pre_lv[2] << clamp_shift) | data->csc_pre_lv[3];
	SDE_REG_WRITE(c, csc_reg_off + 0x18, val);
	val = (data->csc_pre_lv[4] << clamp_shift) | data->csc_pre_lv[5];
	SDE_REG_WRITE(c, csc_reg_off + 0x1c, val);

	/* Post clamp */
	val = (data->csc_post_lv[0] << clamp_shift) | data->csc_post_lv[1];
	SDE_REG_WRITE(c, csc_reg_off + 0x20, val);
	val = (data->csc_post_lv[2] << clamp_shift) | data->csc_post_lv[3];
	SDE_REG_WRITE(c, csc_reg_off + 0x24, val);
	val = (data->csc_post_lv[4] << clamp_shift) | data->csc_post_lv[5];
	SDE_REG_WRITE(c, csc_reg_off + 0x28, val);

	/* Pre-Bias */
	SDE_REG_WRITE(c, csc_reg_off + 0x2c, data->csc_pre_bv[0]);
	SDE_REG_WRITE(c, csc_reg_off + 0x30, data->csc_pre_bv[1]);
	SDE_REG_WRITE(c, csc_reg_off + 0x34, data->csc_pre_bv[2]);

	/* Post-Bias */
	SDE_REG_WRITE(c, csc_reg_off + 0x38, data->csc_post_bv[0]);
	SDE_REG_WRITE(c, csc_reg_off + 0x3c, data->csc_post_bv[1]);
	SDE_REG_WRITE(c, csc_reg_off + 0x40, data->csc_post_bv[2]);
}

/**
 * sde_copy_formats - copy formats from src_list to dst_list
 * @dst_list: pointer to destination list where to copy formats
 * @dst_list_size: size of destination list
 * @dst_list_pos: starting position on the list where to copy formats
 * @src_list: pointer to source list where to copy formats from
 * @src_list_size: size of source list
 * Return: number of elements populated
 */
uint32_t sde_copy_formats(
		struct sde_format_extended *dst_list,
		uint32_t dst_list_size,
		uint32_t dst_list_pos,
		const struct sde_format_extended *src_list,
		uint32_t src_list_size)
{
	uint32_t cur_pos, i;

	if (!dst_list || !src_list || (dst_list_pos >= (dst_list_size - 1)))
		return 0;

	for (i = 0, cur_pos = dst_list_pos;
		(cur_pos < (dst_list_size - 1)) && (i < src_list_size)
		&& src_list[i].fourcc_format; ++i, ++cur_pos)
		dst_list[cur_pos] = src_list[i];

	dst_list[cur_pos].fourcc_format = 0;

	return i;
}