sde_hw_dnsc_blur.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2021-2022 Qualcomm Innovation Center, Inc. All rights reserved.
 * Copyright (c) 2021, The Linux Foundation. All rights reserved.
 */
#include "sde_hw_mdss.h"
#include "sde_hwio.h"
#include "sde_hw_catalog.h"
#include "sde_hw_dnsc_blur.h"
#include "sde_dbg.h"
#include "sde_kms.h"

/* dnsc_blur top-level register offsets */
#define DNSC_BLUR_OP_MODE		0x0
#define DNSC_BLUR_BLUR_RATIO_H		0x4
#define DNSC_BLUR_BLUR_RATIO_V		0x8
#define DNSC_BLUR_PCMN_PHASE_INIT_H	0xC
#define DNSC_BLUR_PCMN_PHASE_STEP_H	0x10
#define DNSC_BLUR_PCMN_PHASE_INIT_V	0x14
#define DNSC_BLUR_PCMN_PHASE_STEP_V	0x18
#define DNSC_BLUR_OUT_IMG_SIZE		0x1C
#define DNSC_BLUR_GAUS_COEF_LUT_SEL	0x20
#define DNSC_BLUR_MUX			0x24
#define DNSC_BLUR_SRC_IMG_SIZE		0x28

/* Gaussian coefficient LUT offsets, relative to the gaus_lut sub-block base */
#define DNSC_BLUR_GAUS_COEF_LUT_H0	0x0
#define DNSC_BLUR_GAUS_COEF_LUT_V0	0x100
#define DNSC_BLUR_GAUS_COEF_LUT_H1	0x200
#define DNSC_BLUR_GAUS_COEF_LUT_V1	0x300

/* dither register offsets, relative to the dither sub-block base */
#define DNSC_BLUR_DITHER_OP_MODE	0x0
#define DNSC_BLUR_DITHER_BITDEPTH	0x4
#define DNSC_BLUR_DITHER_MATRIX_ROW0	0x8

/* DNSC_BLUR_OP_MODE bits */
#define DNSC_BLUR_OPMODE_ENABLE		BIT(0)
#define DNSC_BLUR_OPMODE_DWNS_H_EN	BIT(1)
#define DNSC_BLUR_OPMODE_DWNS_V_EN	BIT(2)
#define DNSC_BLUR_OPMODE_PCMN_H		BIT(8)
#define DNSC_BLUR_OPMODE_PCMN_V		BIT(12)
#define DNSC_BLUR_OPMODE_OUT_RND_8B_EN	BIT(16)

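/*
 * Look up the dnsc_blur instance @idx in the catalog and fill the register
 * map @b with its base address, block offset, length and log mask; returns
 * the matching catalog entry, or ERR_PTR(-EINVAL) if the id is not found.
 */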
static struct sde_dnsc_blur_cfg *_dnsc_blur_offset(enum sde_dnsc_blur idx,
		struct sde_mdss_cfg *m, void __iomem *addr, struct sde_hw_blk_reg_map *b)
{
	int i;

	for (i = 0; i < m->dnsc_blur_count; i++) {
		if (idx == m->dnsc_blur[i].id) {
			b->base_off = addr;
			b->blk_off = m->dnsc_blur[i].base;
			b->length = m->dnsc_blur[i].len;
			b->hw_rev = m->hw_rev;
			b->log_mask = SDE_DBG_MASK_DNSC_BLUR;
			return &m->dnsc_blur[i];
		}
	}

	return ERR_PTR(-EINVAL);
}

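/* Return the register offset of the requested sub-block (Gaussian LUT or dither) */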
static inline int _dnsc_blur_subblk_offset(struct sde_hw_dnsc_blur *hw_dnsc_blur,
		int s_id, u32 *base)
{
	const struct sde_dnsc_blur_sub_blks *sblk;

	sblk = hw_dnsc_blur->caps->sblk;

	switch (s_id) {
	case SDE_DNSC_BLUR_GAUS_LUT:
		*base = sblk->gaus_lut.base;
		break;
	case SDE_DNSC_BLUR_DITHER:
		*base = sblk->dither.base;
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

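/*
 * Select the active Gaussian coefficient LUT bank and program the
 * horizontal/vertical coefficient tables that are enabled in @cfg.
 */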
static void _sde_hw_dnsc_blur_gaus_lut_setup(struct sde_hw_dnsc_blur *hw_dnsc_blur,
		struct sde_drm_dnsc_blur_cfg *cfg, u32 lut_sel)
{
	struct sde_hw_blk_reg_map *hw = &hw_dnsc_blur->hw;
	u32 lut_off, base;
	int i;

	if (_dnsc_blur_subblk_offset(hw_dnsc_blur, SDE_DNSC_BLUR_GAUS_LUT, &base))
		return;

	SDE_REG_WRITE(hw, DNSC_BLUR_GAUS_COEF_LUT_SEL, lut_sel);

	if (cfg->flags_h & DNSC_BLUR_GAUS_FILTER) {
		lut_off = lut_sel ? DNSC_BLUR_GAUS_COEF_LUT_H1 : DNSC_BLUR_GAUS_COEF_LUT_H0;
		for (i = 0; i < DNSC_BLUR_COEF_NUM; i++)
			SDE_REG_WRITE(hw, lut_off + (i * 0x4) + base, cfg->coef_hori[i]);
	}

	if (cfg->flags_v & DNSC_BLUR_GAUS_FILTER) {
		lut_off = lut_sel ? DNSC_BLUR_GAUS_COEF_LUT_V1 : DNSC_BLUR_GAUS_COEF_LUT_V0;
		for (i = 0; i < DNSC_BLUR_COEF_NUM; i++)
			SDE_REG_WRITE(hw, lut_off + (i * 0x4) + base, cfg->coef_vert[i]);
	}
}

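/*
 * Program the per-direction filter registers: PCMN phase init/step and
 * Gaussian ratio/normalization values, then load the Gaussian coefficient
 * LUTs when a Gaussian filter is selected in either direction.
 */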
static void _sde_hw_dnsc_blur_filter_setup(struct sde_hw_dnsc_blur *hw_dnsc_blur,
		struct sde_drm_dnsc_blur_cfg *cfg, u32 lut_sel)
{
	struct sde_hw_blk_reg_map *hw = &hw_dnsc_blur->hw;
	u32 val;

	/* PCMN */
	if (cfg->flags_h & DNSC_BLUR_PCMN_FILTER) {
		SDE_REG_WRITE(hw, DNSC_BLUR_PCMN_PHASE_INIT_H, cfg->phase_init_h);
		SDE_REG_WRITE(hw, DNSC_BLUR_PCMN_PHASE_STEP_H, cfg->phase_step_h);
	}

	if (cfg->flags_v & DNSC_BLUR_PCMN_FILTER) {
		SDE_REG_WRITE(hw, DNSC_BLUR_PCMN_PHASE_INIT_V, cfg->phase_init_v);
		SDE_REG_WRITE(hw, DNSC_BLUR_PCMN_PHASE_STEP_V, cfg->phase_step_v);
	}

	/* Gaussian */
	if (cfg->flags_h & DNSC_BLUR_GAUS_FILTER) {
		val = (cfg->norm_h << 16) | cfg->ratio_h;
		SDE_REG_WRITE(hw, DNSC_BLUR_BLUR_RATIO_H, val);
	}

	if (cfg->flags_v & DNSC_BLUR_GAUS_FILTER) {
		val = (cfg->norm_v << 16) | cfg->ratio_v;
		SDE_REG_WRITE(hw, DNSC_BLUR_BLUR_RATIO_V, val);
	}

	if ((cfg->flags_v | cfg->flags_h) & DNSC_BLUR_GAUS_FILTER)
		_sde_hw_dnsc_blur_gaus_lut_setup(hw_dnsc_blur, cfg, lut_sel);
}

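/*
 * Top-level downscale-blur programming: disable the block when @cfg is
 * absent or not enabled, otherwise build OP_MODE (per-direction downscale
 * enable, PCMN vs Gaussian filter select, optional 8-bit output rounding)
 * and program the filter registers and source/destination image sizes.
 */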
static void _sde_hw_dnsc_blur_setup(struct sde_hw_dnsc_blur *hw_dnsc_blur,
		struct sde_drm_dnsc_blur_cfg *cfg, u32 lut_sel)
{
	struct sde_hw_blk_reg_map *hw = &hw_dnsc_blur->hw;
	u32 opmode = 0;

	/* disable, when no scaling involved */
	if (!cfg || !(cfg->flags & DNSC_BLUR_EN)) {
		SDE_REG_WRITE(hw, DNSC_BLUR_OP_MODE, 0x0);
		return;
	}

	opmode = DNSC_BLUR_OPMODE_ENABLE;
	opmode |= (cfg->flags & DNSC_BLUR_RND_8B_EN) ? DNSC_BLUR_OPMODE_OUT_RND_8B_EN : 0;

	if (cfg->flags_h) {
		opmode |= DNSC_BLUR_OPMODE_DWNS_H_EN;
		opmode |= (cfg->flags_h & DNSC_BLUR_PCMN_FILTER) ? DNSC_BLUR_OPMODE_PCMN_H : 0;
	}

	if (cfg->flags_v) {
		opmode |= DNSC_BLUR_OPMODE_DWNS_V_EN;
		opmode |= (cfg->flags_v & DNSC_BLUR_PCMN_FILTER) ? DNSC_BLUR_OPMODE_PCMN_V : 0;
	}

	_sde_hw_dnsc_blur_filter_setup(hw_dnsc_blur, cfg, lut_sel);

	SDE_REG_WRITE(hw, DNSC_BLUR_OP_MODE, opmode);
	SDE_REG_WRITE(hw, DNSC_BLUR_OUT_IMG_SIZE, (cfg->dst_height << 16) | cfg->dst_width);
	SDE_REG_WRITE(hw, DNSC_BLUR_SRC_IMG_SIZE, (cfg->src_height << 16) | cfg->src_width);
}

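/*
 * Program the dither sub-block: per-component bit-depth map, optional
 * temporal dithering, the 4x4 dither matrix rows and the op-mode enable
 * with optional luma-only mode. The sub-block is disabled when @cfg is
 * absent or dithering is not enabled.
 */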
static void _sde_hw_dnsc_blur_dither_setup(struct sde_hw_dnsc_blur *hw_dnsc_blur,
		struct sde_drm_dnsc_blur_cfg *cfg)
{
	struct sde_hw_blk_reg_map *hw = &hw_dnsc_blur->hw;
	int i;
	u32 base, data, offset;

	if (_dnsc_blur_subblk_offset(hw_dnsc_blur, SDE_DNSC_BLUR_DITHER, &base))
		return;

	/* disable case */
	if (!cfg || !(cfg->flags & DNSC_BLUR_DITHER_EN)) {
		SDE_REG_WRITE(hw, DNSC_BLUR_DITHER_OP_MODE + base, 0x0);
		return;
	}

	data = (dither_depth_map[cfg->c0_bitdepth] & REG_MASK(2)) |
		((dither_depth_map[cfg->c1_bitdepth] & REG_MASK(2)) << 2) |
		((dither_depth_map[cfg->c2_bitdepth] & REG_MASK(2)) << 4) |
		((dither_depth_map[cfg->c3_bitdepth] & REG_MASK(2)) << 6) |
		((cfg->temporal_en) ? (1 << 8) : 0);
	SDE_REG_WRITE(hw, DNSC_BLUR_DITHER_BITDEPTH + base, data);

	offset = DNSC_BLUR_DITHER_MATRIX_ROW0;
	for (i = 0; i < DNSC_BLUR_DITHER_MATRIX_SZ - 3; i += 4) {
		data = (cfg->dither_matrix[i] & REG_MASK(4)) |
			((cfg->dither_matrix[i + 1] & REG_MASK(4)) << 4) |
			((cfg->dither_matrix[i + 2] & REG_MASK(4)) << 8) |
			((cfg->dither_matrix[i + 3] & REG_MASK(4)) << 12);
		SDE_REG_WRITE(hw, base + offset, data);
		offset += 4;
	}

	data = BIT(0);
	data |= (cfg->dither_flags & DNSC_BLUR_DITHER_LUMA_MODE) ? BIT(4) : 0;
	SDE_REG_WRITE(hw, DNSC_BLUR_DITHER_OP_MODE + base, data);
}

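/*
 * Attach the block to a ping-pong (or the CWB path) through the MUX
 * register, or detach it (0xF) when @enable is false.
 */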
static void _sde_hw_dnsc_blur_bind_pingpong_blk(struct sde_hw_dnsc_blur *hw_dnsc_blur,
		bool enable, const enum sde_pingpong pp, bool cwb)
{
	struct sde_hw_blk_reg_map *hw = &hw_dnsc_blur->hw;
	int mux_cfg;

	if (enable && (pp < PINGPONG_0 || pp >= PINGPONG_MAX))
		return;

	if (enable)
		mux_cfg = cwb ? 0xD : (pp - PINGPONG_0) & 0x7;
	else
		mux_cfg = 0xF;

	SDE_REG_WRITE(hw, DNSC_BLUR_MUX, mux_cfg);
}

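/* Populate the function pointers exposed to the rest of the driver */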
static void _setup_dnsc_blur_ops(struct sde_hw_dnsc_blur_ops *ops, unsigned long features)
{
	ops->setup_dnsc_blur = _sde_hw_dnsc_blur_setup;
	ops->setup_dither = _sde_hw_dnsc_blur_dither_setup;
	ops->bind_pingpong_blk = _sde_hw_dnsc_blur_bind_pingpong_blk;
}

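/*
 * Map the dnsc_blur block described by the catalog, set up its ops and
 * register debug-dump ranges for the main block and its sub-blocks.
 */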
struct sde_hw_blk_reg_map *sde_hw_dnsc_blur_init(enum sde_dnsc_blur idx,
		void __iomem *addr, struct sde_mdss_cfg *m)
{
	struct sde_hw_dnsc_blur *c;
	struct sde_dnsc_blur_cfg *cfg;

	if (!addr || !m)
		return ERR_PTR(-EINVAL);

	c = kzalloc(sizeof(*c), GFP_KERNEL);
	if (!c)
		return ERR_PTR(-ENOMEM);

	cfg = _dnsc_blur_offset(idx, m, addr, &c->hw);
	if (IS_ERR_OR_NULL(cfg)) {
		kfree(c);
		return ERR_PTR(-EINVAL);
	}

	c->idx = idx;
	c->caps = cfg;
	_setup_dnsc_blur_ops(&c->ops, c->caps->features);

	sde_dbg_reg_register_dump_range(SDE_DBG_NAME, cfg->name, c->hw.blk_off,
			c->hw.blk_off + c->hw.length, c->hw.xin_id);

	if (cfg->sblk->gaus_lut.base && cfg->sblk->gaus_lut.len)
		sde_dbg_reg_register_dump_range(SDE_DBG_NAME, cfg->sblk->gaus_lut.name,
				c->hw.blk_off + cfg->sblk->gaus_lut.base,
				c->hw.blk_off + cfg->sblk->gaus_lut.base +
				cfg->sblk->gaus_lut.len, c->hw.xin_id);

	if (cfg->sblk->dither.base && cfg->sblk->dither.len)
		sde_dbg_reg_register_dump_range(SDE_DBG_NAME, cfg->sblk->dither.name,
				c->hw.blk_off + cfg->sblk->dither.base,
				c->hw.blk_off + cfg->sblk->dither.base +
				cfg->sblk->dither.len, c->hw.xin_id);

	return &c->hw;
}

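/* Free the wrapper allocated by sde_hw_dnsc_blur_init() */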
void sde_hw_dnsc_blur_destroy(struct sde_hw_blk_reg_map *hw)
{
	if (hw)
		kfree(to_sde_hw_dnsc_blur(hw));
}
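
/*
 * Usage sketch: a caller maps the block with sde_hw_dnsc_blur_init() and
 * drives it through the returned ops. The catalog id DNSC_BLUR_0 and the
 * mmio/catalog/drm_cfg handles below are illustrative assumptions, not
 * taken from this file; the ops signatures match the functions above.
 *
 *	struct sde_hw_blk_reg_map *hw;
 *	struct sde_hw_dnsc_blur *dnsc_blur;
 *
 *	hw = sde_hw_dnsc_blur_init(DNSC_BLUR_0, mmio, catalog);
 *	if (IS_ERR_OR_NULL(hw))
 *		return;
 *
 *	dnsc_blur = to_sde_hw_dnsc_blur(hw);
 *	dnsc_blur->ops.setup_dnsc_blur(dnsc_blur, drm_cfg, 0);
 *	dnsc_blur->ops.setup_dither(dnsc_blur, drm_cfg);
 *	dnsc_blur->ops.bind_pingpong_blk(dnsc_blur, true, PINGPONG_0, false);
 */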