clk-composite.c

// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2013 NVIDIA CORPORATION. All rights reserved.
 */

#include <linux/clk-provider.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/slab.h>
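
/*
 * A composite clock aggregates separate mux, rate and gate components into a
 * single clk_hw. Each callback below points the component's clk_hw at the
 * composite's clk and then delegates to the component's own ops.
 */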
static u8 clk_composite_get_parent(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *mux_hw = composite->mux_hw;

	__clk_hw_set_clk(mux_hw, hw);

	return mux_ops->get_parent(mux_hw);
}

static int clk_composite_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *mux_hw = composite->mux_hw;

	__clk_hw_set_clk(mux_hw, hw);

	return mux_ops->set_parent(mux_hw, index);
}

static unsigned long clk_composite_recalc_rate(struct clk_hw *hw,
					       unsigned long parent_rate)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->recalc_rate(rate_hw, parent_rate);
}
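
/*
 * Fill in @req for one candidate parent: prefer the rate component's
 * .determine_rate callback if it exists, otherwise fall back to .round_rate.
 */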
static int clk_composite_determine_rate_for_parent(struct clk_hw *rate_hw,
						   struct clk_rate_request *req,
						   struct clk_hw *parent_hw,
						   const struct clk_ops *rate_ops)
{
	long rate;

	req->best_parent_hw = parent_hw;
	req->best_parent_rate = clk_hw_get_rate(parent_hw);

	if (rate_ops->determine_rate)
		return rate_ops->determine_rate(rate_hw, req);

	rate = rate_ops->round_rate(rate_hw, req->rate,
				    &req->best_parent_rate);
	if (rate < 0)
		return rate;

	req->rate = rate;

	return 0;
}
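
/*
 * Three cases are handled here: a rate component combined with a settable
 * mux (try each available parent and keep the one whose achievable rate is
 * closest to the request, unless CLK_SET_RATE_NO_REPARENT restricts the
 * search to the current parent), a rate component alone, or a mux alone.
 */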
static int clk_composite_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	struct clk_hw *parent;
	unsigned long rate_diff;
	unsigned long best_rate_diff = ULONG_MAX;
	unsigned long best_rate = 0;
	int i, ret;

	if (rate_hw && rate_ops &&
	    (rate_ops->determine_rate || rate_ops->round_rate) &&
	    mux_hw && mux_ops && mux_ops->set_parent) {
		req->best_parent_hw = NULL;

		if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
			struct clk_rate_request tmp_req;

			parent = clk_hw_get_parent(mux_hw);

			clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				return ret;

			req->rate = tmp_req.rate;
			req->best_parent_hw = tmp_req.best_parent_hw;
			req->best_parent_rate = tmp_req.best_parent_rate;

			return 0;
		}

		for (i = 0; i < clk_hw_get_num_parents(mux_hw); i++) {
			struct clk_rate_request tmp_req;

			parent = clk_hw_get_parent_by_index(mux_hw, i);
			if (!parent)
				continue;

			clk_hw_forward_rate_request(hw, req, parent, &tmp_req, req->rate);
			ret = clk_composite_determine_rate_for_parent(rate_hw,
								      &tmp_req,
								      parent,
								      rate_ops);
			if (ret)
				continue;

			rate_diff = abs(req->rate - tmp_req.rate);

			if (!rate_diff || !req->best_parent_hw
				       || best_rate_diff > rate_diff) {
				req->best_parent_hw = parent;
				req->best_parent_rate = tmp_req.best_parent_rate;
				best_rate_diff = rate_diff;
				best_rate = tmp_req.rate;
			}

			if (!rate_diff)
				return 0;
		}

		req->rate = best_rate;
		return 0;
	} else if (rate_hw && rate_ops && rate_ops->determine_rate) {
		__clk_hw_set_clk(rate_hw, hw);
		return rate_ops->determine_rate(rate_hw, req);
	} else if (mux_hw && mux_ops && mux_ops->determine_rate) {
		__clk_hw_set_clk(mux_hw, hw);
		return mux_ops->determine_rate(mux_hw, req);
	} else {
		pr_err("clk: clk_composite_determine_rate function called, but no mux or rate callback set!\n");
		return -EINVAL;
	}
}

static long clk_composite_round_rate(struct clk_hw *hw, unsigned long rate,
				     unsigned long *prate)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->round_rate(rate_hw, rate, prate);
}

static int clk_composite_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	struct clk_hw *rate_hw = composite->rate_hw;

	__clk_hw_set_clk(rate_hw, hw);

	return rate_ops->set_rate(rate_hw, rate, parent_rate);
}
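
/*
 * When changing rate and parent together, order the two operations so the
 * output does not overshoot the requested rate: if the current setting of
 * the rate component would produce too high a rate on the new parent, lower
 * the rate first and then switch the mux; otherwise switch the mux first.
 */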
static int clk_composite_set_rate_and_parent(struct clk_hw *hw,
					     unsigned long rate,
					     unsigned long parent_rate,
					     u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	unsigned long temp_rate;

	__clk_hw_set_clk(rate_hw, hw);
	__clk_hw_set_clk(mux_hw, hw);

	temp_rate = rate_ops->recalc_rate(rate_hw, parent_rate);
	if (temp_rate > rate) {
		rate_ops->set_rate(rate_hw, rate, parent_rate);
		mux_ops->set_parent(mux_hw, index);
	} else {
		mux_ops->set_parent(mux_hw, index);
		rate_ops->set_rate(rate_hw, rate, parent_rate);
	}

	return 0;
}
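
/* Gate callbacks: plain pass-throughs to the gate component's ops. */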
static int clk_composite_is_enabled(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	return gate_ops->is_enabled(gate_hw);
}

static int clk_composite_enable(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	return gate_ops->enable(gate_hw);
}

static void clk_composite_disable(struct clk_hw *hw)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *gate_ops = composite->gate_ops;
	struct clk_hw *gate_hw = composite->gate_hw;

	__clk_hw_set_clk(gate_hw, hw);

	gate_ops->disable(gate_hw);
}
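
/*
 * Common registration helper: validate that each supplied component provides
 * the callbacks the composite relies on, populate composite->ops with the
 * wrappers above, register the resulting clk_hw and, on success, point each
 * component's ->clk at the composite's clk.
 */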
static struct clk_hw *__clk_hw_register_composite(struct device *dev,
			const char *name, const char * const *parent_names,
			const struct clk_parent_data *pdata, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;
	struct clk_init_data init = {};
	struct clk_composite *composite;
	struct clk_ops *clk_composite_ops;
	int ret;

	composite = kzalloc(sizeof(*composite), GFP_KERNEL);
	if (!composite)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.flags = flags;
	if (parent_names)
		init.parent_names = parent_names;
	else
		init.parent_data = pdata;
	init.num_parents = num_parents;
	hw = &composite->hw;

	clk_composite_ops = &composite->ops;

	if (mux_hw && mux_ops) {
		if (!mux_ops->get_parent) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->mux_hw = mux_hw;
		composite->mux_ops = mux_ops;
		clk_composite_ops->get_parent = clk_composite_get_parent;
		if (mux_ops->set_parent)
			clk_composite_ops->set_parent = clk_composite_set_parent;
		if (mux_ops->determine_rate)
			clk_composite_ops->determine_rate = clk_composite_determine_rate;
	}

	if (rate_hw && rate_ops) {
		if (!rate_ops->recalc_rate) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}
		clk_composite_ops->recalc_rate = clk_composite_recalc_rate;

		if (rate_ops->determine_rate)
			clk_composite_ops->determine_rate =
				clk_composite_determine_rate;
		else if (rate_ops->round_rate)
			clk_composite_ops->round_rate =
				clk_composite_round_rate;

		/* .set_rate requires either .round_rate or .determine_rate */
		if (rate_ops->set_rate) {
			if (rate_ops->determine_rate || rate_ops->round_rate)
				clk_composite_ops->set_rate =
						clk_composite_set_rate;
			else
				WARN(1, "%s: missing round_rate op is required\n",
						__func__);
		}

		composite->rate_hw = rate_hw;
		composite->rate_ops = rate_ops;
	}

	if (mux_hw && mux_ops && rate_hw && rate_ops) {
		if (mux_ops->set_parent && rate_ops->set_rate)
			clk_composite_ops->set_rate_and_parent =
				clk_composite_set_rate_and_parent;
	}

	if (gate_hw && gate_ops) {
		if (!gate_ops->is_enabled || !gate_ops->enable ||
		    !gate_ops->disable) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->gate_hw = gate_hw;
		composite->gate_ops = gate_ops;
		clk_composite_ops->is_enabled = clk_composite_is_enabled;
		clk_composite_ops->enable = clk_composite_enable;
		clk_composite_ops->disable = clk_composite_disable;
	}

	init.ops = clk_composite_ops;
	composite->hw.init = &init;

	ret = clk_hw_register(dev, hw);
	if (ret) {
		hw = ERR_PTR(ret);
		goto err;
	}

	if (composite->mux_hw)
		composite->mux_hw->clk = hw->clk;

	if (composite->rate_hw)
		composite->rate_hw->clk = hw->clk;

	if (composite->gate_hw)
		composite->gate_hw->clk = hw->clk;

	return hw;

err:
	kfree(composite);
	return hw;
}

struct clk_hw *clk_hw_register_composite(struct device *dev, const char *name,
			const char * const *parent_names, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	return __clk_hw_register_composite(dev, name, parent_names, NULL,
					   num_parents, mux_hw, mux_ops,
					   rate_hw, rate_ops, gate_hw,
					   gate_ops, flags);
}
EXPORT_SYMBOL_GPL(clk_hw_register_composite);
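
/*
 * Illustrative usage sketch (not part of the original file): a driver can
 * combine the generic clk_mux, clk_divider and clk_gate helpers from
 * <linux/clk-provider.h> into one composite clock. The register layout,
 * clock names and parents below are hypothetical; "base" is an ioremapped
 * register base and allocation error handling is omitted for brevity.
 *
 *	static const char * const sel_parents[] = { "osc", "pll_a" };
 *	struct clk_mux *mux = devm_kzalloc(dev, sizeof(*mux), GFP_KERNEL);
 *	struct clk_divider *div = devm_kzalloc(dev, sizeof(*div), GFP_KERNEL);
 *	struct clk_gate *gate = devm_kzalloc(dev, sizeof(*gate), GFP_KERNEL);
 *	struct clk_hw *hw;
 *
 *	mux->reg = base + 0x0;  mux->mask = 0x1;  mux->shift = 0;
 *	div->reg = base + 0x4;  div->shift = 0;   div->width = 4;
 *	gate->reg = base + 0x8; gate->bit_idx = 0;
 *
 *	hw = clk_hw_register_composite(dev, "periph", sel_parents,
 *				       ARRAY_SIZE(sel_parents),
 *				       &mux->hw, &clk_mux_ops,
 *				       &div->hw, &clk_divider_ops,
 *				       &gate->hw, &clk_gate_ops,
 *				       CLK_SET_RATE_PARENT);
 */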

struct clk_hw *clk_hw_register_composite_pdata(struct device *dev,
			const char *name,
			const struct clk_parent_data *parent_data,
			int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	return __clk_hw_register_composite(dev, name, NULL, parent_data,
					   num_parents, mux_hw, mux_ops,
					   rate_hw, rate_ops, gate_hw,
					   gate_ops, flags);
}

struct clk *clk_register_composite(struct device *dev, const char *name,
			const char * const *parent_names, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;

	hw = clk_hw_register_composite(dev, name, parent_names, num_parents,
			mux_hw, mux_ops, rate_hw, rate_ops, gate_hw, gate_ops,
			flags);
	if (IS_ERR(hw))
		return ERR_CAST(hw);
	return hw->clk;
}
EXPORT_SYMBOL_GPL(clk_register_composite);

struct clk *clk_register_composite_pdata(struct device *dev, const char *name,
			const struct clk_parent_data *parent_data,
			int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;

	hw = clk_hw_register_composite_pdata(dev, name, parent_data,
			num_parents, mux_hw, mux_ops, rate_hw, rate_ops,
			gate_hw, gate_ops, flags);
	if (IS_ERR(hw))
		return ERR_CAST(hw);
	return hw->clk;
}

void clk_unregister_composite(struct clk *clk)
{
	struct clk_composite *composite;
	struct clk_hw *hw;

	hw = __clk_get_hw(clk);
	if (!hw)
		return;

	composite = to_clk_composite(hw);

	clk_unregister(clk);
	kfree(composite);
}

void clk_hw_unregister_composite(struct clk_hw *hw)
{
	struct clk_composite *composite;

	composite = to_clk_composite(hw);

	clk_hw_unregister(hw);
	kfree(composite);
}
EXPORT_SYMBOL_GPL(clk_hw_unregister_composite);
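
/*
 * devres-managed registration: a composite registered through the helpers
 * below is unregistered automatically when the owning device is unbound.
 */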
static void devm_clk_hw_release_composite(struct device *dev, void *res)
{
	clk_hw_unregister_composite(*(struct clk_hw **)res);
}

static struct clk_hw *__devm_clk_hw_register_composite(struct device *dev,
			const char *name, const char * const *parent_names,
			const struct clk_parent_data *pdata, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw **ptr, *hw;

	ptr = devres_alloc(devm_clk_hw_release_composite, sizeof(*ptr),
			   GFP_KERNEL);
	if (!ptr)
		return ERR_PTR(-ENOMEM);

	hw = __clk_hw_register_composite(dev, name, parent_names, pdata,
					 num_parents, mux_hw, mux_ops, rate_hw,
					 rate_ops, gate_hw, gate_ops, flags);

	if (!IS_ERR(hw)) {
		*ptr = hw;
		devres_add(dev, ptr);
	} else {
		devres_free(ptr);
	}

	return hw;
}

struct clk_hw *devm_clk_hw_register_composite_pdata(struct device *dev,
			const char *name,
			const struct clk_parent_data *parent_data,
			int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	return __devm_clk_hw_register_composite(dev, name, NULL, parent_data,
						num_parents, mux_hw, mux_ops,
						rate_hw, rate_ops, gate_hw,
						gate_ops, flags);
}