// SPDX-License-Identifier: GPL-2.0-only
/*
 * i2c-stm32.c
 *
 * Copyright (C) M'boumba Cedric Madianga 2017
 * Author: M'boumba Cedric Madianga <[email protected]>
 */

#include "i2c-stm32.h"

/* Functions for DMA support */
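/**
 * stm32_i2c_dma_request() - request and configure the I2C DMA channels
 * @dev: device requesting the channels
 * @phy_addr: physical base address of the I2C controller registers
 * @txdr_offset: offset of the transmit data register from @phy_addr
 * @rxdr_offset: offset of the receive data register from @phy_addr
 *
 * Requests the "tx" and "rx" dmaengine channels and configures both for
 * single-byte bursts against the controller's data registers.
 *
 * Return: a devm-allocated &struct stm32_i2c_dma on success, an ERR_PTR()
 * otherwise.
 */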
struct stm32_i2c_dma *stm32_i2c_dma_request(struct device *dev,
					    dma_addr_t phy_addr,
					    u32 txdr_offset,
					    u32 rxdr_offset)
{
	struct stm32_i2c_dma *dma;
	struct dma_slave_config dma_sconfig;
	int ret;

	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
	if (!dma)
		return ERR_PTR(-ENOMEM);

	/* Request and configure I2C TX dma channel */
	dma->chan_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(dma->chan_tx)) {
		ret = PTR_ERR(dma->chan_tx);
		if (ret != -ENODEV)
			ret = dev_err_probe(dev, ret,
					    "can't request DMA tx channel\n");
		goto fail_al;
	}

	memset(&dma_sconfig, 0, sizeof(dma_sconfig));
	dma_sconfig.dst_addr = phy_addr + txdr_offset;
	dma_sconfig.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	dma_sconfig.dst_maxburst = 1;
	dma_sconfig.direction = DMA_MEM_TO_DEV;
	ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig);
	if (ret < 0) {
		dev_err(dev, "can't configure tx channel\n");
		goto fail_tx;
	}

	/* Request and configure I2C RX dma channel */
	dma->chan_rx = dma_request_chan(dev, "rx");
	if (IS_ERR(dma->chan_rx)) {
		ret = PTR_ERR(dma->chan_rx);
		if (ret != -ENODEV)
			ret = dev_err_probe(dev, ret,
					    "can't request DMA rx channel\n");
		goto fail_tx;
	}

	memset(&dma_sconfig, 0, sizeof(dma_sconfig));
	dma_sconfig.src_addr = phy_addr + rxdr_offset;
	dma_sconfig.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	dma_sconfig.src_maxburst = 1;
	dma_sconfig.direction = DMA_DEV_TO_MEM;
	ret = dmaengine_slave_config(dma->chan_rx, &dma_sconfig);
	if (ret < 0) {
		dev_err(dev, "can't configure rx channel\n");
		goto fail_rx;
	}

	init_completion(&dma->dma_complete);

	dev_info(dev, "using %s (tx) and %s (rx) for DMA transfers\n",
		 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx));

	return dma;

	/* Error labels fall through so each channel requested so far is released */
fail_rx:
	dma_release_channel(dma->chan_rx);
fail_tx:
	dma_release_channel(dma->chan_tx);
fail_al:
	devm_kfree(dev, dma);

	return ERR_PTR(ret);
}
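
/**
 * stm32_i2c_dma_free() - release the I2C DMA channels
 * @dma: handle returned by stm32_i2c_dma_request()
 *
 * Releases both channels and clears the handle's state. The structure
 * itself is devm-allocated, so it is freed with the device.
 */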
void stm32_i2c_dma_free(struct stm32_i2c_dma *dma)
{
	dma->dma_buf = 0;
	dma->dma_len = 0;

	dma_release_channel(dma->chan_tx);
	dma->chan_tx = NULL;

	dma_release_channel(dma->chan_rx);
	dma->chan_rx = NULL;

	dma->chan_using = NULL;
}
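
/**
 * stm32_i2c_prep_dma_xfer() - map a buffer and queue a DMA transfer
 * @dev: device used for error reporting
 * @dma: handle returned by stm32_i2c_dma_request()
 * @rd_wr: true for a read (RX channel), false for a write (TX channel)
 * @len: transfer length in bytes
 * @buf: buffer to transfer
 * @callback: dmaengine completion callback
 * @dma_async_param: argument passed to @callback
 *
 * Maps @buf for DMA, prepares and submits a slave descriptor, then issues
 * it on the selected channel. On failure the mapping is undone.
 *
 * Return: 0 on success, a negative errno otherwise.
 */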
int stm32_i2c_prep_dma_xfer(struct device *dev, struct stm32_i2c_dma *dma,
			    bool rd_wr, u32 len, u8 *buf,
			    dma_async_tx_callback callback,
			    void *dma_async_param)
{
	struct dma_async_tx_descriptor *txdesc;
	struct device *chan_dev;
	int ret;

	/* Select the channel and the mapping direction for this transfer */
	if (rd_wr) {
		dma->chan_using = dma->chan_rx;
		dma->dma_transfer_dir = DMA_DEV_TO_MEM;
		dma->dma_data_dir = DMA_FROM_DEVICE;
	} else {
		dma->chan_using = dma->chan_tx;
		dma->dma_transfer_dir = DMA_MEM_TO_DEV;
		dma->dma_data_dir = DMA_TO_DEVICE;
	}

	dma->dma_len = len;
	chan_dev = dma->chan_using->device->dev;

	dma->dma_buf = dma_map_single(chan_dev, buf, dma->dma_len,
				      dma->dma_data_dir);
	if (dma_mapping_error(chan_dev, dma->dma_buf)) {
		dev_err(dev, "DMA mapping failed\n");
		return -EINVAL;
	}

	txdesc = dmaengine_prep_slave_single(dma->chan_using, dma->dma_buf,
					     dma->dma_len,
					     dma->dma_transfer_dir,
					     DMA_PREP_INTERRUPT);
	if (!txdesc) {
		dev_err(dev, "Not able to get desc for DMA xfer\n");
		ret = -EINVAL;
		goto err;
	}

	/* Reset the completion before the descriptor can fire */
	reinit_completion(&dma->dma_complete);

	txdesc->callback = callback;
	txdesc->callback_param = dma_async_param;
	ret = dma_submit_error(dmaengine_submit(txdesc));
	if (ret < 0) {
		dev_err(dev, "DMA submit failed\n");
		goto err;
	}
	dma_async_issue_pending(dma->chan_using);

	return 0;

err:
	dma_unmap_single(chan_dev, dma->dma_buf, dma->dma_len,
			 dma->dma_data_dir);
	return ret;
}
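
/*
 * Illustrative caller sequence, sketched for clarity only; i2c_dev,
 * MY_TXDR_OFFSET, MY_RXDR_OFFSET and my_dma_callback are hypothetical
 * names, not part of this file:
 *
 *	i2c_dev->dma = stm32_i2c_dma_request(dev, res->start,
 *					     MY_TXDR_OFFSET, MY_RXDR_OFFSET);
 *
 *	ret = stm32_i2c_prep_dma_xfer(dev, i2c_dev->dma,
 *				      msg->flags & I2C_M_RD, msg->len,
 *				      msg->buf, my_dma_callback, i2c_dev);
 *	if (!ret)
 *		wait_for_completion_timeout(&i2c_dev->dma->dma_complete, HZ);
 *
 *	stm32_i2c_dma_free(i2c_dev->dma);
 *
 * where my_dma_callback() would typically unmap the buffer and call
 * complete() on &dma->dma_complete.
 */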