spi-stm32.c

  1. // SPDX-License-Identifier: GPL-2.0
  2. //
  3. // STMicroelectronics STM32 SPI Controller driver (master mode only)
  4. //
  5. // Copyright (C) 2017, STMicroelectronics - All Rights Reserved
  6. // Author(s): Amelie Delaunay <[email protected]> for STMicroelectronics.
  7. #include <linux/bitfield.h>
  8. #include <linux/debugfs.h>
  9. #include <linux/clk.h>
  10. #include <linux/delay.h>
  11. #include <linux/dmaengine.h>
  12. #include <linux/interrupt.h>
  13. #include <linux/iopoll.h>
  14. #include <linux/module.h>
  15. #include <linux/of_platform.h>
  16. #include <linux/pinctrl/consumer.h>
  17. #include <linux/pm_runtime.h>
  18. #include <linux/reset.h>
  19. #include <linux/spi/spi.h>
  20. #define DRIVER_NAME "spi_stm32"
  21. /* STM32F4 SPI registers */
  22. #define STM32F4_SPI_CR1 0x00
  23. #define STM32F4_SPI_CR2 0x04
  24. #define STM32F4_SPI_SR 0x08
  25. #define STM32F4_SPI_DR 0x0C
  26. #define STM32F4_SPI_I2SCFGR 0x1C
  27. /* STM32F4_SPI_CR1 bit fields */
  28. #define STM32F4_SPI_CR1_CPHA BIT(0)
  29. #define STM32F4_SPI_CR1_CPOL BIT(1)
  30. #define STM32F4_SPI_CR1_MSTR BIT(2)
  31. #define STM32F4_SPI_CR1_BR_SHIFT 3
  32. #define STM32F4_SPI_CR1_BR GENMASK(5, 3)
  33. #define STM32F4_SPI_CR1_SPE BIT(6)
  34. #define STM32F4_SPI_CR1_LSBFRST BIT(7)
  35. #define STM32F4_SPI_CR1_SSI BIT(8)
  36. #define STM32F4_SPI_CR1_SSM BIT(9)
  37. #define STM32F4_SPI_CR1_RXONLY BIT(10)
  38. #define STM32F4_SPI_CR1_DFF BIT(11)
  39. #define STM32F4_SPI_CR1_CRCNEXT BIT(12)
  40. #define STM32F4_SPI_CR1_CRCEN BIT(13)
  41. #define STM32F4_SPI_CR1_BIDIOE BIT(14)
  42. #define STM32F4_SPI_CR1_BIDIMODE BIT(15)
  43. #define STM32F4_SPI_CR1_BR_MIN 0
  44. #define STM32F4_SPI_CR1_BR_MAX (GENMASK(5, 3) >> 3)
  45. /* STM32F4_SPI_CR2 bit fields */
  46. #define STM32F4_SPI_CR2_RXDMAEN BIT(0)
  47. #define STM32F4_SPI_CR2_TXDMAEN BIT(1)
  48. #define STM32F4_SPI_CR2_SSOE BIT(2)
  49. #define STM32F4_SPI_CR2_FRF BIT(4)
  50. #define STM32F4_SPI_CR2_ERRIE BIT(5)
  51. #define STM32F4_SPI_CR2_RXNEIE BIT(6)
  52. #define STM32F4_SPI_CR2_TXEIE BIT(7)
  53. /* STM32F4_SPI_SR bit fields */
  54. #define STM32F4_SPI_SR_RXNE BIT(0)
  55. #define STM32F4_SPI_SR_TXE BIT(1)
  56. #define STM32F4_SPI_SR_CHSIDE BIT(2)
  57. #define STM32F4_SPI_SR_UDR BIT(3)
  58. #define STM32F4_SPI_SR_CRCERR BIT(4)
  59. #define STM32F4_SPI_SR_MODF BIT(5)
  60. #define STM32F4_SPI_SR_OVR BIT(6)
  61. #define STM32F4_SPI_SR_BSY BIT(7)
  62. #define STM32F4_SPI_SR_FRE BIT(8)
  63. /* STM32F4_SPI_I2SCFGR bit fields */
  64. #define STM32F4_SPI_I2SCFGR_I2SMOD BIT(11)
  65. /* STM32F4 SPI Baud Rate min/max divisor */
  66. #define STM32F4_SPI_BR_DIV_MIN (2 << STM32F4_SPI_CR1_BR_MIN)
  67. #define STM32F4_SPI_BR_DIV_MAX (2 << STM32F4_SPI_CR1_BR_MAX)
  68. /* STM32H7 SPI registers */
  69. #define STM32H7_SPI_CR1 0x00
  70. #define STM32H7_SPI_CR2 0x04
  71. #define STM32H7_SPI_CFG1 0x08
  72. #define STM32H7_SPI_CFG2 0x0C
  73. #define STM32H7_SPI_IER 0x10
  74. #define STM32H7_SPI_SR 0x14
  75. #define STM32H7_SPI_IFCR 0x18
  76. #define STM32H7_SPI_TXDR 0x20
  77. #define STM32H7_SPI_RXDR 0x30
  78. #define STM32H7_SPI_I2SCFGR 0x50
  79. /* STM32H7_SPI_CR1 bit fields */
  80. #define STM32H7_SPI_CR1_SPE BIT(0)
  81. #define STM32H7_SPI_CR1_MASRX BIT(8)
  82. #define STM32H7_SPI_CR1_CSTART BIT(9)
  83. #define STM32H7_SPI_CR1_CSUSP BIT(10)
  84. #define STM32H7_SPI_CR1_HDDIR BIT(11)
  85. #define STM32H7_SPI_CR1_SSI BIT(12)
  86. /* STM32H7_SPI_CR2 bit fields */
  87. #define STM32H7_SPI_CR2_TSIZE GENMASK(15, 0)
  88. #define STM32H7_SPI_TSIZE_MAX GENMASK(15, 0)
  89. /* STM32H7_SPI_CFG1 bit fields */
  90. #define STM32H7_SPI_CFG1_DSIZE GENMASK(4, 0)
  91. #define STM32H7_SPI_CFG1_FTHLV GENMASK(8, 5)
  92. #define STM32H7_SPI_CFG1_RXDMAEN BIT(14)
  93. #define STM32H7_SPI_CFG1_TXDMAEN BIT(15)
  94. #define STM32H7_SPI_CFG1_MBR GENMASK(30, 28)
  95. #define STM32H7_SPI_CFG1_MBR_SHIFT 28
  96. #define STM32H7_SPI_CFG1_MBR_MIN 0
  97. #define STM32H7_SPI_CFG1_MBR_MAX (GENMASK(30, 28) >> 28)
  98. /* STM32H7_SPI_CFG2 bit fields */
  99. #define STM32H7_SPI_CFG2_MIDI GENMASK(7, 4)
  100. #define STM32H7_SPI_CFG2_COMM GENMASK(18, 17)
  101. #define STM32H7_SPI_CFG2_SP GENMASK(21, 19)
  102. #define STM32H7_SPI_CFG2_MASTER BIT(22)
  103. #define STM32H7_SPI_CFG2_LSBFRST BIT(23)
  104. #define STM32H7_SPI_CFG2_CPHA BIT(24)
  105. #define STM32H7_SPI_CFG2_CPOL BIT(25)
  106. #define STM32H7_SPI_CFG2_SSM BIT(26)
  107. #define STM32H7_SPI_CFG2_AFCNTR BIT(31)
  108. /* STM32H7_SPI_IER bit fields */
  109. #define STM32H7_SPI_IER_RXPIE BIT(0)
  110. #define STM32H7_SPI_IER_TXPIE BIT(1)
  111. #define STM32H7_SPI_IER_DXPIE BIT(2)
  112. #define STM32H7_SPI_IER_EOTIE BIT(3)
  113. #define STM32H7_SPI_IER_TXTFIE BIT(4)
  114. #define STM32H7_SPI_IER_OVRIE BIT(6)
  115. #define STM32H7_SPI_IER_MODFIE BIT(9)
  116. #define STM32H7_SPI_IER_ALL GENMASK(10, 0)
  117. /* STM32H7_SPI_SR bit fields */
  118. #define STM32H7_SPI_SR_RXP BIT(0)
  119. #define STM32H7_SPI_SR_TXP BIT(1)
  120. #define STM32H7_SPI_SR_EOT BIT(3)
  121. #define STM32H7_SPI_SR_OVR BIT(6)
  122. #define STM32H7_SPI_SR_MODF BIT(9)
  123. #define STM32H7_SPI_SR_SUSP BIT(11)
  124. #define STM32H7_SPI_SR_RXPLVL GENMASK(14, 13)
  125. #define STM32H7_SPI_SR_RXWNE BIT(15)
  126. /* STM32H7_SPI_IFCR bit fields */
  127. #define STM32H7_SPI_IFCR_ALL GENMASK(11, 3)
  128. /* STM32H7_SPI_I2SCFGR bit fields */
  129. #define STM32H7_SPI_I2SCFGR_I2SMOD BIT(0)
  130. /* STM32H7 SPI Master Baud Rate min/max divisor */
  131. #define STM32H7_SPI_MBR_DIV_MIN (2 << STM32H7_SPI_CFG1_MBR_MIN)
  132. #define STM32H7_SPI_MBR_DIV_MAX (2 << STM32H7_SPI_CFG1_MBR_MAX)
  133. /* STM32H7 SPI Communication mode */
  134. #define STM32H7_SPI_FULL_DUPLEX 0
  135. #define STM32H7_SPI_SIMPLEX_TX 1
  136. #define STM32H7_SPI_SIMPLEX_RX 2
  137. #define STM32H7_SPI_HALF_DUPLEX 3
  138. /* SPI Communication type */
  139. #define SPI_FULL_DUPLEX 0
  140. #define SPI_SIMPLEX_TX 1
  141. #define SPI_SIMPLEX_RX 2
  142. #define SPI_3WIRE_TX 3
  143. #define SPI_3WIRE_RX 4
  144. #define STM32_SPI_AUTOSUSPEND_DELAY 1 /* 1 ms */
  145. /*
  146. * use PIO for small transfers, avoiding DMA setup/teardown overhead for drivers
  147. * without fifo buffers.
  148. */
  149. #define SPI_DMA_MIN_BYTES 16
  150. /**
  151. * struct stm32_spi_reg - stm32 SPI register & bitfield desc
  152. * @reg: register offset
  153. * @mask: bitfield mask
  154. * @shift: left shift
  155. */
  156. struct stm32_spi_reg {
  157. int reg;
  158. int mask;
  159. int shift;
  160. };
  161. /**
  162. * struct stm32_spi_regspec - stm32 registers definition, compatible dependent data
  163. * @en: enable register and SPI enable bit
  164. * @dma_rx_en: SPI DMA RX enable register and SPI DMA RX enable bit
  165. * @dma_tx_en: SPI DMA TX enable register and SPI DMA TX enable bit
  166. * @cpol: clock polarity register and polarity bit
  167. * @cpha: clock phase register and phase bit
  168. * @lsb_first: LSB transmitted first register and bit
  169. * @br: baud rate register and bitfields
  170. * @rx: SPI RX data register
  171. * @tx: SPI TX data register
  172. */
  173. struct stm32_spi_regspec {
  174. const struct stm32_spi_reg en;
  175. const struct stm32_spi_reg dma_rx_en;
  176. const struct stm32_spi_reg dma_tx_en;
  177. const struct stm32_spi_reg cpol;
  178. const struct stm32_spi_reg cpha;
  179. const struct stm32_spi_reg lsb_first;
  180. const struct stm32_spi_reg br;
  181. const struct stm32_spi_reg rx;
  182. const struct stm32_spi_reg tx;
  183. };
  184. struct stm32_spi;
  185. /**
  186. * struct stm32_spi_cfg - stm32 compatible configuration data
  187. * @regs: registers descriptions
  188. * @get_fifo_size: routine to get fifo size
  189. * @get_bpw_mask: routine to get bits per word mask
  190. * @disable: routine to disable controller
  191. * @config: routine to configure controller as SPI Master
  192. * @set_bpw: routine to configure registers for bits per word
  193. * @set_mode: routine to configure registers to desired mode
  194. * @set_data_idleness: optional routine to configure registers to desired idle
  195. * time between frames (if driver has this functionality)
  196. * @set_number_of_data: optional routine to configure registers to desired
  197. * number of data (if driver has this functionality)
  198. * @transfer_one_dma_start: routine to start transfer a single spi_transfer
  199. * using DMA
  200. * @dma_rx_cb: routine to call after DMA RX channel operation is complete
  201. * @dma_tx_cb: routine to call after DMA TX channel operation is complete
  202. * @transfer_one_irq: routine to configure interrupts for driver
  203. * @irq_handler_event: Interrupt handler for SPI controller events
  204. * @irq_handler_thread: thread of interrupt handler for SPI controller
  205. * @baud_rate_div_min: minimum baud rate divisor
  206. * @baud_rate_div_max: maximum baud rate divisor
  207. * @has_fifo: boolean to know if fifo is used for driver
  208. * @flags: compatible specific SPI controller flags used at registration time
  209. */
  210. struct stm32_spi_cfg {
  211. const struct stm32_spi_regspec *regs;
  212. int (*get_fifo_size)(struct stm32_spi *spi);
  213. int (*get_bpw_mask)(struct stm32_spi *spi);
  214. void (*disable)(struct stm32_spi *spi);
  215. int (*config)(struct stm32_spi *spi);
  216. void (*set_bpw)(struct stm32_spi *spi);
  217. int (*set_mode)(struct stm32_spi *spi, unsigned int comm_type);
  218. void (*set_data_idleness)(struct stm32_spi *spi, u32 length);
  219. int (*set_number_of_data)(struct stm32_spi *spi, u32 length);
  220. void (*transfer_one_dma_start)(struct stm32_spi *spi);
  221. void (*dma_rx_cb)(void *data);
  222. void (*dma_tx_cb)(void *data);
  223. int (*transfer_one_irq)(struct stm32_spi *spi);
  224. irqreturn_t (*irq_handler_event)(int irq, void *dev_id);
  225. irqreturn_t (*irq_handler_thread)(int irq, void *dev_id);
  226. unsigned int baud_rate_div_min;
  227. unsigned int baud_rate_div_max;
  228. bool has_fifo;
  229. u16 flags;
  230. };
  231. /**
  232. * struct stm32_spi - private data of the SPI controller
  233. * @dev: driver model representation of the controller
  234. * @master: controller master interface
  235. * @cfg: compatible configuration data
  236. * @base: virtual memory area
  237. * @clk: hw kernel clock feeding the SPI clock generator
  238. * @clk_rate: rate of the hw kernel clock feeding the SPI clock generator
  239. * @lock: prevent I/O concurrent access
  240. * @irq: SPI controller interrupt line
  241. * @fifo_size: size of the embedded fifo in bytes
  242. * @cur_midi: master inter-data idleness in ns
  243. * @cur_speed: speed configured in Hz
  244. * @cur_half_period: time of a half bit in us
  245. * @cur_bpw: number of bits in a single SPI data frame
  246. * @cur_fthlv: fifo threshold level (data frames in a single data packet)
  247. * @cur_comm: SPI communication mode
  248. * @cur_xferlen: current transfer length in bytes
  249. * @cur_usedma: boolean to know if dma is used in current transfer
  250. * @tx_buf: data to be written, or NULL
  251. * @rx_buf: data to be read, or NULL
  252. * @tx_len: number of data to be written in bytes
  253. * @rx_len: number of data to be read in bytes
  254. * @dma_tx: dma channel for TX transfer
  255. * @dma_rx: dma channel for RX transfer
  256. * @phys_addr: SPI registers physical base address
  257. */
  258. struct stm32_spi {
  259. struct device *dev;
  260. struct spi_master *master;
  261. const struct stm32_spi_cfg *cfg;
  262. void __iomem *base;
  263. struct clk *clk;
  264. u32 clk_rate;
  265. spinlock_t lock; /* prevent I/O concurrent access */
  266. int irq;
  267. unsigned int fifo_size;
  268. unsigned int cur_midi;
  269. unsigned int cur_speed;
  270. unsigned int cur_half_period;
  271. unsigned int cur_bpw;
  272. unsigned int cur_fthlv;
  273. unsigned int cur_comm;
  274. unsigned int cur_xferlen;
  275. bool cur_usedma;
  276. const void *tx_buf;
  277. void *rx_buf;
  278. int tx_len;
  279. int rx_len;
  280. struct dma_chan *dma_tx;
  281. struct dma_chan *dma_rx;
  282. dma_addr_t phys_addr;
  283. };
  284. static const struct stm32_spi_regspec stm32f4_spi_regspec = {
  285. .en = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_SPE },
  286. .dma_rx_en = { STM32F4_SPI_CR2, STM32F4_SPI_CR2_RXDMAEN },
  287. .dma_tx_en = { STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXDMAEN },
  288. .cpol = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_CPOL },
  289. .cpha = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_CPHA },
  290. .lsb_first = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_LSBFRST },
  291. .br = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_BR, STM32F4_SPI_CR1_BR_SHIFT },
  292. .rx = { STM32F4_SPI_DR },
  293. .tx = { STM32F4_SPI_DR },
  294. };
  295. static const struct stm32_spi_regspec stm32h7_spi_regspec = {
  296. /* SPI data transfer is enabled but spi_ker_ck is idle.
  297. * CFG1 and CFG2 registers are write protected when SPE is enabled.
  298. */
  299. .en = { STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE },
  300. .dma_rx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_RXDMAEN },
  301. .dma_tx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN },
  302. .cpol = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPOL },
  303. .cpha = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPHA },
  304. .lsb_first = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_LSBFRST },
  305. .br = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_MBR,
  306. STM32H7_SPI_CFG1_MBR_SHIFT },
  307. .rx = { STM32H7_SPI_RXDR },
  308. .tx = { STM32H7_SPI_TXDR },
  309. };
  310. static inline void stm32_spi_set_bits(struct stm32_spi *spi,
  311. u32 offset, u32 bits)
  312. {
  313. writel_relaxed(readl_relaxed(spi->base + offset) | bits,
  314. spi->base + offset);
  315. }
  316. static inline void stm32_spi_clr_bits(struct stm32_spi *spi,
  317. u32 offset, u32 bits)
  318. {
  319. writel_relaxed(readl_relaxed(spi->base + offset) & ~bits,
  320. spi->base + offset);
  321. }
  322. /**
  323. * stm32h7_spi_get_fifo_size - Return fifo size
  324. * @spi: pointer to the spi controller data structure
  325. */
  326. static int stm32h7_spi_get_fifo_size(struct stm32_spi *spi)
  327. {
  328. unsigned long flags;
  329. u32 count = 0;
  330. spin_lock_irqsave(&spi->lock, flags);
  331. stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);
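/*
 * With SPE set but no transfer started, every byte written to TXDR is
 * absorbed by the TX FIFO: keep writing until TXP deasserts, and the number
 * of accepted bytes gives the FIFO depth in bytes.
 */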
  332. while (readl_relaxed(spi->base + STM32H7_SPI_SR) & STM32H7_SPI_SR_TXP)
  333. writeb_relaxed(++count, spi->base + STM32H7_SPI_TXDR);
  334. stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);
  335. spin_unlock_irqrestore(&spi->lock, flags);
  336. dev_dbg(spi->dev, "%d x 8-bit fifo size\n", count);
  337. return count;
  338. }
  339. /**
  340. * stm32f4_spi_get_bpw_mask - Return bits per word mask
  341. * @spi: pointer to the spi controller data structure
  342. */
  343. static int stm32f4_spi_get_bpw_mask(struct stm32_spi *spi)
  344. {
  345. dev_dbg(spi->dev, "8-bit or 16-bit data frame supported\n");
  346. return SPI_BPW_MASK(8) | SPI_BPW_MASK(16);
  347. }
  348. /**
  349. * stm32h7_spi_get_bpw_mask - Return bits per word mask
  350. * @spi: pointer to the spi controller data structure
  351. */
  352. static int stm32h7_spi_get_bpw_mask(struct stm32_spi *spi)
  353. {
  354. unsigned long flags;
  355. u32 cfg1, max_bpw;
  356. spin_lock_irqsave(&spi->lock, flags);
  357. /*
  358. * The most significant bit of the DSIZE bit field is reserved when the
  359. * maximum data size of peripheral instances is limited to 16-bit
  360. */
  361. stm32_spi_set_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_DSIZE);
  362. cfg1 = readl_relaxed(spi->base + STM32H7_SPI_CFG1);
  363. max_bpw = FIELD_GET(STM32H7_SPI_CFG1_DSIZE, cfg1) + 1;
  364. spin_unlock_irqrestore(&spi->lock, flags);
  365. dev_dbg(spi->dev, "%d-bit maximum data frame\n", max_bpw);
  366. return SPI_BPW_RANGE_MASK(4, max_bpw);
  367. }
  368. /**
  369. * stm32_spi_prepare_mbr - Determine baud rate divisor value
  370. * @spi: pointer to the spi controller data structure
  371. * @speed_hz: requested speed
  372. * @min_div: minimum baud rate divisor
  373. * @max_div: maximum baud rate divisor
  374. *
  375. * Return baud rate divisor value in case of success or -EINVAL
  376. */
  377. static int stm32_spi_prepare_mbr(struct stm32_spi *spi, u32 speed_hz,
  378. u32 min_div, u32 max_div)
  379. {
  380. u32 div, mbrdiv;
  381. /* Ensure spi->clk_rate is even */
  382. div = DIV_ROUND_CLOSEST(spi->clk_rate & ~0x1, speed_hz);
  383. /*
  384. * The SPI framework sets xfer->speed_hz to master->max_speed_hz when
  385. * xfer->speed_hz is greater than master->max_speed_hz, and it returns
  386. * an error when xfer->speed_hz is lower than master->min_speed_hz, so
  387. * there is no need to check it here.
  388. * However, the resulting divisor still has to be range-checked below.
  389. */
  390. if ((div < min_div) || (div > max_div))
  391. return -EINVAL;
  392. /* Determine the first power of 2 greater than or equal to div */
  393. if (div & (div - 1))
  394. mbrdiv = fls(div);
  395. else
  396. mbrdiv = fls(div) - 1;
  397. spi->cur_speed = spi->clk_rate / (1 << mbrdiv);
  398. spi->cur_half_period = DIV_ROUND_CLOSEST(USEC_PER_SEC, 2 * spi->cur_speed);
  399. return mbrdiv - 1;
  400. }
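/*
 * Illustrative example (assumed clock values): with spi->clk_rate = 100 MHz
 * and speed_hz = 12 MHz, div = DIV_ROUND_CLOSEST(100000000, 12000000) = 8.
 * 8 is a power of two, so mbrdiv = fls(8) - 1 = 3, cur_speed becomes
 * 100 MHz / 2^3 = 12.5 MHz, and the function returns mbrdiv - 1 = 2, i.e.
 * the register encoding of a divide-by-8 prescaler.
 */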
  401. /**
  402. * stm32h7_spi_prepare_fthlv - Determine FIFO threshold level
  403. * @spi: pointer to the spi controller data structure
  404. * @xfer_len: length of the message to be transferred
  405. */
  406. static u32 stm32h7_spi_prepare_fthlv(struct stm32_spi *spi, u32 xfer_len)
  407. {
  408. u32 packet, bpw;
  409. /* data packet should not exceed 1/2 of fifo space */
  410. packet = clamp(xfer_len, 1U, spi->fifo_size / 2);
  411. /* align packet size with data registers access */
  412. bpw = DIV_ROUND_UP(spi->cur_bpw, 8);
  413. return DIV_ROUND_UP(packet, bpw);
  414. }
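/*
 * Illustrative example (assumed values): with a 16-byte FIFO, cur_bpw = 16
 * and xfer_len = 6, packet = clamp(6, 1, 8) = 6 bytes and each frame takes
 * DIV_ROUND_UP(16, 8) = 2 bytes, so the returned threshold is
 * DIV_ROUND_UP(6, 2) = 3 data frames per packet.
 */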
  415. /**
  416. * stm32f4_spi_write_tx - Write bytes to Transmit Data Register
  417. * @spi: pointer to the spi controller data structure
  418. *
  419. * The read from tx_buf depends on the remaining byte count, to avoid
  420. * reading beyond the end of tx_buf.
  421. */
  422. static void stm32f4_spi_write_tx(struct stm32_spi *spi)
  423. {
  424. if ((spi->tx_len > 0) && (readl_relaxed(spi->base + STM32F4_SPI_SR) &
  425. STM32F4_SPI_SR_TXE)) {
  426. u32 offs = spi->cur_xferlen - spi->tx_len;
  427. if (spi->cur_bpw == 16) {
  428. const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs);
  429. writew_relaxed(*tx_buf16, spi->base + STM32F4_SPI_DR);
  430. spi->tx_len -= sizeof(u16);
  431. } else {
  432. const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs);
  433. writeb_relaxed(*tx_buf8, spi->base + STM32F4_SPI_DR);
  434. spi->tx_len -= sizeof(u8);
  435. }
  436. }
  437. dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len);
  438. }
  439. /**
  440. * stm32h7_spi_write_txfifo - Write bytes in Transmit Data Register
  441. * @spi: pointer to the spi controller data structure
  442. *
  443. * The read from tx_buf depends on the remaining byte count, to avoid
  444. * reading beyond the end of tx_buf.
  445. */
  446. static void stm32h7_spi_write_txfifo(struct stm32_spi *spi)
  447. {
  448. while ((spi->tx_len > 0) &&
  449. (readl_relaxed(spi->base + STM32H7_SPI_SR) &
  450. STM32H7_SPI_SR_TXP)) {
  451. u32 offs = spi->cur_xferlen - spi->tx_len;
  452. if (spi->tx_len >= sizeof(u32)) {
  453. const u32 *tx_buf32 = (const u32 *)(spi->tx_buf + offs);
  454. writel_relaxed(*tx_buf32, spi->base + STM32H7_SPI_TXDR);
  455. spi->tx_len -= sizeof(u32);
  456. } else if (spi->tx_len >= sizeof(u16)) {
  457. const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs);
  458. writew_relaxed(*tx_buf16, spi->base + STM32H7_SPI_TXDR);
  459. spi->tx_len -= sizeof(u16);
  460. } else {
  461. const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs);
  462. writeb_relaxed(*tx_buf8, spi->base + STM32H7_SPI_TXDR);
  463. spi->tx_len -= sizeof(u8);
  464. }
  465. }
  466. dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len);
  467. }
  468. /**
  469. * stm32f4_spi_read_rx - Read bytes from Receive Data Register
  470. * @spi: pointer to the spi controller data structure
  471. *
  472. * The write to rx_buf depends on the remaining byte count, to avoid
  473. * writing beyond the end of rx_buf.
  474. */
  475. static void stm32f4_spi_read_rx(struct stm32_spi *spi)
  476. {
  477. if ((spi->rx_len > 0) && (readl_relaxed(spi->base + STM32F4_SPI_SR) &
  478. STM32F4_SPI_SR_RXNE)) {
  479. u32 offs = spi->cur_xferlen - spi->rx_len;
  480. if (spi->cur_bpw == 16) {
  481. u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs);
  482. *rx_buf16 = readw_relaxed(spi->base + STM32F4_SPI_DR);
  483. spi->rx_len -= sizeof(u16);
  484. } else {
  485. u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs);
  486. *rx_buf8 = readb_relaxed(spi->base + STM32F4_SPI_DR);
  487. spi->rx_len -= sizeof(u8);
  488. }
  489. }
  490. dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->rx_len);
  491. }
  492. /**
  493. * stm32h7_spi_read_rxfifo - Read bytes in Receive Data Register
  494. * @spi: pointer to the spi controller data structure
  495. *
  496. * The write to rx_buf depends on the remaining byte count, to avoid
  497. * writing beyond the end of rx_buf.
  498. */
  499. static void stm32h7_spi_read_rxfifo(struct stm32_spi *spi)
  500. {
  501. u32 sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
  502. u32 rxplvl = FIELD_GET(STM32H7_SPI_SR_RXPLVL, sr);
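/*
 * Keep draining while data remains and either RXP signals at least a full
 * packet, or EOT is set with residual data still in the FIFO (RXWNE set or
 * RXPLVL non-zero).
 */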
  503. while ((spi->rx_len > 0) &&
  504. ((sr & STM32H7_SPI_SR_RXP) ||
  505. ((sr & STM32H7_SPI_SR_EOT) &&
  506. ((sr & STM32H7_SPI_SR_RXWNE) || (rxplvl > 0))))) {
  507. u32 offs = spi->cur_xferlen - spi->rx_len;
  508. if ((spi->rx_len >= sizeof(u32)) ||
  509. (sr & STM32H7_SPI_SR_RXWNE)) {
  510. u32 *rx_buf32 = (u32 *)(spi->rx_buf + offs);
  511. *rx_buf32 = readl_relaxed(spi->base + STM32H7_SPI_RXDR);
  512. spi->rx_len -= sizeof(u32);
  513. } else if ((spi->rx_len >= sizeof(u16)) ||
  514. (!(sr & STM32H7_SPI_SR_RXWNE) &&
  515. (rxplvl >= 2 || spi->cur_bpw > 8))) {
  516. u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs);
  517. *rx_buf16 = readw_relaxed(spi->base + STM32H7_SPI_RXDR);
  518. spi->rx_len -= sizeof(u16);
  519. } else {
  520. u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs);
  521. *rx_buf8 = readb_relaxed(spi->base + STM32H7_SPI_RXDR);
  522. spi->rx_len -= sizeof(u8);
  523. }
  524. sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
  525. rxplvl = FIELD_GET(STM32H7_SPI_SR_RXPLVL, sr);
  526. }
  527. dev_dbg(spi->dev, "%s: %d bytes left (sr=%08x)\n",
  528. __func__, spi->rx_len, sr);
  529. }
  530. /**
  531. * stm32_spi_enable - Enable SPI controller
  532. * @spi: pointer to the spi controller data structure
  533. */
  534. static void stm32_spi_enable(struct stm32_spi *spi)
  535. {
  536. dev_dbg(spi->dev, "enable controller\n");
  537. stm32_spi_set_bits(spi, spi->cfg->regs->en.reg,
  538. spi->cfg->regs->en.mask);
  539. }
  540. /**
  541. * stm32f4_spi_disable - Disable SPI controller
  542. * @spi: pointer to the spi controller data structure
  543. */
  544. static void stm32f4_spi_disable(struct stm32_spi *spi)
  545. {
  546. unsigned long flags;
  547. u32 sr;
  548. dev_dbg(spi->dev, "disable controller\n");
  549. spin_lock_irqsave(&spi->lock, flags);
  550. if (!(readl_relaxed(spi->base + STM32F4_SPI_CR1) &
  551. STM32F4_SPI_CR1_SPE)) {
  552. spin_unlock_irqrestore(&spi->lock, flags);
  553. return;
  554. }
  555. /* Disable interrupts */
  556. stm32_spi_clr_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXEIE |
  557. STM32F4_SPI_CR2_RXNEIE |
  558. STM32F4_SPI_CR2_ERRIE);
  559. /* Wait until BSY = 0 */
  560. if (readl_relaxed_poll_timeout_atomic(spi->base + STM32F4_SPI_SR,
  561. sr, !(sr & STM32F4_SPI_SR_BSY),
  562. 10, 100000) < 0) {
  563. dev_warn(spi->dev, "disabling condition timeout\n");
  564. }
  565. if (spi->cur_usedma && spi->dma_tx)
  566. dmaengine_terminate_all(spi->dma_tx);
  567. if (spi->cur_usedma && spi->dma_rx)
  568. dmaengine_terminate_all(spi->dma_rx);
  569. stm32_spi_clr_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_SPE);
  570. stm32_spi_clr_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXDMAEN |
  571. STM32F4_SPI_CR2_RXDMAEN);
  572. /* Sequence to clear OVR flag */
  573. readl_relaxed(spi->base + STM32F4_SPI_DR);
  574. readl_relaxed(spi->base + STM32F4_SPI_SR);
  575. spin_unlock_irqrestore(&spi->lock, flags);
  576. }
  577. /**
  578. * stm32h7_spi_disable - Disable SPI controller
  579. * @spi: pointer to the spi controller data structure
  580. *
  581. * RX-Fifo is flushed when SPI controller is disabled.
  582. */
  583. static void stm32h7_spi_disable(struct stm32_spi *spi)
  584. {
  585. unsigned long flags;
  586. u32 cr1;
  587. dev_dbg(spi->dev, "disable controller\n");
  588. spin_lock_irqsave(&spi->lock, flags);
  589. cr1 = readl_relaxed(spi->base + STM32H7_SPI_CR1);
  590. if (!(cr1 & STM32H7_SPI_CR1_SPE)) {
  591. spin_unlock_irqrestore(&spi->lock, flags);
  592. return;
  593. }
  594. /* Add a delay to make sure that the transmission has ended. */
  595. if (spi->cur_half_period)
  596. udelay(spi->cur_half_period);
  597. if (spi->cur_usedma && spi->dma_tx)
  598. dmaengine_terminate_all(spi->dma_tx);
  599. if (spi->cur_usedma && spi->dma_rx)
  600. dmaengine_terminate_all(spi->dma_rx);
  601. stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);
  602. stm32_spi_clr_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN |
  603. STM32H7_SPI_CFG1_RXDMAEN);
  604. /* Disable interrupts and clear status flags */
  605. writel_relaxed(0, spi->base + STM32H7_SPI_IER);
  606. writel_relaxed(STM32H7_SPI_IFCR_ALL, spi->base + STM32H7_SPI_IFCR);
  607. spin_unlock_irqrestore(&spi->lock, flags);
  608. }
  609. /**
  610. * stm32_spi_can_dma - Determine if the transfer is eligible for DMA use
  611. * @master: controller master interface
  612. * @spi_dev: pointer to the spi device
  613. * @transfer: pointer to spi transfer
  614. *
  615. * If the controller has a fifo, use DMA when the transfer size exceeds the
  616. * fifo size. Otherwise use DMA for transfers longer than SPI_DMA_MIN_BYTES.
  617. */
  618. static bool stm32_spi_can_dma(struct spi_master *master,
  619. struct spi_device *spi_dev,
  620. struct spi_transfer *transfer)
  621. {
  622. unsigned int dma_size;
  623. struct stm32_spi *spi = spi_master_get_devdata(master);
  624. if (spi->cfg->has_fifo)
  625. dma_size = spi->fifo_size;
  626. else
  627. dma_size = SPI_DMA_MIN_BYTES;
  628. dev_dbg(spi->dev, "%s: %s\n", __func__,
  629. (transfer->len > dma_size) ? "true" : "false");
  630. return (transfer->len > dma_size);
  631. }
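/*
 * For example (assumed sizes): with an stm32h7 16-byte FIFO, a 64-byte
 * transfer is handed to DMA while a 12-byte one stays in interrupt mode;
 * on stm32f4 (no FIFO) the threshold is SPI_DMA_MIN_BYTES.
 */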
  632. /**
  633. * stm32f4_spi_irq_event - Interrupt handler for SPI controller events
  634. * @irq: interrupt line
  635. * @dev_id: SPI controller master interface
  636. */
  637. static irqreturn_t stm32f4_spi_irq_event(int irq, void *dev_id)
  638. {
  639. struct spi_master *master = dev_id;
  640. struct stm32_spi *spi = spi_master_get_devdata(master);
  641. u32 sr, mask = 0;
  642. bool end = false;
  643. spin_lock(&spi->lock);
  644. sr = readl_relaxed(spi->base + STM32F4_SPI_SR);
  645. /*
  646. * The BSY flag is not handled by the interrupt handler; it is normal
  647. * for this flag to be set during a transfer.
  648. */
  649. sr &= ~STM32F4_SPI_SR_BSY;
  650. if (!spi->cur_usedma && (spi->cur_comm == SPI_SIMPLEX_TX ||
  651. spi->cur_comm == SPI_3WIRE_TX)) {
  652. /* OVR flag shouldn't be handled for TX only mode */
  653. sr &= ~(STM32F4_SPI_SR_OVR | STM32F4_SPI_SR_RXNE);
  654. mask |= STM32F4_SPI_SR_TXE;
  655. }
  656. if (!spi->cur_usedma && (spi->cur_comm == SPI_FULL_DUPLEX ||
  657. spi->cur_comm == SPI_SIMPLEX_RX ||
  658. spi->cur_comm == SPI_3WIRE_RX)) {
  659. /* TXE flag is always set; it is handled when the RXNE flag occurs */
  660. sr &= ~STM32F4_SPI_SR_TXE;
  661. mask |= STM32F4_SPI_SR_RXNE | STM32F4_SPI_SR_OVR;
  662. }
  663. if (!(sr & mask)) {
  664. dev_dbg(spi->dev, "spurious IT (sr=0x%08x)\n", sr);
  665. spin_unlock(&spi->lock);
  666. return IRQ_NONE;
  667. }
  668. if (sr & STM32F4_SPI_SR_OVR) {
  669. dev_warn(spi->dev, "Overrun: received value discarded\n");
  670. /* Sequence to clear OVR flag */
  671. readl_relaxed(spi->base + STM32F4_SPI_DR);
  672. readl_relaxed(spi->base + STM32F4_SPI_SR);
  673. /*
  674. * If overrun is detected, it means that something went wrong,
  675. * so stop the current transfer. Transfer can wait for next
  676. * RXNE but DR is already read and end never happens.
  677. */
  678. end = true;
  679. goto end_irq;
  680. }
  681. if (sr & STM32F4_SPI_SR_TXE) {
  682. if (spi->tx_buf)
  683. stm32f4_spi_write_tx(spi);
  684. if (spi->tx_len == 0)
  685. end = true;
  686. }
  687. if (sr & STM32F4_SPI_SR_RXNE) {
  688. stm32f4_spi_read_rx(spi);
  689. if (spi->rx_len == 0)
  690. end = true;
  691. else if (spi->tx_buf)/* Load data for discontinuous mode */
  692. stm32f4_spi_write_tx(spi);
  693. }
  694. end_irq:
  695. if (end) {
  696. /* Immediately disable interrupts so that no new ones are generated */
  697. stm32_spi_clr_bits(spi, STM32F4_SPI_CR2,
  698. STM32F4_SPI_CR2_TXEIE |
  699. STM32F4_SPI_CR2_RXNEIE |
  700. STM32F4_SPI_CR2_ERRIE);
  701. spin_unlock(&spi->lock);
  702. return IRQ_WAKE_THREAD;
  703. }
  704. spin_unlock(&spi->lock);
  705. return IRQ_HANDLED;
  706. }
  707. /**
  708. * stm32f4_spi_irq_thread - Thread of interrupt handler for SPI controller
  709. * @irq: interrupt line
  710. * @dev_id: SPI controller master interface
  711. */
  712. static irqreturn_t stm32f4_spi_irq_thread(int irq, void *dev_id)
  713. {
  714. struct spi_master *master = dev_id;
  715. struct stm32_spi *spi = spi_master_get_devdata(master);
  716. spi_finalize_current_transfer(master);
  717. stm32f4_spi_disable(spi);
  718. return IRQ_HANDLED;
  719. }
  720. /**
  721. * stm32h7_spi_irq_thread - Thread of interrupt handler for SPI controller
  722. * @irq: interrupt line
  723. * @dev_id: SPI controller master interface
  724. */
  725. static irqreturn_t stm32h7_spi_irq_thread(int irq, void *dev_id)
  726. {
  727. struct spi_master *master = dev_id;
  728. struct stm32_spi *spi = spi_master_get_devdata(master);
  729. u32 sr, ier, mask;
  730. unsigned long flags;
  731. bool end = false;
  732. spin_lock_irqsave(&spi->lock, flags);
  733. sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
  734. ier = readl_relaxed(spi->base + STM32H7_SPI_IER);
  735. mask = ier;
  736. /*
  737. * EOTIE enables interrupts for the EOT, SUSP and TXC events. SUSP is added
  738. * to the mask so it can be acknowledged later. TXC is cleared automatically.
  739. */
  740. mask |= STM32H7_SPI_SR_SUSP;
  741. /*
  742. * DXPIE is set in Full-Duplex: a single interrupt is raised only when both
  743. * TXP and RXP are set. So in Full-Duplex, TXP and RXP must be polled here.
  744. */
  745. if ((spi->cur_comm == SPI_FULL_DUPLEX) && !spi->cur_usedma)
  746. mask |= STM32H7_SPI_SR_TXP | STM32H7_SPI_SR_RXP;
  747. if (!(sr & mask)) {
  748. dev_warn(spi->dev, "spurious IT (sr=0x%08x, ier=0x%08x)\n",
  749. sr, ier);
  750. spin_unlock_irqrestore(&spi->lock, flags);
  751. return IRQ_NONE;
  752. }
  753. if (sr & STM32H7_SPI_SR_SUSP) {
  754. static DEFINE_RATELIMIT_STATE(rs,
  755. DEFAULT_RATELIMIT_INTERVAL * 10,
  756. 1);
  757. ratelimit_set_flags(&rs, RATELIMIT_MSG_ON_RELEASE);
  758. if (__ratelimit(&rs))
  759. dev_dbg_ratelimited(spi->dev, "Communication suspended\n");
  760. if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
  761. stm32h7_spi_read_rxfifo(spi);
  762. /*
  763. * If communication is suspended while using DMA, it means
  764. * that something went wrong, so stop the current transfer
  765. */
  766. if (spi->cur_usedma)
  767. end = true;
  768. }
  769. if (sr & STM32H7_SPI_SR_MODF) {
  770. dev_warn(spi->dev, "Mode fault: transfer aborted\n");
  771. end = true;
  772. }
  773. if (sr & STM32H7_SPI_SR_OVR) {
  774. dev_err(spi->dev, "Overrun: RX data lost\n");
  775. end = true;
  776. }
  777. if (sr & STM32H7_SPI_SR_EOT) {
  778. if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
  779. stm32h7_spi_read_rxfifo(spi);
  780. if (!spi->cur_usedma ||
  781. (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX))
  782. end = true;
  783. }
  784. if (sr & STM32H7_SPI_SR_TXP)
  785. if (!spi->cur_usedma && (spi->tx_buf && (spi->tx_len > 0)))
  786. stm32h7_spi_write_txfifo(spi);
  787. if (sr & STM32H7_SPI_SR_RXP)
  788. if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
  789. stm32h7_spi_read_rxfifo(spi);
  790. writel_relaxed(sr & mask, spi->base + STM32H7_SPI_IFCR);
  791. spin_unlock_irqrestore(&spi->lock, flags);
  792. if (end) {
  793. stm32h7_spi_disable(spi);
  794. spi_finalize_current_transfer(master);
  795. }
  796. return IRQ_HANDLED;
  797. }
  798. /**
  799. * stm32_spi_prepare_msg - set up the controller to transfer a single message
  800. * @master: controller master interface
  801. * @msg: pointer to spi message
  802. */
  803. static int stm32_spi_prepare_msg(struct spi_master *master,
  804. struct spi_message *msg)
  805. {
  806. struct stm32_spi *spi = spi_master_get_devdata(master);
  807. struct spi_device *spi_dev = msg->spi;
  808. struct device_node *np = spi_dev->dev.of_node;
  809. unsigned long flags;
  810. u32 clrb = 0, setb = 0;
  811. /* SPI slave device may need time between data frames */
  812. spi->cur_midi = 0;
  813. if (np && !of_property_read_u32(np, "st,spi-midi-ns", &spi->cur_midi))
  814. dev_dbg(spi->dev, "%dns inter-data idleness\n", spi->cur_midi);
  815. if (spi_dev->mode & SPI_CPOL)
  816. setb |= spi->cfg->regs->cpol.mask;
  817. else
  818. clrb |= spi->cfg->regs->cpol.mask;
  819. if (spi_dev->mode & SPI_CPHA)
  820. setb |= spi->cfg->regs->cpha.mask;
  821. else
  822. clrb |= spi->cfg->regs->cpha.mask;
  823. if (spi_dev->mode & SPI_LSB_FIRST)
  824. setb |= spi->cfg->regs->lsb_first.mask;
  825. else
  826. clrb |= spi->cfg->regs->lsb_first.mask;
  827. dev_dbg(spi->dev, "cpol=%d cpha=%d lsb_first=%d cs_high=%d\n",
  828. !!(spi_dev->mode & SPI_CPOL),
  829. !!(spi_dev->mode & SPI_CPHA),
  830. !!(spi_dev->mode & SPI_LSB_FIRST),
  831. !!(spi_dev->mode & SPI_CS_HIGH));
  832. /* On STM32H7, messages should not exceed a maximum size set
  833. * afterwards via the set_number_of_data function. In order to
  834. * ensure that, split large messages into several smaller ones
  835. */
  836. if (spi->cfg->set_number_of_data) {
  837. int ret;
  838. ret = spi_split_transfers_maxsize(master, msg,
  839. STM32H7_SPI_TSIZE_MAX,
  840. GFP_KERNEL | GFP_DMA);
  841. if (ret)
  842. return ret;
  843. }
  844. spin_lock_irqsave(&spi->lock, flags);
  845. /* CPOL, CPHA and LSB FIRST bits share a common register */
  846. if (clrb || setb)
  847. writel_relaxed(
  848. (readl_relaxed(spi->base + spi->cfg->regs->cpol.reg) &
  849. ~clrb) | setb,
  850. spi->base + spi->cfg->regs->cpol.reg);
  851. spin_unlock_irqrestore(&spi->lock, flags);
  852. return 0;
  853. }
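/*
 * The inter-data idleness is taken from the SPI slave device node, e.g.
 * (illustrative devicetree fragment, property values assumed):
 *
 *	sensor@0 {
 *		compatible = "...";
 *		reg = <0>;
 *		spi-max-frequency = <10000000>;
 *		st,spi-midi-ns = <4000>;
 *	};
 */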
  854. /**
  855. * stm32f4_spi_dma_tx_cb - dma callback
  856. * @data: pointer to the spi controller data structure
  857. *
  858. * DMA callback is called when the transfer is complete for DMA TX channel.
  859. */
  860. static void stm32f4_spi_dma_tx_cb(void *data)
  861. {
  862. struct stm32_spi *spi = data;
  863. if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) {
  864. spi_finalize_current_transfer(spi->master);
  865. stm32f4_spi_disable(spi);
  866. }
  867. }
  868. /**
  869. * stm32_spi_dma_rx_cb - dma callback
  870. * @data: pointer to the spi controller data structure
  871. *
  872. * DMA callback is called when the transfer is complete for DMA RX channel.
  873. */
  874. static void stm32_spi_dma_rx_cb(void *data)
  875. {
  876. struct stm32_spi *spi = data;
  877. spi_finalize_current_transfer(spi->master);
  878. spi->cfg->disable(spi);
  879. }
  880. /**
  881. * stm32_spi_dma_config - configure dma slave channel depending on current
  882. * transfer bits_per_word.
  883. * @spi: pointer to the spi controller data structure
  884. * @dma_conf: pointer to the dma_slave_config structure
  885. * @dir: direction of the dma transfer
  886. */
  887. static void stm32_spi_dma_config(struct stm32_spi *spi,
  888. struct dma_slave_config *dma_conf,
  889. enum dma_transfer_direction dir)
  890. {
  891. enum dma_slave_buswidth buswidth;
  892. u32 maxburst;
  893. if (spi->cur_bpw <= 8)
  894. buswidth = DMA_SLAVE_BUSWIDTH_1_BYTE;
  895. else if (spi->cur_bpw <= 16)
  896. buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
  897. else
  898. buswidth = DMA_SLAVE_BUSWIDTH_4_BYTES;
  899. if (spi->cfg->has_fifo) {
  900. /* Valid for DMA Half or Full Fifo threshold */
  901. if (spi->cur_fthlv == 2)
  902. maxburst = 1;
  903. else
  904. maxburst = spi->cur_fthlv;
  905. } else {
  906. maxburst = 1;
  907. }
  908. memset(dma_conf, 0, sizeof(struct dma_slave_config));
  909. dma_conf->direction = dir;
  910. if (dma_conf->direction == DMA_DEV_TO_MEM) { /* RX */
  911. dma_conf->src_addr = spi->phys_addr + spi->cfg->regs->rx.reg;
  912. dma_conf->src_addr_width = buswidth;
  913. dma_conf->src_maxburst = maxburst;
  914. dev_dbg(spi->dev, "Rx DMA config buswidth=%d, maxburst=%d\n",
  915. buswidth, maxburst);
  916. } else if (dma_conf->direction == DMA_MEM_TO_DEV) { /* TX */
  917. dma_conf->dst_addr = spi->phys_addr + spi->cfg->regs->tx.reg;
  918. dma_conf->dst_addr_width = buswidth;
  919. dma_conf->dst_maxburst = maxburst;
  920. dev_dbg(spi->dev, "Tx DMA config buswidth=%d, maxburst=%d\n",
  921. buswidth, maxburst);
  922. }
  923. }
  924. /**
  925. * stm32f4_spi_transfer_one_irq - transfer a single spi_transfer using
  926. * interrupts
  927. * @spi: pointer to the spi controller data structure
  928. *
  929. * It must return 0 if the transfer is finished or 1 if the transfer is still
  930. * in progress.
  931. */
  932. static int stm32f4_spi_transfer_one_irq(struct stm32_spi *spi)
  933. {
  934. unsigned long flags;
  935. u32 cr2 = 0;
  936. /* Enable the interrupts relative to the current communication mode */
  937. if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) {
  938. cr2 |= STM32F4_SPI_CR2_TXEIE;
  939. } else if (spi->cur_comm == SPI_FULL_DUPLEX ||
  940. spi->cur_comm == SPI_SIMPLEX_RX ||
  941. spi->cur_comm == SPI_3WIRE_RX) {
  942. /* In transmit-only mode, the OVR flag is set in the SR register
  943. * since the received data are never read. Therefore set OVR
  944. * interrupt only when rx buffer is available.
  945. */
  946. cr2 |= STM32F4_SPI_CR2_RXNEIE | STM32F4_SPI_CR2_ERRIE;
  947. } else {
  948. return -EINVAL;
  949. }
  950. spin_lock_irqsave(&spi->lock, flags);
  951. stm32_spi_set_bits(spi, STM32F4_SPI_CR2, cr2);
  952. stm32_spi_enable(spi);
  953. /* starting data transfer when buffer is loaded */
  954. if (spi->tx_buf)
  955. stm32f4_spi_write_tx(spi);
  956. spin_unlock_irqrestore(&spi->lock, flags);
  957. return 1;
  958. }
  959. /**
  960. * stm32h7_spi_transfer_one_irq - transfer a single spi_transfer using
  961. * interrupts
  962. * @spi: pointer to the spi controller data structure
  963. *
  964. * It must return 0 if the transfer is finished or 1 if the transfer is still
  965. * in progress.
  966. */
  967. static int stm32h7_spi_transfer_one_irq(struct stm32_spi *spi)
  968. {
  969. unsigned long flags;
  970. u32 ier = 0;
  971. /* Enable the interrupts relative to the current communication mode */
  972. if (spi->tx_buf && spi->rx_buf) /* Full Duplex */
  973. ier |= STM32H7_SPI_IER_DXPIE;
  974. else if (spi->tx_buf) /* Half-Duplex TX dir or Simplex TX */
  975. ier |= STM32H7_SPI_IER_TXPIE;
  976. else if (spi->rx_buf) /* Half-Duplex RX dir or Simplex RX */
  977. ier |= STM32H7_SPI_IER_RXPIE;
  978. /* Enable the interrupts relative to the end of transfer */
  979. ier |= STM32H7_SPI_IER_EOTIE | STM32H7_SPI_IER_TXTFIE |
  980. STM32H7_SPI_IER_OVRIE | STM32H7_SPI_IER_MODFIE;
  981. spin_lock_irqsave(&spi->lock, flags);
  982. stm32_spi_enable(spi);
  983. /* Be sure to have data in fifo before starting data transfer */
  984. if (spi->tx_buf)
  985. stm32h7_spi_write_txfifo(spi);
  986. stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART);
  987. writel_relaxed(ier, spi->base + STM32H7_SPI_IER);
  988. spin_unlock_irqrestore(&spi->lock, flags);
  989. return 1;
  990. }
  991. /**
  992. * stm32f4_spi_transfer_one_dma_start - Set SPI driver registers to start
  993. * transfer using DMA
  994. * @spi: pointer to the spi controller data structure
  995. */
  996. static void stm32f4_spi_transfer_one_dma_start(struct stm32_spi *spi)
  997. {
  998. /* In DMA mode end of transfer is handled by DMA TX or RX callback. */
  999. if (spi->cur_comm == SPI_SIMPLEX_RX || spi->cur_comm == SPI_3WIRE_RX ||
  1000. spi->cur_comm == SPI_FULL_DUPLEX) {
  1001. /*
  1002. * In transmit-only mode, the OVR flag is set in the SR register
  1003. * since the received data are never read. Therefore set OVR
  1004. * interrupt only when rx buffer is available.
  1005. */
  1006. stm32_spi_set_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_ERRIE);
  1007. }
  1008. stm32_spi_enable(spi);
  1009. }
  1010. /**
  1011. * stm32h7_spi_transfer_one_dma_start - Set SPI driver registers to start
  1012. * transfer using DMA
  1013. * @spi: pointer to the spi controller data structure
  1014. */
  1015. static void stm32h7_spi_transfer_one_dma_start(struct stm32_spi *spi)
  1016. {
  1017. uint32_t ier = STM32H7_SPI_IER_OVRIE | STM32H7_SPI_IER_MODFIE;
  1018. /* Enable the interrupts */
  1019. if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX)
  1020. ier |= STM32H7_SPI_IER_EOTIE | STM32H7_SPI_IER_TXTFIE;
  1021. stm32_spi_set_bits(spi, STM32H7_SPI_IER, ier);
  1022. stm32_spi_enable(spi);
  1023. stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART);
  1024. }
  1025. /**
  1026. * stm32_spi_transfer_one_dma - transfer a single spi_transfer using DMA
  1027. * @spi: pointer to the spi controller data structure
  1028. * @xfer: pointer to the spi_transfer structure
  1029. *
  1030. * It must return 0 if the transfer is finished or 1 if the transfer is still
  1031. * in progress.
  1032. */
  1033. static int stm32_spi_transfer_one_dma(struct stm32_spi *spi,
  1034. struct spi_transfer *xfer)
  1035. {
  1036. struct dma_slave_config tx_dma_conf, rx_dma_conf;
  1037. struct dma_async_tx_descriptor *tx_dma_desc, *rx_dma_desc;
  1038. unsigned long flags;
  1039. spin_lock_irqsave(&spi->lock, flags);
  1040. rx_dma_desc = NULL;
  1041. if (spi->rx_buf && spi->dma_rx) {
  1042. stm32_spi_dma_config(spi, &rx_dma_conf, DMA_DEV_TO_MEM);
  1043. dmaengine_slave_config(spi->dma_rx, &rx_dma_conf);
  1044. /* Enable Rx DMA request */
  1045. stm32_spi_set_bits(spi, spi->cfg->regs->dma_rx_en.reg,
  1046. spi->cfg->regs->dma_rx_en.mask);
  1047. rx_dma_desc = dmaengine_prep_slave_sg(
  1048. spi->dma_rx, xfer->rx_sg.sgl,
  1049. xfer->rx_sg.nents,
  1050. rx_dma_conf.direction,
  1051. DMA_PREP_INTERRUPT);
  1052. }
  1053. tx_dma_desc = NULL;
  1054. if (spi->tx_buf && spi->dma_tx) {
  1055. stm32_spi_dma_config(spi, &tx_dma_conf, DMA_MEM_TO_DEV);
  1056. dmaengine_slave_config(spi->dma_tx, &tx_dma_conf);
  1057. tx_dma_desc = dmaengine_prep_slave_sg(
  1058. spi->dma_tx, xfer->tx_sg.sgl,
  1059. xfer->tx_sg.nents,
  1060. tx_dma_conf.direction,
  1061. DMA_PREP_INTERRUPT);
  1062. }
  1063. if ((spi->tx_buf && spi->dma_tx && !tx_dma_desc) ||
  1064. (spi->rx_buf && spi->dma_rx && !rx_dma_desc))
  1065. goto dma_desc_error;
  1066. if (spi->cur_comm == SPI_FULL_DUPLEX && (!tx_dma_desc || !rx_dma_desc))
  1067. goto dma_desc_error;
  1068. if (rx_dma_desc) {
  1069. rx_dma_desc->callback = spi->cfg->dma_rx_cb;
  1070. rx_dma_desc->callback_param = spi;
  1071. if (dma_submit_error(dmaengine_submit(rx_dma_desc))) {
  1072. dev_err(spi->dev, "Rx DMA submit failed\n");
  1073. goto dma_desc_error;
  1074. }
  1075. /* Enable Rx DMA channel */
  1076. dma_async_issue_pending(spi->dma_rx);
  1077. }
  1078. if (tx_dma_desc) {
  1079. if (spi->cur_comm == SPI_SIMPLEX_TX ||
  1080. spi->cur_comm == SPI_3WIRE_TX) {
  1081. tx_dma_desc->callback = spi->cfg->dma_tx_cb;
  1082. tx_dma_desc->callback_param = spi;
  1083. }
  1084. if (dma_submit_error(dmaengine_submit(tx_dma_desc))) {
  1085. dev_err(spi->dev, "Tx DMA submit failed\n");
  1086. goto dma_submit_error;
  1087. }
  1088. /* Enable Tx DMA channel */
  1089. dma_async_issue_pending(spi->dma_tx);
  1090. /* Enable Tx DMA request */
  1091. stm32_spi_set_bits(spi, spi->cfg->regs->dma_tx_en.reg,
  1092. spi->cfg->regs->dma_tx_en.mask);
  1093. }
  1094. spi->cfg->transfer_one_dma_start(spi);
  1095. spin_unlock_irqrestore(&spi->lock, flags);
  1096. return 1;
  1097. dma_submit_error:
  1098. if (spi->dma_rx)
  1099. dmaengine_terminate_all(spi->dma_rx);
  1100. dma_desc_error:
  1101. stm32_spi_clr_bits(spi, spi->cfg->regs->dma_rx_en.reg,
  1102. spi->cfg->regs->dma_rx_en.mask);
  1103. spin_unlock_irqrestore(&spi->lock, flags);
  1104. dev_info(spi->dev, "DMA issue: fall back to irq transfer\n");
  1105. spi->cur_usedma = false;
  1106. return spi->cfg->transfer_one_irq(spi);
  1107. }
  1108. /**
  1109. * stm32f4_spi_set_bpw - Configure bits per word
  1110. * @spi: pointer to the spi controller data structure
  1111. */
  1112. static void stm32f4_spi_set_bpw(struct stm32_spi *spi)
  1113. {
  1114. if (spi->cur_bpw == 16)
  1115. stm32_spi_set_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_DFF);
  1116. else
  1117. stm32_spi_clr_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_DFF);
  1118. }
  1119. /**
  1120. * stm32h7_spi_set_bpw - configure bits per word
  1121. * @spi: pointer to the spi controller data structure
  1122. */
  1123. static void stm32h7_spi_set_bpw(struct stm32_spi *spi)
  1124. {
  1125. u32 bpw, fthlv;
  1126. u32 cfg1_clrb = 0, cfg1_setb = 0;
  1127. bpw = spi->cur_bpw - 1;
  1128. cfg1_clrb |= STM32H7_SPI_CFG1_DSIZE;
  1129. cfg1_setb |= FIELD_PREP(STM32H7_SPI_CFG1_DSIZE, bpw);
  1130. spi->cur_fthlv = stm32h7_spi_prepare_fthlv(spi, spi->cur_xferlen);
  1131. fthlv = spi->cur_fthlv - 1;
  1132. cfg1_clrb |= STM32H7_SPI_CFG1_FTHLV;
  1133. cfg1_setb |= FIELD_PREP(STM32H7_SPI_CFG1_FTHLV, fthlv);
  1134. writel_relaxed(
  1135. (readl_relaxed(spi->base + STM32H7_SPI_CFG1) &
  1136. ~cfg1_clrb) | cfg1_setb,
  1137. spi->base + STM32H7_SPI_CFG1);
  1138. }
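/*
 * Illustrative example (assumed values): for an 8-bit, 5-byte transfer with
 * a 16-byte FIFO, DSIZE is programmed with 8 - 1 = 7, cur_fthlv evaluates to
 * clamp(5, 1, 8) = 5 frames (one byte per frame), so FTHLV is programmed
 * with 5 - 1 = 4.
 */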
  1139. /**
  1140. * stm32_spi_set_mbr - Configure baud rate divisor in master mode
  1141. * @spi: pointer to the spi controller data structure
  1142. * @mbrdiv: baud rate divisor value
  1143. */
  1144. static void stm32_spi_set_mbr(struct stm32_spi *spi, u32 mbrdiv)
  1145. {
  1146. u32 clrb = 0, setb = 0;
  1147. clrb |= spi->cfg->regs->br.mask;
  1148. setb |= (mbrdiv << spi->cfg->regs->br.shift) & spi->cfg->regs->br.mask;
  1149. writel_relaxed((readl_relaxed(spi->base + spi->cfg->regs->br.reg) &
  1150. ~clrb) | setb,
  1151. spi->base + spi->cfg->regs->br.reg);
  1152. }
  1153. /**
  1154. * stm32_spi_communication_type - return transfer communication type
  1155. * @spi_dev: pointer to the spi device
  1156. * @transfer: pointer to spi transfer
  1157. */
  1158. static unsigned int stm32_spi_communication_type(struct spi_device *spi_dev,
  1159. struct spi_transfer *transfer)
  1160. {
  1161. unsigned int type = SPI_FULL_DUPLEX;
  1162. if (spi_dev->mode & SPI_3WIRE) { /* MISO/MOSI signals shared */
  1163. /*
  1164. * SPI_3WIRE and xfer->tx_buf != NULL and xfer->rx_buf != NULL
  1165. * is forbidden and rejected by the SPI subsystem, so the
  1166. * direction of the transfer can be determined from whichever
  1167. * buffer is valid.
  1168. */
  1169. if (!transfer->tx_buf)
  1170. type = SPI_3WIRE_RX;
  1171. else
  1172. type = SPI_3WIRE_TX;
  1173. } else {
  1174. if (!transfer->tx_buf)
  1175. type = SPI_SIMPLEX_RX;
  1176. else if (!transfer->rx_buf)
  1177. type = SPI_SIMPLEX_TX;
  1178. }
  1179. return type;
  1180. }
  1181. /**
  1182. * stm32f4_spi_set_mode - configure communication mode
  1183. * @spi: pointer to the spi controller data structure
  1184. * @comm_type: type of communication to configure
  1185. */
  1186. static int stm32f4_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type)
  1187. {
  1188. if (comm_type == SPI_3WIRE_TX || comm_type == SPI_SIMPLEX_TX) {
  1189. stm32_spi_set_bits(spi, STM32F4_SPI_CR1,
  1190. STM32F4_SPI_CR1_BIDIMODE |
  1191. STM32F4_SPI_CR1_BIDIOE);
  1192. } else if (comm_type == SPI_FULL_DUPLEX ||
  1193. comm_type == SPI_SIMPLEX_RX) {
  1194. stm32_spi_clr_bits(spi, STM32F4_SPI_CR1,
  1195. STM32F4_SPI_CR1_BIDIMODE |
  1196. STM32F4_SPI_CR1_BIDIOE);
  1197. } else if (comm_type == SPI_3WIRE_RX) {
  1198. stm32_spi_set_bits(spi, STM32F4_SPI_CR1,
  1199. STM32F4_SPI_CR1_BIDIMODE);
  1200. stm32_spi_clr_bits(spi, STM32F4_SPI_CR1,
  1201. STM32F4_SPI_CR1_BIDIOE);
  1202. } else {
  1203. return -EINVAL;
  1204. }
  1205. return 0;
  1206. }
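/*
 * Resulting CR1 configuration: TX-only modes set BIDIMODE and BIDIOE,
 * full-duplex and simplex RX clear both, and 3-wire RX sets BIDIMODE with
 * BIDIOE cleared (receive-only on the single data line).
 */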
  1207. /**
  1208. * stm32h7_spi_set_mode - configure communication mode
  1209. * @spi: pointer to the spi controller data structure
  1210. * @comm_type: type of communication to configure
  1211. */
  1212. static int stm32h7_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type)
  1213. {
  1214. u32 mode;
  1215. u32 cfg2_clrb = 0, cfg2_setb = 0;
	if (comm_type == SPI_3WIRE_RX) {
		mode = STM32H7_SPI_HALF_DUPLEX;
		stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR);
	} else if (comm_type == SPI_3WIRE_TX) {
		mode = STM32H7_SPI_HALF_DUPLEX;
		stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR);
	} else if (comm_type == SPI_SIMPLEX_RX) {
		mode = STM32H7_SPI_SIMPLEX_RX;
	} else if (comm_type == SPI_SIMPLEX_TX) {
		mode = STM32H7_SPI_SIMPLEX_TX;
	} else {
		mode = STM32H7_SPI_FULL_DUPLEX;
	}

	cfg2_clrb |= STM32H7_SPI_CFG2_COMM;
	cfg2_setb |= FIELD_PREP(STM32H7_SPI_CFG2_COMM, mode);

	writel_relaxed((readl_relaxed(spi->base + STM32H7_SPI_CFG2) &
			~cfg2_clrb) | cfg2_setb,
		       spi->base + STM32H7_SPI_CFG2);

	return 0;
}

/**
 * stm32h7_spi_data_idleness - configure minimum time delay inserted between two
 *			       consecutive data frames in master mode
 * @spi: pointer to the spi controller data structure
 * @len: transfer len
 */
static void stm32h7_spi_data_idleness(struct stm32_spi *spi, u32 len)
{
	u32 cfg2_clrb = 0, cfg2_setb = 0;

	cfg2_clrb |= STM32H7_SPI_CFG2_MIDI;
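	/*
	 * The MIDI field expresses the inter-frame idle time in SCK cycles:
	 * convert the requested delay (ns) with the current SCK period and
	 * clamp it to the width of the field.
	 */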
	if ((len > 1) && (spi->cur_midi > 0)) {
		u32 sck_period_ns = DIV_ROUND_UP(NSEC_PER_SEC, spi->cur_speed);
		u32 midi = min_t(u32,
				 DIV_ROUND_UP(spi->cur_midi, sck_period_ns),
				 FIELD_GET(STM32H7_SPI_CFG2_MIDI,
					   STM32H7_SPI_CFG2_MIDI));

		dev_dbg(spi->dev, "period=%dns, midi=%d(=%dns)\n",
			sck_period_ns, midi, midi * sck_period_ns);
		cfg2_setb |= FIELD_PREP(STM32H7_SPI_CFG2_MIDI, midi);
	}

	writel_relaxed((readl_relaxed(spi->base + STM32H7_SPI_CFG2) &
			~cfg2_clrb) | cfg2_setb,
		       spi->base + STM32H7_SPI_CFG2);
}

/**
 * stm32h7_spi_number_of_data - configure number of data at current transfer
 * @spi: pointer to the spi controller data structure
 * @nb_words: transfer length (in words)
 */
static int stm32h7_spi_number_of_data(struct stm32_spi *spi, u32 nb_words)
{
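	/* CR2 TSIZE holds the frame count; larger requests are rejected */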
	if (nb_words <= STM32H7_SPI_TSIZE_MAX) {
		writel_relaxed(FIELD_PREP(STM32H7_SPI_CR2_TSIZE, nb_words),
			       spi->base + STM32H7_SPI_CR2);
	} else {
		return -EMSGSIZE;
	}

	return 0;
}

/**
 * stm32_spi_transfer_one_setup - common setup to transfer a single
 *				  spi_transfer either using DMA or
 *				  interrupts.
 * @spi: pointer to the spi controller data structure
 * @spi_dev: pointer to the spi device
 * @transfer: pointer to spi transfer
 */
static int stm32_spi_transfer_one_setup(struct stm32_spi *spi,
					struct spi_device *spi_dev,
					struct spi_transfer *transfer)
{
	unsigned long flags;
	unsigned int comm_type;
	int nb_words, ret = 0;
	int mbr;

	spin_lock_irqsave(&spi->lock, flags);

	spi->cur_xferlen = transfer->len;

	spi->cur_bpw = transfer->bits_per_word;
	spi->cfg->set_bpw(spi);

	/* Update spi->cur_speed with real clock speed */
	mbr = stm32_spi_prepare_mbr(spi, transfer->speed_hz,
				    spi->cfg->baud_rate_div_min,
				    spi->cfg->baud_rate_div_max);
	if (mbr < 0) {
		ret = mbr;
		goto out;
	}

	transfer->speed_hz = spi->cur_speed;
	stm32_spi_set_mbr(spi, mbr);

	comm_type = stm32_spi_communication_type(spi_dev, transfer);
	ret = spi->cfg->set_mode(spi, comm_type);
	if (ret < 0)
		goto out;

	spi->cur_comm = comm_type;

	if (spi->cfg->set_data_idleness)
		spi->cfg->set_data_idleness(spi, transfer->len);
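
	/*
	 * Frames wider than 8 bits occupy 2 or 4 buffer bytes each, so
	 * convert the transfer length in bytes into a number of data frames.
	 */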
	if (spi->cur_bpw <= 8)
		nb_words = transfer->len;
	else if (spi->cur_bpw <= 16)
		nb_words = DIV_ROUND_UP(transfer->len * 8, 16);
	else
		nb_words = DIV_ROUND_UP(transfer->len * 8, 32);

	if (spi->cfg->set_number_of_data) {
		ret = spi->cfg->set_number_of_data(spi, nb_words);
		if (ret < 0)
			goto out;
	}

	dev_dbg(spi->dev, "transfer communication mode set to %d\n",
		spi->cur_comm);
	dev_dbg(spi->dev,
		"data frame of %d-bit, data packet of %d data frames\n",
		spi->cur_bpw, spi->cur_fthlv);
	dev_dbg(spi->dev, "speed set to %dHz\n", spi->cur_speed);
	dev_dbg(spi->dev, "transfer of %d bytes (%d data frames)\n",
		spi->cur_xferlen, nb_words);
	dev_dbg(spi->dev, "dma %s\n",
		(spi->cur_usedma) ? "enabled" : "disabled");

out:
	spin_unlock_irqrestore(&spi->lock, flags);

	return ret;
}

/**
 * stm32_spi_transfer_one - transfer a single spi_transfer
 * @master: controller master interface
 * @spi_dev: pointer to the spi device
 * @transfer: pointer to spi transfer
 *
 * It must return 0 if the transfer is finished or 1 if the transfer is still
 * in progress.
 */
static int stm32_spi_transfer_one(struct spi_master *master,
				  struct spi_device *spi_dev,
				  struct spi_transfer *transfer)
{
	struct stm32_spi *spi = spi_master_get_devdata(master);
	int ret;

	spi->tx_buf = transfer->tx_buf;
	spi->rx_buf = transfer->rx_buf;
	spi->tx_len = spi->tx_buf ? transfer->len : 0;
	spi->rx_len = spi->rx_buf ? transfer->len : 0;
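
	/*
	 * Use DMA only when channels were obtained at probe time and the
	 * can_dma() callback accepts this transfer; otherwise fall back to
	 * the interrupt-driven path.
	 */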
	spi->cur_usedma = (master->can_dma &&
			   master->can_dma(master, spi_dev, transfer));

	ret = stm32_spi_transfer_one_setup(spi, spi_dev, transfer);
	if (ret) {
		dev_err(spi->dev, "SPI transfer setup failed\n");
		return ret;
	}

	if (spi->cur_usedma)
		return stm32_spi_transfer_one_dma(spi, transfer);
	else
		return spi->cfg->transfer_one_irq(spi);
}

/**
 * stm32_spi_unprepare_msg - relax the hardware
 * @master: controller master interface
 * @msg: pointer to the spi message
 */
static int stm32_spi_unprepare_msg(struct spi_master *master,
				   struct spi_message *msg)
{
	struct stm32_spi *spi = spi_master_get_devdata(master);

	spi->cfg->disable(spi);

	return 0;
}

/**
 * stm32f4_spi_config - Configure SPI controller as SPI master
 * @spi: pointer to the spi controller data structure
 */
static int stm32f4_spi_config(struct stm32_spi *spi)
{
	unsigned long flags;

	spin_lock_irqsave(&spi->lock, flags);

	/* Ensure I2SMOD bit is kept cleared */
	stm32_spi_clr_bits(spi, STM32F4_SPI_I2SCFGR,
			   STM32F4_SPI_I2SCFGR_I2SMOD);

	/*
	 * - SS input value high
	 * - transmitter half duplex direction
	 * - Set the master mode (default Motorola mode)
	 * - Consider 1 master/n slaves configuration and
	 *   SS input value is determined by the SSI bit
	 */
	stm32_spi_set_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_SSI |
						 STM32F4_SPI_CR1_BIDIOE |
						 STM32F4_SPI_CR1_MSTR |
						 STM32F4_SPI_CR1_SSM);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 0;
}

/**
 * stm32h7_spi_config - Configure SPI controller as SPI master
 * @spi: pointer to the spi controller data structure
 */
static int stm32h7_spi_config(struct stm32_spi *spi)
{
	unsigned long flags;

	spin_lock_irqsave(&spi->lock, flags);

	/* Ensure I2SMOD bit is kept cleared */
	stm32_spi_clr_bits(spi, STM32H7_SPI_I2SCFGR,
			   STM32H7_SPI_I2SCFGR_I2SMOD);

	/*
	 * - SS input value high
	 * - transmitter half duplex direction
	 * - automatic communication suspend when RX-Fifo is full
	 */
	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SSI |
						 STM32H7_SPI_CR1_HDDIR |
						 STM32H7_SPI_CR1_MASRX);

	/*
	 * - Set the master mode (default Motorola mode)
	 * - Consider 1 master/n slaves configuration and
	 *   SS input value is determined by the SSI bit
	 * - keep control of all associated GPIOs
	 */
	stm32_spi_set_bits(spi, STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_MASTER |
						  STM32H7_SPI_CFG2_SSM |
						  STM32H7_SPI_CFG2_AFCNTR);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 0;
}

static const struct stm32_spi_cfg stm32f4_spi_cfg = {
	.regs = &stm32f4_spi_regspec,
	.get_bpw_mask = stm32f4_spi_get_bpw_mask,
	.disable = stm32f4_spi_disable,
	.config = stm32f4_spi_config,
	.set_bpw = stm32f4_spi_set_bpw,
	.set_mode = stm32f4_spi_set_mode,
	.transfer_one_dma_start = stm32f4_spi_transfer_one_dma_start,
	.dma_tx_cb = stm32f4_spi_dma_tx_cb,
	.dma_rx_cb = stm32_spi_dma_rx_cb,
	.transfer_one_irq = stm32f4_spi_transfer_one_irq,
	.irq_handler_event = stm32f4_spi_irq_event,
	.irq_handler_thread = stm32f4_spi_irq_thread,
	.baud_rate_div_min = STM32F4_SPI_BR_DIV_MIN,
	.baud_rate_div_max = STM32F4_SPI_BR_DIV_MAX,
	.has_fifo = false,
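	/* Have the SPI core supply dummy TX data for RX-only transfers */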
	.flags = SPI_MASTER_MUST_TX,
};

static const struct stm32_spi_cfg stm32h7_spi_cfg = {
	.regs = &stm32h7_spi_regspec,
	.get_fifo_size = stm32h7_spi_get_fifo_size,
	.get_bpw_mask = stm32h7_spi_get_bpw_mask,
	.disable = stm32h7_spi_disable,
	.config = stm32h7_spi_config,
	.set_bpw = stm32h7_spi_set_bpw,
	.set_mode = stm32h7_spi_set_mode,
	.set_data_idleness = stm32h7_spi_data_idleness,
	.set_number_of_data = stm32h7_spi_number_of_data,
	.transfer_one_dma_start = stm32h7_spi_transfer_one_dma_start,
	.dma_rx_cb = stm32_spi_dma_rx_cb,
	/*
	 * dma_tx_cb is not necessary: for TX, the DMA transfer is followed
	 * by an SPI access, so completion is handled within the SPI
	 * interrupt.
	 */
	.transfer_one_irq = stm32h7_spi_transfer_one_irq,
	.irq_handler_thread = stm32h7_spi_irq_thread,
	.baud_rate_div_min = STM32H7_SPI_MBR_DIV_MIN,
	.baud_rate_div_max = STM32H7_SPI_MBR_DIV_MAX,
	.has_fifo = true,
};

static const struct of_device_id stm32_spi_of_match[] = {
	{ .compatible = "st,stm32h7-spi", .data = (void *)&stm32h7_spi_cfg },
	{ .compatible = "st,stm32f4-spi", .data = (void *)&stm32f4_spi_cfg },
	{},
};
MODULE_DEVICE_TABLE(of, stm32_spi_of_match);

static int stm32_spi_probe(struct platform_device *pdev)
{
	struct spi_master *master;
	struct stm32_spi *spi;
	struct resource *res;
	struct reset_control *rst;
	int ret;

	master = devm_spi_alloc_master(&pdev->dev, sizeof(struct stm32_spi));
	if (!master) {
		dev_err(&pdev->dev, "spi master allocation failed\n");
		return -ENOMEM;
	}
	platform_set_drvdata(pdev, master);

	spi = spi_master_get_devdata(master);
	spi->dev = &pdev->dev;
	spi->master = master;
	spin_lock_init(&spi->lock);

	spi->cfg = (const struct stm32_spi_cfg *)
		of_match_device(pdev->dev.driver->of_match_table,
				&pdev->dev)->data;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	spi->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(spi->base))
		return PTR_ERR(spi->base);

	spi->phys_addr = (dma_addr_t)res->start;

	spi->irq = platform_get_irq(pdev, 0);
	if (spi->irq <= 0)
		return dev_err_probe(&pdev->dev, spi->irq,
				     "failed to get irq\n");

	ret = devm_request_threaded_irq(&pdev->dev, spi->irq,
					spi->cfg->irq_handler_event,
					spi->cfg->irq_handler_thread,
					IRQF_ONESHOT, pdev->name, master);
	if (ret) {
		dev_err(&pdev->dev, "irq%d request failed: %d\n", spi->irq,
			ret);
		return ret;
	}

	spi->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(spi->clk)) {
		ret = PTR_ERR(spi->clk);
		dev_err(&pdev->dev, "clk get failed: %d\n", ret);
		return ret;
	}

	ret = clk_prepare_enable(spi->clk);
	if (ret) {
		dev_err(&pdev->dev, "clk enable failed: %d\n", ret);
		return ret;
	}

	spi->clk_rate = clk_get_rate(spi->clk);
	if (!spi->clk_rate) {
		dev_err(&pdev->dev, "clk rate = 0\n");
		ret = -EINVAL;
		goto err_clk_disable;
	}
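
	/*
	 * An optional reset line, when present, is pulsed to put the
	 * controller back into its default state before configuration.
	 */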
	rst = devm_reset_control_get_optional_exclusive(&pdev->dev, NULL);
	if (rst) {
		if (IS_ERR(rst)) {
			ret = dev_err_probe(&pdev->dev, PTR_ERR(rst),
					    "failed to get reset\n");
			goto err_clk_disable;
		}

		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	if (spi->cfg->has_fifo)
		spi->fifo_size = spi->cfg->get_fifo_size(spi);

	ret = spi->cfg->config(spi);
	if (ret) {
		dev_err(&pdev->dev, "controller configuration failed: %d\n",
			ret);
		goto err_clk_disable;
	}

	master->dev.of_node = pdev->dev.of_node;
	master->auto_runtime_pm = true;
	master->bus_num = pdev->id;
	master->mode_bits = SPI_CPHA | SPI_CPOL | SPI_CS_HIGH | SPI_LSB_FIRST |
			    SPI_3WIRE;
	master->bits_per_word_mask = spi->cfg->get_bpw_mask(spi);
	master->max_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_min;
	master->min_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_max;
	master->use_gpio_descriptors = true;
	master->prepare_message = stm32_spi_prepare_msg;
	master->transfer_one = stm32_spi_transfer_one;
	master->unprepare_message = stm32_spi_unprepare_msg;
	master->flags = spi->cfg->flags;
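
	/*
	 * DMA channels are optional: defer probing if they are not ready
	 * yet, otherwise fall back to interrupt-only transfers.
	 */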
	spi->dma_tx = dma_request_chan(spi->dev, "tx");
	if (IS_ERR(spi->dma_tx)) {
		ret = PTR_ERR(spi->dma_tx);
		spi->dma_tx = NULL;
		if (ret == -EPROBE_DEFER)
			goto err_clk_disable;

		dev_warn(&pdev->dev, "failed to request tx dma channel\n");
	} else {
		master->dma_tx = spi->dma_tx;
	}

	spi->dma_rx = dma_request_chan(spi->dev, "rx");
	if (IS_ERR(spi->dma_rx)) {
		ret = PTR_ERR(spi->dma_rx);
		spi->dma_rx = NULL;
		if (ret == -EPROBE_DEFER)
			goto err_dma_release;

		dev_warn(&pdev->dev, "failed to request rx dma channel\n");
	} else {
		master->dma_rx = spi->dma_rx;
	}

	if (spi->dma_tx || spi->dma_rx)
		master->can_dma = stm32_spi_can_dma;
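
	/*
	 * Enable runtime PM with autosuspend; the device is marked active
	 * and referenced so it stays powered until registration completes.
	 */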
	pm_runtime_set_autosuspend_delay(&pdev->dev,
					 STM32_SPI_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(&pdev->dev);
	pm_runtime_set_active(&pdev->dev);
	pm_runtime_get_noresume(&pdev->dev);
	pm_runtime_enable(&pdev->dev);

	ret = spi_register_master(master);
	if (ret) {
		dev_err(&pdev->dev, "spi master registration failed: %d\n",
			ret);
		goto err_pm_disable;
	}

	pm_runtime_mark_last_busy(&pdev->dev);
	pm_runtime_put_autosuspend(&pdev->dev);

	dev_info(&pdev->dev, "driver initialized\n");

	return 0;

err_pm_disable:
	pm_runtime_disable(&pdev->dev);
	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_set_suspended(&pdev->dev);
	pm_runtime_dont_use_autosuspend(&pdev->dev);
err_dma_release:
	if (spi->dma_tx)
		dma_release_channel(spi->dma_tx);
	if (spi->dma_rx)
		dma_release_channel(spi->dma_rx);
err_clk_disable:
	clk_disable_unprepare(spi->clk);

	return ret;
}

static int stm32_spi_remove(struct platform_device *pdev)
{
	struct spi_master *master = platform_get_drvdata(pdev);
	struct stm32_spi *spi = spi_master_get_devdata(master);
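
	/*
	 * Resume the controller so its registers stay accessible while the
	 * master is unregistered and the controller is disabled.
	 */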
	pm_runtime_get_sync(&pdev->dev);
	spi_unregister_master(master);
	spi->cfg->disable(spi);

	pm_runtime_disable(&pdev->dev);
	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_set_suspended(&pdev->dev);
	pm_runtime_dont_use_autosuspend(&pdev->dev);

	if (master->dma_tx)
		dma_release_channel(master->dma_tx);
	if (master->dma_rx)
		dma_release_channel(master->dma_rx);

	clk_disable_unprepare(spi->clk);

	pinctrl_pm_select_sleep_state(&pdev->dev);

	return 0;
}

static int __maybe_unused stm32_spi_runtime_suspend(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct stm32_spi *spi = spi_master_get_devdata(master);

	clk_disable_unprepare(spi->clk);

	return pinctrl_pm_select_sleep_state(dev);
}

static int __maybe_unused stm32_spi_runtime_resume(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct stm32_spi *spi = spi_master_get_devdata(master);
	int ret;

	ret = pinctrl_pm_select_default_state(dev);
	if (ret)
		return ret;

	return clk_prepare_enable(spi->clk);
}

static int __maybe_unused stm32_spi_suspend(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	int ret;

	ret = spi_master_suspend(master);
	if (ret)
		return ret;

	return pm_runtime_force_suspend(dev);
}

static int __maybe_unused stm32_spi_resume(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct stm32_spi *spi = spi_master_get_devdata(master);
	int ret;

	ret = pm_runtime_force_resume(dev);
	if (ret)
		return ret;

	ret = spi_master_resume(master);
	if (ret) {
		clk_disable_unprepare(spi->clk);
		return ret;
	}
	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0) {
		dev_err(dev, "Unable to power device: %d\n", ret);
		return ret;
	}
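
	/*
	 * Re-apply the base controller configuration, which may not have
	 * been preserved across suspend.
	 */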
	spi->cfg->config(spi);

	pm_runtime_mark_last_busy(dev);
	pm_runtime_put_autosuspend(dev);

	return 0;
}

static const struct dev_pm_ops stm32_spi_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(stm32_spi_suspend, stm32_spi_resume)
	SET_RUNTIME_PM_OPS(stm32_spi_runtime_suspend,
			   stm32_spi_runtime_resume, NULL)
};

static struct platform_driver stm32_spi_driver = {
	.probe = stm32_spi_probe,
	.remove = stm32_spi_remove,
	.driver = {
		.name = DRIVER_NAME,
		.pm = &stm32_spi_pm_ops,
		.of_match_table = stm32_spi_of_match,
	},
};

module_platform_driver(stm32_spi_driver);

MODULE_ALIAS("platform:" DRIVER_NAME);
MODULE_DESCRIPTION("STMicroelectronics STM32 SPI Controller driver");
MODULE_AUTHOR("Amelie Delaunay <[email protected]>");
MODULE_LICENSE("GPL v2");