  1. // SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
  2. //
  3. // This file is provided under a dual BSD/GPLv2 license. When using or
  4. // redistributing this file, you may do so under either license.
  5. //
  6. // Copyright(c) 2018-2022 Intel Corporation. All rights reserved.
  7. //
  8. #include <linux/delay.h>
  9. #include <linux/device.h>
  10. #include <linux/dma-mapping.h>
  11. #include <linux/firmware.h>
  12. #include <linux/fs.h>
  13. #include <linux/interrupt.h>
  14. #include <linux/mm.h>
  15. #include <linux/module.h>
  16. #include <linux/pci.h>
  17. #include <linux/pm_runtime.h>
  18. #include <linux/slab.h>
  19. #include <sound/hdaudio_ext.h>
  20. #include <sound/sof.h>
  21. #include <sound/pcm_params.h>
  22. #include "../sof-priv.h"
  23. #include "../ops.h"
  24. #include "hda.h"
#define HDA_SKL_WAIT_TIMEOUT		500	/* 500 msec */
#define HDA_SKL_CLDMA_MAX_BUFFER_SIZE	(32 * PAGE_SIZE)

/*
 * Bitfields of the code-loader DMA (CLDMA) stream descriptor control
 * register (SD_CTL), programmed below via SOF_HDA_ADSP_REG_CL_SD_CTL.
 */

/* Stream Reset */
#define HDA_CL_SD_CTL_SRST_SHIFT	0
#define HDA_CL_SD_CTL_SRST(x)		(((x) & 0x1) << \
					 HDA_CL_SD_CTL_SRST_SHIFT)

/* Stream Run */
#define HDA_CL_SD_CTL_RUN_SHIFT		1
#define HDA_CL_SD_CTL_RUN(x)		(((x) & 0x1) << \
					 HDA_CL_SD_CTL_RUN_SHIFT)

/* Interrupt On Completion Enable */
#define HDA_CL_SD_CTL_IOCE_SHIFT	2
#define HDA_CL_SD_CTL_IOCE(x)		(((x) & 0x1) << \
					 HDA_CL_SD_CTL_IOCE_SHIFT)

/* FIFO Error Interrupt Enable */
#define HDA_CL_SD_CTL_FEIE_SHIFT	3
#define HDA_CL_SD_CTL_FEIE(x)		(((x) & 0x1) << \
					 HDA_CL_SD_CTL_FEIE_SHIFT)

/* Descriptor Error Interrupt Enable */
#define HDA_CL_SD_CTL_DEIE_SHIFT	4
#define HDA_CL_SD_CTL_DEIE(x)		(((x) & 0x1) << \
					 HDA_CL_SD_CTL_DEIE_SHIFT)

/* FIFO Limit Change */
#define HDA_CL_SD_CTL_FIFOLC_SHIFT	5
#define HDA_CL_SD_CTL_FIFOLC(x)		(((x) & 0x1) << \
					 HDA_CL_SD_CTL_FIFOLC_SHIFT)

/* Stripe Control */
#define HDA_CL_SD_CTL_STRIPE_SHIFT	16
#define HDA_CL_SD_CTL_STRIPE(x)		(((x) & 0x3) << \
					 HDA_CL_SD_CTL_STRIPE_SHIFT)

/* Traffic Priority */
#define HDA_CL_SD_CTL_TP_SHIFT		18
#define HDA_CL_SD_CTL_TP(x)		(((x) & 0x1) << \
					 HDA_CL_SD_CTL_TP_SHIFT)

/* Bidirectional Direction Control */
#define HDA_CL_SD_CTL_DIR_SHIFT		19
#define HDA_CL_SD_CTL_DIR(x)		(((x) & 0x1) << \
					 HDA_CL_SD_CTL_DIR_SHIFT)

/* Stream Number */
#define HDA_CL_SD_CTL_STRM_SHIFT	20
#define HDA_CL_SD_CTL_STRM(x)		(((x) & 0xf) << \
					 HDA_CL_SD_CTL_STRM_SHIFT)

/* Combined set/clear of the three CLDMA interrupt-enable bits */
#define HDA_CL_SD_CTL_INT(x)	\
		(HDA_CL_SD_CTL_IOCE(x) | \
		HDA_CL_SD_CTL_FEIE(x) | \
		HDA_CL_SD_CTL_DEIE(x))

#define HDA_CL_SD_CTL_INT_MASK	\
		(HDA_CL_SD_CTL_IOCE(1) | \
		HDA_CL_SD_CTL_FEIE(1) | \
		HDA_CL_SD_CTL_DEIE(1))

/* BDL base addresses must be 128-byte aligned; low 7 bits are dropped */
#define DMA_ADDRESS_128_BITS_ALIGNMENT	7
#define BDL_ALIGN(x)			((x) >> DMA_ADDRESS_128_BITS_ALIGNMENT)

/* Buffer Descriptor List Lower Base Address */
#define HDA_CL_SD_BDLPLBA_SHIFT	7
#define HDA_CL_SD_BDLPLBA_MASK	GENMASK(31, 7)
#define HDA_CL_SD_BDLPLBA(x)	\
	((BDL_ALIGN(lower_32_bits(x)) << HDA_CL_SD_BDLPLBA_SHIFT) & \
	 HDA_CL_SD_BDLPLBA_MASK)

/* Buffer Descriptor List Upper Base Address */
#define HDA_CL_SD_BDLPUBA(x)	\
	(upper_32_bits(x))

/* Software Position in Buffer Enable (SPBFIFO control register) */
#define HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_SHIFT	0
#define HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_MASK	\
	(1 << HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_SHIFT)
#define HDA_CL_SPBFIFO_SPBFCCTL_SPIBE(x)	\
	(((x) << HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_SHIFT) & \
	 HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_MASK)

/* SD_STS bit checked after each transfer: buffer-complete interrupt status */
#define HDA_CL_DMA_SD_INT_COMPLETE		0x4
  94. static int cl_skl_cldma_setup_bdle(struct snd_sof_dev *sdev,
  95. struct snd_dma_buffer *dmab_data,
  96. __le32 **bdlp, int size, int with_ioc)
  97. {
  98. phys_addr_t addr = virt_to_phys(dmab_data->area);
  99. __le32 *bdl = *bdlp;
  100. /*
  101. * This code is simplified by using one fragment of physical memory and assuming
  102. * all the code fits. This could be improved with scatter-gather but the firmware
  103. * size is limited by DSP memory anyways
  104. */
  105. bdl[0] = cpu_to_le32(lower_32_bits(addr));
  106. bdl[1] = cpu_to_le32(upper_32_bits(addr));
  107. bdl[2] = cpu_to_le32(size);
  108. bdl[3] = (!with_ioc) ? 0 : cpu_to_le32(0x01);
  109. return 1; /* one fragment */
  110. }
  111. static void cl_skl_cldma_stream_run(struct snd_sof_dev *sdev, bool enable)
  112. {
  113. int sd_offset = SOF_HDA_ADSP_LOADER_BASE;
  114. unsigned char val;
  115. int retries;
  116. u32 run = enable ? 0x1 : 0;
  117. snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
  118. sd_offset + SOF_HDA_ADSP_REG_CL_SD_CTL,
  119. HDA_CL_SD_CTL_RUN(1), HDA_CL_SD_CTL_RUN(run));
  120. retries = 300;
  121. do {
  122. udelay(3);
  123. /* waiting for hardware to report the stream Run bit set */
  124. val = snd_sof_dsp_read(sdev, HDA_DSP_BAR,
  125. sd_offset + SOF_HDA_ADSP_REG_CL_SD_CTL);
  126. val &= HDA_CL_SD_CTL_RUN(1);
  127. if (enable && val)
  128. break;
  129. else if (!enable && !val)
  130. break;
  131. } while (--retries);
  132. if (retries == 0)
  133. dev_err(sdev->dev, "%s: failed to set Run bit=%d enable=%d\n",
  134. __func__, val, enable);
  135. }
  136. static void cl_skl_cldma_stream_clear(struct snd_sof_dev *sdev)
  137. {
  138. int sd_offset = SOF_HDA_ADSP_LOADER_BASE;
  139. /* make sure Run bit is cleared before setting stream register */
  140. cl_skl_cldma_stream_run(sdev, 0);
  141. /* Disable the Interrupt On Completion, FIFO Error Interrupt,
  142. * Descriptor Error Interrupt and set the cldma stream number to 0.
  143. */
  144. snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
  145. sd_offset + SOF_HDA_ADSP_REG_CL_SD_CTL,
  146. HDA_CL_SD_CTL_INT_MASK, HDA_CL_SD_CTL_INT(0));
  147. snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
  148. sd_offset + SOF_HDA_ADSP_REG_CL_SD_CTL,
  149. HDA_CL_SD_CTL_STRM(0xf), HDA_CL_SD_CTL_STRM(0));
  150. snd_sof_dsp_write(sdev, HDA_DSP_BAR,
  151. sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL, HDA_CL_SD_BDLPLBA(0));
  152. snd_sof_dsp_write(sdev, HDA_DSP_BAR,
  153. sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU, 0);
  154. /* Set the Cyclic Buffer Length to 0. */
  155. snd_sof_dsp_write(sdev, HDA_DSP_BAR,
  156. sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL, 0);
  157. /* Set the Last Valid Index. */
  158. snd_sof_dsp_write(sdev, HDA_DSP_BAR,
  159. sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI, 0);
  160. }
  161. static void cl_skl_cldma_setup_spb(struct snd_sof_dev *sdev,
  162. unsigned int size, bool enable)
  163. {
  164. int sd_offset = SOF_DSP_REG_CL_SPBFIFO;
  165. if (enable)
  166. snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
  167. sd_offset + SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL,
  168. HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_MASK,
  169. HDA_CL_SPBFIFO_SPBFCCTL_SPIBE(1));
  170. snd_sof_dsp_write(sdev, HDA_DSP_BAR,
  171. sd_offset + SOF_HDA_ADSP_REG_CL_SPBFIFO_SPIB, size);
  172. }
  173. static void cl_skl_cldma_set_intr(struct snd_sof_dev *sdev, bool enable)
  174. {
  175. u32 val = enable ? HDA_DSP_ADSPIC_CL_DMA : 0;
  176. snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPIC,
  177. HDA_DSP_ADSPIC_CL_DMA, val);
  178. }
  179. static void cl_skl_cldma_cleanup_spb(struct snd_sof_dev *sdev)
  180. {
  181. int sd_offset = SOF_DSP_REG_CL_SPBFIFO;
  182. snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
  183. sd_offset + SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL,
  184. HDA_CL_SPBFIFO_SPBFCCTL_SPIBE_MASK,
  185. HDA_CL_SPBFIFO_SPBFCCTL_SPIBE(0));
  186. snd_sof_dsp_write(sdev, HDA_DSP_BAR,
  187. sd_offset + SOF_HDA_ADSP_REG_CL_SPBFIFO_SPIB, 0);
  188. }
  189. static void cl_skl_cldma_setup_controller(struct snd_sof_dev *sdev,
  190. struct snd_dma_buffer *dmab_bdl,
  191. unsigned int max_size, u32 count)
  192. {
  193. int sd_offset = SOF_HDA_ADSP_LOADER_BASE;
  194. /* Clear the stream first and then set it. */
  195. cl_skl_cldma_stream_clear(sdev);
  196. /* setting the stream register */
  197. snd_sof_dsp_write(sdev, HDA_DSP_BAR,
  198. sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
  199. HDA_CL_SD_BDLPLBA(dmab_bdl->addr));
  200. snd_sof_dsp_write(sdev, HDA_DSP_BAR,
  201. sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
  202. HDA_CL_SD_BDLPUBA(dmab_bdl->addr));
  203. /* Set the Cyclic Buffer Length. */
  204. snd_sof_dsp_write(sdev, HDA_DSP_BAR,
  205. sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL, max_size);
  206. /* Set the Last Valid Index. */
  207. snd_sof_dsp_write(sdev, HDA_DSP_BAR,
  208. sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI, count - 1);
  209. /* Set the Interrupt On Completion, FIFO Error Interrupt,
  210. * Descriptor Error Interrupt and the cldma stream number.
  211. */
  212. snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
  213. sd_offset + SOF_HDA_ADSP_REG_CL_SD_CTL,
  214. HDA_CL_SD_CTL_INT_MASK, HDA_CL_SD_CTL_INT(1));
  215. snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR,
  216. sd_offset + SOF_HDA_ADSP_REG_CL_SD_CTL,
  217. HDA_CL_SD_CTL_STRM(0xf),
  218. HDA_CL_SD_CTL_STRM(1));
  219. }
  220. static int cl_stream_prepare_skl(struct snd_sof_dev *sdev,
  221. struct snd_dma_buffer *dmab,
  222. struct snd_dma_buffer *dmab_bdl)
  223. {
  224. unsigned int bufsize = HDA_SKL_CLDMA_MAX_BUFFER_SIZE;
  225. __le32 *bdl;
  226. int frags;
  227. int ret;
  228. ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, sdev->dev, bufsize, dmab);
  229. if (ret < 0) {
  230. dev_err(sdev->dev, "%s: failed to alloc fw buffer: %x\n", __func__, ret);
  231. return ret;
  232. }
  233. ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, sdev->dev, bufsize, dmab_bdl);
  234. if (ret < 0) {
  235. dev_err(sdev->dev, "%s: failed to alloc blde: %x\n", __func__, ret);
  236. snd_dma_free_pages(dmab);
  237. return ret;
  238. }
  239. bdl = (__le32 *)dmab_bdl->area;
  240. frags = cl_skl_cldma_setup_bdle(sdev, dmab, &bdl, bufsize, 1);
  241. cl_skl_cldma_setup_controller(sdev, dmab_bdl, bufsize, frags);
  242. return ret;
  243. }
/*
 * Tear down the code-loader DMA: disable SPIB, reset the stream registers
 * and release both DMA buffers allocated by cl_stream_prepare_skl().
 * The stream is quiesced before the buffers are freed.
 */
static void cl_cleanup_skl(struct snd_sof_dev *sdev,
			   struct snd_dma_buffer *dmab,
			   struct snd_dma_buffer *dmab_bdl)
{
	cl_skl_cldma_cleanup_spb(sdev);
	cl_skl_cldma_stream_clear(sdev);
	snd_dma_free_pages(dmab);
	snd_dma_free_pages(dmab_bdl);
}
  253. static int cl_dsp_init_skl(struct snd_sof_dev *sdev,
  254. struct snd_dma_buffer *dmab,
  255. struct snd_dma_buffer *dmab_bdl)
  256. {
  257. struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
  258. const struct sof_intel_dsp_desc *chip = hda->desc;
  259. unsigned int status;
  260. u32 flags;
  261. int ret;
  262. /* check if the init_core is already enabled, if yes, reset and make it run,
  263. * if not, powerdown and enable it again.
  264. */
  265. if (hda_dsp_core_is_enabled(sdev, chip->init_core_mask)) {
  266. /* if enabled, reset it, and run the init_core. */
  267. ret = hda_dsp_core_stall_reset(sdev, chip->init_core_mask);
  268. if (ret < 0)
  269. goto err;
  270. ret = hda_dsp_core_run(sdev, chip->init_core_mask);
  271. if (ret < 0) {
  272. dev_err(sdev->dev, "%s: dsp core start failed %d\n", __func__, ret);
  273. goto err;
  274. }
  275. } else {
  276. /* if not enabled, power down it first and then powerup and run
  277. * the init_core.
  278. */
  279. ret = hda_dsp_core_reset_power_down(sdev, chip->init_core_mask);
  280. if (ret < 0) {
  281. dev_err(sdev->dev, "%s: dsp core0 disable fail: %d\n", __func__, ret);
  282. goto err;
  283. }
  284. ret = hda_dsp_enable_core(sdev, chip->init_core_mask);
  285. if (ret < 0) {
  286. dev_err(sdev->dev, "%s: dsp core0 enable fail: %d\n", __func__, ret);
  287. goto err;
  288. }
  289. }
  290. /* prepare DMA for code loader stream */
  291. ret = cl_stream_prepare_skl(sdev, dmab, dmab_bdl);
  292. if (ret < 0) {
  293. dev_err(sdev->dev, "%s: dma prepare fw loading err: %x\n", __func__, ret);
  294. return ret;
  295. }
  296. /* enable the interrupt */
  297. snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, HDA_DSP_REG_ADSPIC,
  298. HDA_DSP_ADSPIC_IPC, HDA_DSP_ADSPIC_IPC);
  299. /* enable IPC DONE interrupt */
  300. snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, chip->ipc_ctl,
  301. HDA_DSP_REG_HIPCCTL_DONE,
  302. HDA_DSP_REG_HIPCCTL_DONE);
  303. /* enable IPC BUSY interrupt */
  304. snd_sof_dsp_update_bits(sdev, HDA_DSP_BAR, chip->ipc_ctl,
  305. HDA_DSP_REG_HIPCCTL_BUSY,
  306. HDA_DSP_REG_HIPCCTL_BUSY);
  307. /* polling the ROM init status information. */
  308. ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR,
  309. chip->rom_status_reg, status,
  310. (FSR_TO_STATE_CODE(status)
  311. == FSR_STATE_INIT_DONE),
  312. HDA_DSP_REG_POLL_INTERVAL_US,
  313. chip->rom_init_timeout *
  314. USEC_PER_MSEC);
  315. if (ret < 0)
  316. goto err;
  317. return ret;
  318. err:
  319. flags = SOF_DBG_DUMP_PCI | SOF_DBG_DUMP_MBOX;
  320. snd_sof_dsp_dbg_dump(sdev, "Boot failed\n", flags);
  321. cl_cleanup_skl(sdev, dmab, dmab_bdl);
  322. hda_dsp_core_reset_power_down(sdev, chip->init_core_mask);
  323. return ret;
  324. }
  325. static void cl_skl_cldma_fill_buffer(struct snd_sof_dev *sdev,
  326. struct snd_dma_buffer *dmab,
  327. unsigned int bufsize,
  328. unsigned int copysize,
  329. const void *curr_pos,
  330. bool intr_enable)
  331. {
  332. struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
  333. /* copy the image into the buffer with the maximum buffer size. */
  334. unsigned int size = (bufsize == copysize) ? bufsize : copysize;
  335. memcpy(dmab->area, curr_pos, size);
  336. /* Set the wait condition for every load. */
  337. hda->code_loading = 1;
  338. /* Set the interrupt. */
  339. if (intr_enable)
  340. cl_skl_cldma_set_intr(sdev, true);
  341. /* Set the SPB. */
  342. cl_skl_cldma_setup_spb(sdev, size, true);
  343. /* Trigger the code loading stream. */
  344. cl_skl_cldma_stream_run(sdev, true);
  345. }
  346. static int cl_skl_cldma_wait_interruptible(struct snd_sof_dev *sdev,
  347. bool intr_wait)
  348. {
  349. struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
  350. const struct sof_intel_dsp_desc *chip = hda->desc;
  351. int sd_offset = SOF_HDA_ADSP_LOADER_BASE;
  352. u8 cl_dma_intr_status;
  353. /*
  354. * Wait for CLDMA interrupt to inform the binary segment transfer is
  355. * complete.
  356. */
  357. if (!wait_event_timeout(hda->waitq, !hda->code_loading,
  358. msecs_to_jiffies(HDA_SKL_WAIT_TIMEOUT))) {
  359. dev_err(sdev->dev, "cldma copy timeout\n");
  360. dev_err(sdev->dev, "ROM code=%#x: FW status=%#x\n",
  361. snd_sof_dsp_read(sdev, HDA_DSP_BAR, HDA_DSP_SRAM_REG_ROM_ERROR),
  362. snd_sof_dsp_read(sdev, HDA_DSP_BAR, chip->rom_status_reg));
  363. return -EIO;
  364. }
  365. /* now check DMA interrupt status */
  366. cl_dma_intr_status = snd_sof_dsp_read(sdev, HDA_DSP_BAR,
  367. sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS);
  368. if (!(cl_dma_intr_status & HDA_CL_DMA_SD_INT_COMPLETE)) {
  369. dev_err(sdev->dev, "cldma copy failed\n");
  370. return -EIO;
  371. }
  372. dev_dbg(sdev->dev, "cldma buffer copy complete\n");
  373. return 0;
  374. }
  375. static int
  376. cl_skl_cldma_copy_to_buf(struct snd_sof_dev *sdev,
  377. struct snd_dma_buffer *dmab,
  378. const void *bin,
  379. u32 total_size, u32 bufsize)
  380. {
  381. unsigned int bytes_left = total_size;
  382. const void *curr_pos = bin;
  383. int ret;
  384. if (total_size <= 0)
  385. return -EINVAL;
  386. while (bytes_left > 0) {
  387. if (bytes_left > bufsize) {
  388. dev_dbg(sdev->dev, "cldma copy %#x bytes\n", bufsize);
  389. cl_skl_cldma_fill_buffer(sdev, dmab, bufsize, bufsize, curr_pos, true);
  390. ret = cl_skl_cldma_wait_interruptible(sdev, false);
  391. if (ret < 0) {
  392. dev_err(sdev->dev, "%s: fw failed to load. %#x bytes remaining\n",
  393. __func__, bytes_left);
  394. return ret;
  395. }
  396. bytes_left -= bufsize;
  397. curr_pos += bufsize;
  398. } else {
  399. dev_dbg(sdev->dev, "cldma copy %#x bytes\n", bytes_left);
  400. cl_skl_cldma_set_intr(sdev, false);
  401. cl_skl_cldma_fill_buffer(sdev, dmab, bufsize, bytes_left, curr_pos, false);
  402. return 0;
  403. }
  404. }
  405. return bytes_left;
  406. }
  407. static int cl_copy_fw_skl(struct snd_sof_dev *sdev,
  408. struct snd_dma_buffer *dmab)
  409. {
  410. struct snd_sof_pdata *plat_data = sdev->pdata;
  411. const struct firmware *fw = plat_data->fw;
  412. struct firmware stripped_firmware;
  413. unsigned int bufsize = HDA_SKL_CLDMA_MAX_BUFFER_SIZE;
  414. int ret;
  415. stripped_firmware.data = plat_data->fw->data + plat_data->fw_offset;
  416. stripped_firmware.size = plat_data->fw->size - plat_data->fw_offset;
  417. dev_dbg(sdev->dev, "firmware size: %#zx buffer size %#x\n", fw->size, bufsize);
  418. ret = cl_skl_cldma_copy_to_buf(sdev, dmab, stripped_firmware.data,
  419. stripped_firmware.size, bufsize);
  420. if (ret < 0)
  421. dev_err(sdev->dev, "%s: fw copy failed %d\n", __func__, ret);
  422. return ret;
  423. }
/*
 * Boot the SOF firmware on SKL-class DSPs via the code-loader DMA.
 *
 * Sequence: initialize the ROM (with one retry), stream the firmware
 * image through CLDMA, then poll the ROM status register until the base
 * firmware reports it has entered.
 *
 * Returns chip->init_core_mask (the mask of cores booted) on success, or
 * a negative error code. On any failure after ROM init, the DSP core is
 * powered down and the CLDMA buffers are released.
 */
int hda_dsp_cl_boot_firmware_skl(struct snd_sof_dev *sdev)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	const struct sof_intel_dsp_desc *chip = hda->desc;
	struct snd_dma_buffer dmab_bdl;
	struct snd_dma_buffer dmab;
	unsigned int reg;
	u32 flags;
	int ret;

	ret = cl_dsp_init_skl(sdev, &dmab, &dmab_bdl);

	/* retry enabling core and ROM load. seemed to help */
	if (ret < 0) {
		ret = cl_dsp_init_skl(sdev, &dmab, &dmab_bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "Error code=%#x: FW status=%#x\n",
				snd_sof_dsp_read(sdev, HDA_DSP_BAR, HDA_DSP_SRAM_REG_ROM_ERROR),
				snd_sof_dsp_read(sdev, HDA_DSP_BAR, chip->rom_status_reg));
			dev_err(sdev->dev, "Core En/ROM load fail:%d\n", ret);
			return ret;
		}
	}

	dev_dbg(sdev->dev, "ROM init successful\n");

	/* at this point DSP ROM has been initialized and should be ready for
	 * code loading and firmware boot
	 */
	ret = cl_copy_fw_skl(sdev, &dmab);
	if (ret < 0) {
		dev_err(sdev->dev, "%s: load firmware failed : %d\n", __func__, ret);
		goto err;
	}

	/* wait for the base firmware to report it has entered */
	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_BAR,
					    chip->rom_status_reg, reg,
					    (FSR_TO_STATE_CODE(reg)
					     == FSR_STATE_ROM_BASEFW_ENTERED),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_BASEFW_TIMEOUT_US);

	dev_dbg(sdev->dev, "Firmware download successful, booting...\n");

	/* the loader stream is no longer needed once the image is in */
	cl_skl_cldma_stream_run(sdev, false);
	cl_cleanup_skl(sdev, &dmab, &dmab_bdl);

	if (!ret)
		return chip->init_core_mask;

	/* poll above timed out: report its error code */
	return ret;

err:
	flags = SOF_DBG_DUMP_PCI | SOF_DBG_DUMP_MBOX;
	snd_sof_dsp_dbg_dump(sdev, "Boot failed\n", flags);

	/* power down DSP */
	hda_dsp_core_reset_power_down(sdev, chip->init_core_mask);
	cl_skl_cldma_stream_run(sdev, false);
	cl_cleanup_skl(sdev, &dmab, &dmab_bdl);

	dev_err(sdev->dev, "%s: load fw failed err: %d\n", __func__, ret);
	return ret;
}