msm_audio_ion.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2013-2019, The Linux Foundation. All rights reserved.
 */

#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/err.h>
#include <linux/delay.h>
#include <linux/slab.h>
#include <linux/mutex.h>
#include <linux/list.h>
#include <linux/dma-mapping.h>
#include <linux/dma-buf.h>
#include <linux/platform_device.h>
#include <linux/of_device.h>
#include <linux/export.h>
#include <linux/ion_kernel.h>
#include <ipc/apr.h>
#include <dsp/msm_audio_ion.h>

#define MSM_AUDIO_ION_PROBED (1 << 0)

#define MSM_AUDIO_ION_PHYS_ADDR(alloc_data) \
	alloc_data->table->sgl->dma_address

#define MSM_AUDIO_SMMU_SID_OFFSET 32

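/*
 * Illustrative note (not part of the original driver): with SMMU enabled,
 * the 64-bit address handed back to clients packs the SMMU stream ID into
 * the upper 32 bits and the IOVA into the lower bits, roughly
 *
 *	addr = (smmu_sid << MSM_AUDIO_SMMU_SID_OFFSET) | iova;
 *
 * For example, a SID of 0x1 and an IOVA of 0x10000000 would be reported as
 * 0x0000000110000000. See msm_audio_ion_get_phys() and
 * msm_audio_ion_dma_map() below for where this composition happens.
 */
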
struct msm_audio_ion_private {
	bool smmu_enabled;
	struct device *cb_dev;
	u8 device_status;
	struct list_head alloc_list;
	struct mutex list_mutex;
	u64 smmu_sid_bits;
	u32 smmu_version;
};

struct msm_audio_alloc_data {
	size_t len;
	void *vaddr;
	struct dma_buf *dma_buf;
	struct dma_buf_attachment *attach;
	struct sg_table *table;
	struct list_head list;
};

static struct msm_audio_ion_private msm_audio_ion_data = {0,};

static void msm_audio_ion_add_allocation(
	struct msm_audio_ion_private *msm_audio_ion_data,
	struct msm_audio_alloc_data *alloc_data)
{
	/*
	 * Since these APIs can be invoked by multiple
	 * clients, the list of allocations must always
	 * be protected by the mutex.
	 */
	mutex_lock(&(msm_audio_ion_data->list_mutex));
	list_add_tail(&(alloc_data->list),
		      &(msm_audio_ion_data->alloc_list));
	mutex_unlock(&(msm_audio_ion_data->list_mutex));
}

static int msm_audio_dma_buf_map(struct dma_buf *dma_buf,
				 dma_addr_t *addr, size_t *len)
{
	struct msm_audio_alloc_data *alloc_data;
	struct device *cb_dev;
	unsigned long ionflag = 0;
	int rc = 0;

	cb_dev = msm_audio_ion_data.cb_dev;

	/* Data required per buffer mapping */
	alloc_data = kzalloc(sizeof(*alloc_data), GFP_KERNEL);
	if (!alloc_data)
		return -ENOMEM;

	alloc_data->dma_buf = dma_buf;
	alloc_data->len = dma_buf->size;
	*len = dma_buf->size;

	/* Attach the dma_buf to the context bank device */
	alloc_data->attach = dma_buf_attach(alloc_data->dma_buf,
					    cb_dev);
	if (IS_ERR(alloc_data->attach)) {
		rc = PTR_ERR(alloc_data->attach);
		dev_err(cb_dev,
			"%s: Fail to attach dma_buf to CB, rc = %d\n",
			__func__, rc);
		goto free_alloc_data;
	}

	/* For uncached buffers, avoid cache maintenance */
	rc = dma_buf_get_flags(alloc_data->dma_buf, &ionflag);
	if (rc) {
		dev_err(cb_dev, "%s: dma_buf_get_flags failed: %d\n",
			__func__, rc);
		goto detach_dma_buf;
	}
	if (!(ionflag & ION_FLAG_CACHED))
		alloc_data->attach->dma_map_attrs |= DMA_ATTR_SKIP_CPU_SYNC;

	/*
	 * Get the scatter-gather list.
	 * There is no information on whether this is a read or a
	 * write buffer, so request a bidirectional mapping to
	 * accommodate both cases.
	 */
	alloc_data->table = dma_buf_map_attachment(alloc_data->attach,
						   DMA_BIDIRECTIONAL);
	if (IS_ERR(alloc_data->table)) {
		rc = PTR_ERR(alloc_data->table);
		dev_err(cb_dev,
			"%s: Fail to map attachment, rc = %d\n",
			__func__, rc);
		goto detach_dma_buf;
	}

	/* physical address from mapping */
	*addr = MSM_AUDIO_ION_PHYS_ADDR(alloc_data);

	msm_audio_ion_add_allocation(&msm_audio_ion_data,
				     alloc_data);
	return rc;

detach_dma_buf:
	dma_buf_detach(alloc_data->dma_buf,
		       alloc_data->attach);
free_alloc_data:
	kfree(alloc_data);
	return rc;
}

static int msm_audio_dma_buf_unmap(struct dma_buf *dma_buf)
{
	int rc = 0;
	struct msm_audio_alloc_data *alloc_data = NULL;
	struct list_head *ptr, *next;
	struct device *cb_dev = msm_audio_ion_data.cb_dev;
	bool found = false;

	/*
	 * Though list_for_each_safe is delete safe, the lock
	 * must still be held explicitly to avoid racing with
	 * additions to the list.
	 */
	mutex_lock(&(msm_audio_ion_data.list_mutex));
	list_for_each_safe(ptr, next,
			   &(msm_audio_ion_data.alloc_list)) {
		alloc_data = list_entry(ptr, struct msm_audio_alloc_data,
					list);
		if (alloc_data->dma_buf == dma_buf) {
			found = true;
			dma_buf_unmap_attachment(alloc_data->attach,
						 alloc_data->table,
						 DMA_BIDIRECTIONAL);
			dma_buf_detach(alloc_data->dma_buf,
				       alloc_data->attach);
			dma_buf_put(alloc_data->dma_buf);
			list_del(&(alloc_data->list));
			kfree(alloc_data);
			break;
		}
	}
	mutex_unlock(&(msm_audio_ion_data.list_mutex));

	if (!found) {
		dev_err(cb_dev,
			"%s: cannot find allocation, dma_buf %pK",
			__func__, dma_buf);
		rc = -EINVAL;
	}

	return rc;
}

static int msm_audio_ion_get_phys(struct dma_buf *dma_buf,
				  dma_addr_t *addr, size_t *len)
{
	int rc = 0;

	rc = msm_audio_dma_buf_map(dma_buf, addr, len);
	if (rc) {
		pr_err("%s: failed to map DMA buf, err = %d\n",
		       __func__, rc);
		goto err;
	}
	if (msm_audio_ion_data.smmu_enabled) {
		/* Append the SMMU SID information to the IOVA address */
		*addr |= msm_audio_ion_data.smmu_sid_bits;
	}

	pr_debug("phys=%pK, len=%zd, rc=%d\n", &(*addr), *len, rc);
err:
	return rc;
}

int msm_audio_ion_get_smmu_info(struct device **cb_dev,
				u64 *smmu_sid)
{
	if (!cb_dev || !smmu_sid) {
		pr_err("%s: Invalid params\n",
		       __func__);
		return -EINVAL;
	}

	if (!msm_audio_ion_data.cb_dev ||
	    !msm_audio_ion_data.smmu_sid_bits) {
		pr_err("%s: Params not initialized\n",
		       __func__);
		return -EINVAL;
	}

	*cb_dev = msm_audio_ion_data.cb_dev;
	*smmu_sid = msm_audio_ion_data.smmu_sid_bits;

	return 0;
}

static void *msm_audio_ion_map_kernel(struct dma_buf *dma_buf)
{
	int rc = 0;
	void *addr = NULL;
	struct msm_audio_alloc_data *alloc_data = NULL;

	rc = dma_buf_begin_cpu_access(dma_buf, DMA_BIDIRECTIONAL);
	if (rc) {
		pr_err("%s: kmap dma_buf_begin_cpu_access fail\n", __func__);
		goto exit;
	}

	addr = dma_buf_vmap(dma_buf);
	if (!addr) {
		pr_err("%s: kernel mapping of dma_buf failed\n",
		       __func__);
		goto exit;
	}

	/*
	 * TBD: remove the below section once new API
	 * for mapping kernel virtual address is available.
	 */
	mutex_lock(&(msm_audio_ion_data.list_mutex));
	list_for_each_entry(alloc_data, &(msm_audio_ion_data.alloc_list),
			    list) {
		if (alloc_data->dma_buf == dma_buf) {
			alloc_data->vaddr = addr;
			break;
		}
	}
	mutex_unlock(&(msm_audio_ion_data.list_mutex));

exit:
	return addr;
}

static int msm_audio_ion_unmap_kernel(struct dma_buf *dma_buf)
{
	int rc = 0;
	void *vaddr = NULL;
	struct msm_audio_alloc_data *alloc_data = NULL;
	struct device *cb_dev = msm_audio_ion_data.cb_dev;

	/*
	 * TBD: remove the below section once new API
	 * for unmapping kernel virtual address is available.
	 */
	mutex_lock(&(msm_audio_ion_data.list_mutex));
	list_for_each_entry(alloc_data, &(msm_audio_ion_data.alloc_list),
			    list) {
		if (alloc_data->dma_buf == dma_buf) {
			vaddr = alloc_data->vaddr;
			break;
		}
	}
	mutex_unlock(&(msm_audio_ion_data.list_mutex));

	if (!vaddr) {
		dev_err(cb_dev,
			"%s: cannot find allocation for dma_buf %pK",
			__func__, dma_buf);
		rc = -EINVAL;
		goto err;
	}

	dma_buf_vunmap(dma_buf, vaddr);

	rc = dma_buf_end_cpu_access(dma_buf, DMA_BIDIRECTIONAL);
	if (rc) {
		dev_err(cb_dev, "%s: kmap dma_buf_end_cpu_access fail\n",
			__func__);
		goto err;
	}

err:
	return rc;
}

static int msm_audio_ion_map_buf(struct dma_buf *dma_buf, dma_addr_t *paddr,
				 size_t *plen, void **vaddr)
{
	int rc = 0;

	rc = msm_audio_ion_get_phys(dma_buf, paddr, plen);
	if (rc) {
		pr_err("%s: ION Get Physical for AUDIO failed, rc = %d\n",
		       __func__, rc);
		dma_buf_put(dma_buf);
		goto err;
	}

	*vaddr = msm_audio_ion_map_kernel(dma_buf);
	if (IS_ERR_OR_NULL(*vaddr)) {
		pr_err("%s: ION memory mapping for AUDIO failed\n", __func__);
		rc = -ENOMEM;
		msm_audio_dma_buf_unmap(dma_buf);
		goto err;
	}

err:
	return rc;
}

static u32 msm_audio_ion_get_smmu_sid_mode32(void)
{
	if (msm_audio_ion_data.smmu_enabled)
		return upper_32_bits(msm_audio_ion_data.smmu_sid_bits);
	else
		return 0;
}

/**
 * msm_audio_ion_alloc -
 *        Allocates ION memory for a given client
 *
 * @dma_buf: dma_buf for the ION memory
 * @bufsz: buffer size
 * @paddr: Physical address to be assigned with allocated region
 * @plen: length of allocated region to be assigned
 * @vaddr: virtual address to be assigned
 *
 * Returns 0 on success or error on failure
 */
int msm_audio_ion_alloc(struct dma_buf **dma_buf, size_t bufsz,
			dma_addr_t *paddr, size_t *plen, void **vaddr)
{
	int rc = -EINVAL;
	unsigned long err_ion_ptr = 0;

	if (!(msm_audio_ion_data.device_status & MSM_AUDIO_ION_PROBED)) {
		pr_debug("%s: probe is not done, deferred\n", __func__);
		return -EPROBE_DEFER;
	}
	if (!dma_buf || !paddr || !vaddr || !bufsz || !plen) {
		pr_err("%s: Invalid params\n", __func__);
		return -EINVAL;
	}

	if (msm_audio_ion_data.smmu_enabled == true) {
		pr_debug("%s: system heap is used\n", __func__);
		*dma_buf = ion_alloc(bufsz, ION_HEAP(ION_SYSTEM_HEAP_ID), 0);
	} else {
		pr_debug("%s: audio heap is used\n", __func__);
		*dma_buf = ion_alloc(bufsz, ION_HEAP(ION_AUDIO_HEAP_ID), 0);
	}
	if (IS_ERR_OR_NULL((void *)(*dma_buf))) {
		if (IS_ERR((void *)(*dma_buf)))
			err_ion_ptr = PTR_ERR((int *)(*dma_buf));
		pr_err("%s: ION alloc fail err ptr=%ld, smmu_enabled=%d\n",
		       __func__, err_ion_ptr, msm_audio_ion_data.smmu_enabled);
		rc = -ENOMEM;
		goto err;
	}

	rc = msm_audio_ion_map_buf(*dma_buf, paddr, plen, vaddr);
	if (rc) {
		pr_err("%s: failed to map ION buf, rc = %d\n", __func__, rc);
		goto err;
	}
	pr_debug("%s: mapped address = %pK, size=%zd\n", __func__,
		 *vaddr, bufsz);

	memset(*vaddr, 0, bufsz);

err:
	return rc;
}
EXPORT_SYMBOL(msm_audio_ion_alloc);

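/*
 * Illustrative usage sketch (not part of the original file): a client driver
 * typically pairs msm_audio_ion_alloc() with msm_audio_ion_free(). The
 * variable names below are hypothetical; only the signatures visible in this
 * file are assumed.
 *
 *	struct dma_buf *dma_buf = NULL;
 *	dma_addr_t paddr;
 *	size_t plen;
 *	void *vaddr = NULL;
 *	int rc;
 *
 *	rc = msm_audio_ion_alloc(&dma_buf, PAGE_SIZE, &paddr, &plen, &vaddr);
 *	if (rc)
 *		return rc;	// -EPROBE_DEFER until this driver has probed
 *	// ... program paddr/plen into the DSP, touch the buffer via vaddr ...
 *	msm_audio_ion_free(dma_buf);
 */
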
/**
 * msm_audio_ion_dma_map -
 *        Memory maps a given DMA buffer
 *
 * @phys_addr: Physical address of DMA buffer to be mapped
 * @iova_base: IOVA address of memory mapped DMA buffer
 * @size: buffer size
 * @dir: DMA direction
 *
 * Returns 0 on success or error on failure
 */
int msm_audio_ion_dma_map(dma_addr_t *phys_addr, dma_addr_t *iova_base,
			  u32 size, enum dma_data_direction dir)
{
	dma_addr_t iova;
	struct device *cb_dev = msm_audio_ion_data.cb_dev;

	if (!phys_addr || !iova_base || !size)
		return -EINVAL;

	iova = dma_map_resource(cb_dev, *phys_addr, size,
				dir, 0);
	if (dma_mapping_error(cb_dev, iova)) {
		pr_err("%s: dma_mapping_error\n", __func__);
		return -EIO;
	}
	pr_debug("%s: dma_mapping_success iova:0x%lx\n", __func__,
		 (unsigned long)iova);
	if (msm_audio_ion_data.smmu_enabled)
		/* Append the SMMU SID information to the IOVA address */
		iova |= msm_audio_ion_data.smmu_sid_bits;

	*iova_base = iova;

	return 0;
}
EXPORT_SYMBOL(msm_audio_ion_dma_map);

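/*
 * Illustrative usage sketch (not part of the original file): a caller that
 * already holds a physical address (for example a carved-out region) can use
 * msm_audio_ion_dma_map() to obtain the IOVA, with the SMMU SID folded into
 * the upper bits, that the DSP should be programmed with. The phys/size
 * variables are hypothetical.
 *
 *	dma_addr_t phys = region_phys;
 *	dma_addr_t iova;
 *	int rc;
 *
 *	rc = msm_audio_ion_dma_map(&phys, &iova, region_size,
 *				   DMA_BIDIRECTIONAL);
 *	if (rc)
 *		return rc;
 *	// iova is what gets sent to the DSP when SMMU is enabled
 */
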
/**
 * msm_audio_ion_import -
 *        Imports an ION buffer with the given file descriptor
 *
 * @dma_buf: dma_buf for the ION memory
 * @fd: file descriptor for the ION memory
 * @ionflag: flags associated with ION buffer
 * @bufsz: buffer size
 * @paddr: Physical address to be assigned with allocated region
 * @plen: length of allocated region to be assigned
 * @vaddr: virtual address to be assigned
 *
 * Returns 0 on success or error on failure
 */
int msm_audio_ion_import(struct dma_buf **dma_buf, int fd,
			 unsigned long *ionflag, size_t bufsz,
			 dma_addr_t *paddr, size_t *plen, void **vaddr)
{
	int rc = 0;

	if (!(msm_audio_ion_data.device_status & MSM_AUDIO_ION_PROBED)) {
		pr_debug("%s: probe is not done, deferred\n", __func__);
		return -EPROBE_DEFER;
	}

	if (!dma_buf || !paddr || !vaddr || !plen) {
		pr_err("%s: Invalid params\n", __func__);
		return -EINVAL;
	}

	/* bufsz should be 0 and fd shouldn't be 0 as of now */
	*dma_buf = dma_buf_get(fd);
	pr_debug("%s: dma_buf =%pK, fd=%d\n", __func__, *dma_buf, fd);
	if (IS_ERR_OR_NULL((void *)(*dma_buf))) {
		pr_err("%s: dma_buf_get failed\n", __func__);
		rc = -EINVAL;
		goto err;
	}

	if (ionflag != NULL) {
		rc = dma_buf_get_flags(*dma_buf, ionflag);
		if (rc) {
			pr_err("%s: could not get flags for the dma_buf\n",
			       __func__);
			goto err_ion_flag;
		}
	}

	rc = msm_audio_ion_map_buf(*dma_buf, paddr, plen, vaddr);
	if (rc) {
		pr_err("%s: failed to map ION buf, rc = %d\n", __func__, rc);
		goto err;
	}
	pr_debug("%s: mapped address = %pK, size=%zd\n", __func__,
		 *vaddr, bufsz);

	return 0;

err_ion_flag:
	dma_buf_put(*dma_buf);
err:
	*dma_buf = NULL;
	return rc;
}
EXPORT_SYMBOL(msm_audio_ion_import);

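/*
 * Illustrative usage sketch (not part of the original file): a typical caller
 * imports a dma-buf fd received from user space (e.g. via an ioctl) and then
 * uses the returned physical and virtual addresses. The fd variable is
 * hypothetical; bufsz is passed as 0 per the comment in the function above.
 *
 *	struct dma_buf *dma_buf = NULL;
 *	unsigned long ionflag = 0;
 *	dma_addr_t paddr;
 *	size_t plen;
 *	void *vaddr = NULL;
 *	int rc;
 *
 *	rc = msm_audio_ion_import(&dma_buf, fd, &ionflag, 0,
 *				  &paddr, &plen, &vaddr);
 *	if (rc)
 *		return rc;
 *	// ... use paddr/plen/vaddr; ionflag tells whether the buffer is cached
 *	msm_audio_ion_free(dma_buf);
 */
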
/**
 * msm_audio_ion_free -
 *        Frees ION memory for the given dma_buf
 *
 * @dma_buf: dma_buf for the ION memory
 *
 * Returns 0 on success or error on failure
 */
int msm_audio_ion_free(struct dma_buf *dma_buf)
{
	int ret = 0;

	if (!dma_buf) {
		pr_err("%s: dma_buf invalid\n", __func__);
		return -EINVAL;
	}

	ret = msm_audio_ion_unmap_kernel(dma_buf);
	if (ret)
		return ret;

	msm_audio_dma_buf_unmap(dma_buf);

	return 0;
}
EXPORT_SYMBOL(msm_audio_ion_free);

/**
 * msm_audio_ion_mmap -
 *        Audio ION memory map
 *
 * @abuff: audio buf pointer
 * @vma: virtual mem area
 *
 * Returns 0 on success or error on failure
 */
int msm_audio_ion_mmap(struct audio_buffer *abuff,
		       struct vm_area_struct *vma)
{
	struct msm_audio_alloc_data *alloc_data = NULL;
	struct sg_table *table;
	unsigned long addr = vma->vm_start;
	unsigned long offset = vma->vm_pgoff * PAGE_SIZE;
	struct scatterlist *sg;
	unsigned int i;
	struct page *page;
	int ret = 0;
	bool found = false;
	struct device *cb_dev = msm_audio_ion_data.cb_dev;

	mutex_lock(&(msm_audio_ion_data.list_mutex));
	list_for_each_entry(alloc_data, &(msm_audio_ion_data.alloc_list),
			    list) {
		if (alloc_data->dma_buf == abuff->dma_buf) {
			found = true;
			table = alloc_data->table;
			break;
		}
	}
	mutex_unlock(&(msm_audio_ion_data.list_mutex));

	if (!found) {
		dev_err(cb_dev,
			"%s: cannot find allocation, dma_buf %pK",
			__func__, abuff->dma_buf);
		return -EINVAL;
	}
	/* uncached */
	vma->vm_page_prot = pgprot_writecombine(vma->vm_page_prot);

	/* We need to check if a page is associated with this sg list because:
	 * If the allocation came from a carveout we currently don't have
	 * pages associated with carved out memory. This might change in the
	 * future and we can remove this check and the else statement.
	 */
	page = sg_page(table->sgl);
	if (page) {
		pr_debug("%s: page is NOT null\n", __func__);
		for_each_sg(table->sgl, sg, table->nents, i) {
			unsigned long remainder = vma->vm_end - addr;
			unsigned long len = sg->length;

			page = sg_page(sg);

			if (offset >= len) {
				offset -= len;
				continue;
			} else if (offset) {
				page += offset / PAGE_SIZE;
				len -= offset;
				offset = 0;
			}
			len = min(len, remainder);
			pr_debug("vma=%pK, addr=%x len=%ld vm_start=%x vm_end=%x vm_page_prot=%lu\n",
				 vma, (unsigned int)addr, len,
				 (unsigned int)vma->vm_start,
				 (unsigned int)vma->vm_end,
				 (unsigned long)pgprot_val(vma->vm_page_prot));
			remap_pfn_range(vma, addr, page_to_pfn(page), len,
					vma->vm_page_prot);
			addr += len;
			if (addr >= vma->vm_end)
				return 0;
		}
	} else {
		pr_debug("%s: page is NULL\n", __func__);
		ret = -EINVAL;
	}

	return ret;
}
EXPORT_SYMBOL(msm_audio_ion_mmap);

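/*
 * Illustrative usage sketch (not part of the original file): an ASoC platform
 * driver would normally call this from its mmap hook so user space can map a
 * previously allocated or imported audio buffer. example_get_audio_buffer()
 * is a hypothetical helper returning a struct audio_buffer whose dma_buf was
 * set up by msm_audio_ion_alloc() or msm_audio_ion_import().
 *
 *	static int example_pcm_mmap(struct snd_pcm_substream *substream,
 *				    struct vm_area_struct *vma)
 *	{
 *		struct audio_buffer *ab = example_get_audio_buffer(substream);
 *
 *		return msm_audio_ion_mmap(ab, vma);
 *	}
 */
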
/**
 * msm_audio_ion_cache_operations -
 *        Cache operations on cached Audio ION buffers
 *
 * @abuff: audio buf pointer
 * @cache_op: cache operation to be performed
 *
 * Returns 0 on success or error on failure
 */
int msm_audio_ion_cache_operations(struct audio_buffer *abuff, int cache_op)
{
	unsigned long ionflag = 0;
	int rc = 0;

	if (!abuff) {
		pr_err("%s: Invalid params: %pK\n", __func__, abuff);
		return -EINVAL;
	}
	rc = dma_buf_get_flags(abuff->dma_buf, &ionflag);
	if (rc) {
		pr_err("%s: dma_buf_get_flags failed: %d\n", __func__, rc);
		goto cache_op_failed;
	}

	/* Has to be CACHED */
	if (ionflag & ION_FLAG_CACHED) {
		/* MSM_AUDIO_ION_INV_CACHES or MSM_AUDIO_ION_CLEAN_CACHES */
		switch (cache_op) {
		case MSM_AUDIO_ION_INV_CACHES:
		case MSM_AUDIO_ION_CLEAN_CACHES:
			dma_buf_begin_cpu_access(abuff->dma_buf,
						 DMA_BIDIRECTIONAL);
			dma_buf_end_cpu_access(abuff->dma_buf,
					       DMA_BIDIRECTIONAL);
			break;
		default:
			pr_err("%s: Invalid cache operation %d\n",
			       __func__, cache_op);
		}
	} else {
		pr_err("%s: Cache ops called on uncached buffer: %pK\n",
		       __func__, abuff->dma_buf);
		rc = -EINVAL;
	}

cache_op_failed:
	return rc;
}
EXPORT_SYMBOL(msm_audio_ion_cache_operations);

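/*
 * Illustrative usage sketch (not part of the original file): for buffers
 * allocated with ION_FLAG_CACHED, clients invalidate before reading data the
 * DSP wrote and clean after writing data the DSP will read, e.g. (with ab
 * being a hypothetical struct audio_buffer * set up earlier):
 *
 *	rc = msm_audio_ion_cache_operations(ab, MSM_AUDIO_ION_INV_CACHES);
 *	if (rc)
 *		pr_err("cache invalidate failed, rc = %d\n", rc);
 *
 * With this implementation both MSM_AUDIO_ION_INV_CACHES and
 * MSM_AUDIO_ION_CLEAN_CACHES map to a begin/end CPU-access pair on the
 * dma-buf, so the same call works for either direction.
 */
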
/**
 * msm_audio_populate_upper_32_bits -
 *        Retrieves the upper 32 bits of a 64-bit address
 *
 * @pa: 64-bit physical address
 */
u32 msm_audio_populate_upper_32_bits(dma_addr_t pa)
{
	if (sizeof(dma_addr_t) == sizeof(u32))
		return msm_audio_ion_get_smmu_sid_mode32();
	else
		return upper_32_bits(pa);
}
EXPORT_SYMBOL(msm_audio_populate_upper_32_bits);

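/*
 * Illustrative usage sketch (not part of the original file): DSP memory-map
 * commands carry a 64-bit address as two 32-bit fields, which is where this
 * helper is typically used together with lower_32_bits(). The field names
 * below are hypothetical.
 *
 *	cmd.shm_addr_lsw = lower_32_bits(paddr);
 *	cmd.shm_addr_msw = msm_audio_populate_upper_32_bits(paddr);
 *
 * On targets where dma_addr_t is 32-bit, the helper falls back to the SMMU
 * SID bits (or 0 with SMMU disabled) so the DSP still sees the full
 * SID-qualified address.
 */
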
static int msm_audio_smmu_init(struct device *dev)
{
	INIT_LIST_HEAD(&msm_audio_ion_data.alloc_list);
	mutex_init(&(msm_audio_ion_data.list_mutex));
	return 0;
}

static const struct of_device_id msm_audio_ion_dt_match[] = {
	{ .compatible = "qcom,msm-audio-ion" },
	{ }
};
MODULE_DEVICE_TABLE(of, msm_audio_ion_dt_match);

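/*
 * Illustrative device-tree sketch (not part of the original file), based on
 * the properties parsed in msm_audio_ion_probe() below; the iommus phandle
 * and SID/mask values are hypothetical:
 *
 *	qcom,msm-audio-ion {
 *		compatible = "qcom,msm-audio-ion";
 *		qcom,smmu-enabled;
 *		qcom,smmu-version = <0x2>;
 *		qcom,smmu-sid-mask = /bits/ 64 <0xf>;
 *		iommus = <&apps_smmu 0x1821 0x0>;
 *	};
 */
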
static int msm_audio_ion_probe(struct platform_device *pdev)
{
	int rc = 0;
	u64 smmu_sid = 0;
	u64 smmu_sid_mask = 0;
	const char *msm_audio_ion_dt = "qcom,smmu-enabled";
	const char *msm_audio_ion_smmu = "qcom,smmu-version";
	const char *msm_audio_ion_smmu_sid_mask = "qcom,smmu-sid-mask";
	bool smmu_enabled;
	enum apr_subsys_state q6_state;
	struct device *dev = &pdev->dev;
	struct of_phandle_args iommuspec;

	if (dev->of_node == NULL) {
		dev_err(dev,
			"%s: device tree is not found\n",
			__func__);
		msm_audio_ion_data.smmu_enabled = 0;
		return 0;
	}

	smmu_enabled = of_property_read_bool(dev->of_node,
					     msm_audio_ion_dt);
	msm_audio_ion_data.smmu_enabled = smmu_enabled;

	if (!smmu_enabled) {
		dev_dbg(dev, "%s: SMMU is Disabled\n", __func__);
		goto exit;
	}

	q6_state = apr_get_q6_state();
	if (q6_state == APR_SUBSYS_DOWN) {
		dev_dbg(dev,
			"deferring %s, adsp_state %d\n",
			__func__, q6_state);
		return -EPROBE_DEFER;
	}
	dev_dbg(dev, "%s: adsp is ready\n", __func__);

	rc = of_property_read_u32(dev->of_node,
				  msm_audio_ion_smmu,
				  &msm_audio_ion_data.smmu_version);
	if (rc) {
		dev_err(dev,
			"%s: qcom,smmu-version missing in DT node\n",
			__func__);
		return rc;
	}
	dev_dbg(dev, "%s: SMMU is Enabled. SMMU version is (%d)",
		__func__, msm_audio_ion_data.smmu_version);

	/* Get SMMU SID information from Devicetree */
	rc = of_property_read_u64(dev->of_node,
				  msm_audio_ion_smmu_sid_mask,
				  &smmu_sid_mask);
	if (rc) {
		dev_err(dev,
			"%s: qcom,smmu-sid-mask missing in DT node, using default\n",
			__func__);
		smmu_sid_mask = 0xFFFFFFFFFFFFFFFF;
	}

	rc = of_parse_phandle_with_args(dev->of_node, "iommus",
					"#iommu-cells", 0, &iommuspec);
	if (rc)
		dev_err(dev, "%s: could not get smmu SID, ret = %d\n",
			__func__, rc);
	else
		smmu_sid = (iommuspec.args[0] & smmu_sid_mask);

	msm_audio_ion_data.smmu_sid_bits =
		smmu_sid << MSM_AUDIO_SMMU_SID_OFFSET;

	if (msm_audio_ion_data.smmu_version == 0x2) {
		rc = msm_audio_smmu_init(dev);
	} else {
		dev_err(dev, "%s: smmu version invalid %d\n",
			__func__, msm_audio_ion_data.smmu_version);
		rc = -EINVAL;
	}
	if (rc)
		dev_err(dev, "%s: smmu init failed, err = %d\n",
			__func__, rc);

exit:
	if (!rc)
		msm_audio_ion_data.device_status |= MSM_AUDIO_ION_PROBED;

	msm_audio_ion_data.cb_dev = dev;

	return rc;
}

static int msm_audio_ion_remove(struct platform_device *pdev)
{
	struct device *audio_cb_dev;

	audio_cb_dev = msm_audio_ion_data.cb_dev;

	msm_audio_ion_data.smmu_enabled = 0;
	msm_audio_ion_data.device_status = 0;
	return 0;
}

static struct platform_driver msm_audio_ion_driver = {
	.driver = {
		.name = "msm-audio-ion",
		.owner = THIS_MODULE,
		.of_match_table = msm_audio_ion_dt_match,
		.suppress_bind_attrs = true,
	},
	.probe = msm_audio_ion_probe,
	.remove = msm_audio_ion_remove,
};

int __init msm_audio_ion_init(void)
{
	return platform_driver_register(&msm_audio_ion_driver);
}

void msm_audio_ion_exit(void)
{
	platform_driver_unregister(&msm_audio_ion_driver);
}

MODULE_DESCRIPTION("MSM Audio ION module");
MODULE_LICENSE("GPL v2");