@@ -2288,150 +2288,202 @@ fail1:
 }
 
 #ifdef IPA_OFFLOAD
+#ifdef QCA_WIFI_3_0
 /**
- * htt_rx_ipa_uc_attach() - attach htt ipa uc rx resource
+ * htt_rx_ipa_uc_alloc_wdi2_rsc() - Allocate WDI2.0 resources
  * @pdev: htt context
- * @rx_ind_ring_size: rx ring size
+ * @rx_ind_ring_elements: rx ring elements
  *
  * Return: 0 success
  */
-int htt_rx_ipa_uc_attach(struct htt_pdev_t *pdev,
+int htt_rx_ipa_uc_alloc_wdi2_rsc(struct htt_pdev_t *pdev,
			 unsigned int rx_ind_ring_elements)
 {
-	pdev->ipa_uc_rx_rsc.rx_ind_ring_base.vaddr =
+	/* Allocate RX2 indication ring */
+	/* RX2 IND ring element
+	 * 4bytes: pointer
+	 * 2bytes: VDEV ID
+	 * 2bytes: length */
+	pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.vaddr =
		cdf_os_mem_alloc_consistent(
			pdev->osdev,
			rx_ind_ring_elements *
			sizeof(struct ipa_uc_rx_ring_elem_t),
-			&pdev->ipa_uc_rx_rsc.rx_ind_ring_base.paddr,
+			&pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.paddr,
			cdf_get_dma_mem_context((&pdev->ipa_uc_rx_rsc.
-						 rx_ind_ring_base),
+						 rx2_ind_ring_base),
						memctx));
-	if (!pdev->ipa_uc_rx_rsc.rx_ind_ring_base.vaddr) {
+	if (!pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.vaddr) {
		cdf_print("%s: RX IND RING alloc fail", __func__);
		return -ENOBUFS;
	}
 
	/* RX indication ring size, by bytes */
-	pdev->ipa_uc_rx_rsc.rx_ind_ring_size =
+	pdev->ipa_uc_rx_rsc.rx2_ind_ring_size =
		rx_ind_ring_elements * sizeof(struct ipa_uc_rx_ring_elem_t);
-	cdf_mem_zero(pdev->ipa_uc_rx_rsc.rx_ind_ring_base.vaddr,
-		pdev->ipa_uc_rx_rsc.rx_ind_ring_size);
+	cdf_mem_zero(pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.vaddr,
+		pdev->ipa_uc_rx_rsc.rx2_ind_ring_size);
 
	/* Allocate RX process done index */
-	pdev->ipa_uc_rx_rsc.rx_ipa_prc_done_idx.vaddr =
+	pdev->ipa_uc_rx_rsc.rx2_ipa_prc_done_idx.vaddr =
		cdf_os_mem_alloc_consistent(
			pdev->osdev,
			4,
-			&pdev->ipa_uc_rx_rsc.rx_ipa_prc_done_idx.paddr,
+			&pdev->ipa_uc_rx_rsc.rx2_ipa_prc_done_idx.paddr,
			cdf_get_dma_mem_context((&pdev->ipa_uc_rx_rsc.
						 rx_ipa_prc_done_idx),
						memctx));
-	if (!pdev->ipa_uc_rx_rsc.rx_ipa_prc_done_idx.vaddr) {
+	if (!pdev->ipa_uc_rx_rsc.rx2_ipa_prc_done_idx.vaddr) {
		cdf_print("%s: RX PROC DONE IND alloc fail", __func__);
		cdf_os_mem_free_consistent(
			pdev->osdev,
-			pdev->ipa_uc_rx_rsc.rx_ind_ring_size,
-			pdev->ipa_uc_rx_rsc.rx_ind_ring_base.vaddr,
-			pdev->ipa_uc_rx_rsc.rx_ind_ring_base.paddr,
+			pdev->ipa_uc_rx_rsc.rx2_ind_ring_size,
+			pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.vaddr,
+			pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.paddr,
			cdf_get_dma_mem_context((&pdev->ipa_uc_rx_rsc.
-						 rx_ind_ring_base),
+						 rx2_ind_ring_base),
						memctx));
		return -ENOBUFS;
	}
-	cdf_mem_zero(pdev->ipa_uc_rx_rsc.rx_ipa_prc_done_idx.vaddr, 4);
+	cdf_mem_zero(pdev->ipa_uc_rx_rsc.rx2_ipa_prc_done_idx.vaddr, 4);
+	return 0;
+}
+#else
+int htt_rx_ipa_uc_alloc_wdi2_rsc(struct htt_pdev_t *pdev,
+			 unsigned int rx_ind_ring_elements)
+{
+	return 0;
+}
+#endif
 
-	pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.vaddr =
+/**
+ * htt_rx_ipa_uc_attach() - attach htt ipa uc rx resource
+ * @pdev: htt context
+ * @rx_ind_ring_size: rx ring size
+ *
+ * Return: 0 success
+ */
+int htt_rx_ipa_uc_attach(struct htt_pdev_t *pdev,
+			 unsigned int rx_ind_ring_elements)
+{
+	int ret = 0;
+	/* Allocate RX indication ring */
+	/* RX IND ring element
+	 * 4bytes: pointer
+	 * 2bytes: VDEV ID
+	 * 2bytes: length */
+	pdev->ipa_uc_rx_rsc.rx_ind_ring_base.vaddr =
		cdf_os_mem_alloc_consistent(
			pdev->osdev,
			rx_ind_ring_elements *
-			sizeof(cdf_dma_addr_t),
-			&pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.paddr,
+			sizeof(struct ipa_uc_rx_ring_elem_t),
+			&pdev->ipa_uc_rx_rsc.rx_ind_ring_base.paddr,
			cdf_get_dma_mem_context((&pdev->ipa_uc_rx_rsc.
-						 rx2_ind_ring_base),
+						 rx_ind_ring_base),
						memctx));
-	if (!pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.vaddr) {
+	if (!pdev->ipa_uc_rx_rsc.rx_ind_ring_base.vaddr) {
		cdf_print("%s: RX IND RING alloc fail", __func__);
		return -ENOBUFS;
	}
 
	/* RX indication ring size, by bytes */
-	pdev->ipa_uc_rx_rsc.rx2_ind_ring_size =
-		rx_ind_ring_elements * sizeof(cdf_dma_addr_t);
-	cdf_mem_zero(pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.vaddr,
-		pdev->ipa_uc_rx_rsc.rx2_ind_ring_size);
+	pdev->ipa_uc_rx_rsc.rx_ind_ring_size =
+		rx_ind_ring_elements * sizeof(struct ipa_uc_rx_ring_elem_t);
+	cdf_mem_zero(pdev->ipa_uc_rx_rsc.rx_ind_ring_base.vaddr,
+		pdev->ipa_uc_rx_rsc.rx_ind_ring_size);
 
	/* Allocate RX process done index */
-	pdev->ipa_uc_rx_rsc.rx2_ipa_prc_done_idx.vaddr =
+	pdev->ipa_uc_rx_rsc.rx_ipa_prc_done_idx.vaddr =
		cdf_os_mem_alloc_consistent(
			pdev->osdev,
			4,
-			&pdev->ipa_uc_rx_rsc.rx2_ipa_prc_done_idx.paddr,
+			&pdev->ipa_uc_rx_rsc.rx_ipa_prc_done_idx.paddr,
			cdf_get_dma_mem_context((&pdev->ipa_uc_rx_rsc.
						 rx_ipa_prc_done_idx),
						memctx));
-	if (!pdev->ipa_uc_rx_rsc.rx2_ipa_prc_done_idx.vaddr) {
+	if (!pdev->ipa_uc_rx_rsc.rx_ipa_prc_done_idx.vaddr) {
		cdf_print("%s: RX PROC DONE IND alloc fail", __func__);
		cdf_os_mem_free_consistent(
			pdev->osdev,
-			pdev->ipa_uc_rx_rsc.rx2_ind_ring_size,
-			pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.vaddr,
-			pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.paddr,
+			pdev->ipa_uc_rx_rsc.rx_ind_ring_size,
+			pdev->ipa_uc_rx_rsc.rx_ind_ring_base.vaddr,
+			pdev->ipa_uc_rx_rsc.rx_ind_ring_base.paddr,
			cdf_get_dma_mem_context((&pdev->ipa_uc_rx_rsc.
-						 rx2_ind_ring_base),
+						 rx_ind_ring_base),
						memctx));
		return -ENOBUFS;
	}
-	cdf_mem_zero(pdev->ipa_uc_rx_rsc.rx2_ipa_prc_done_idx.vaddr, 4);
-	return 0;
+	cdf_mem_zero(pdev->ipa_uc_rx_rsc.rx_ipa_prc_done_idx.vaddr, 4);
+
+	ret = htt_rx_ipa_uc_alloc_wdi2_rsc(pdev, rx_ind_ring_elements);
+	return ret;
 }
 
-int htt_rx_ipa_uc_detach(struct htt_pdev_t *pdev)
+#ifdef QCA_WIFI_3_0
+/**
+ * htt_rx_ipa_uc_free_wdi2_rsc() - Free WDI2.0 resources
+ * @pdev: htt context
+ *
+ * Return: None
+ */
+void htt_rx_ipa_uc_free_wdi2_rsc(struct htt_pdev_t *pdev)
 {
-	if (pdev->ipa_uc_rx_rsc.rx_ind_ring_base.vaddr) {
+	if (pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.vaddr) {
		cdf_os_mem_free_consistent(
			pdev->osdev,
-			pdev->ipa_uc_rx_rsc.rx_ind_ring_size,
-			pdev->ipa_uc_rx_rsc.rx_ind_ring_base.vaddr,
-			pdev->ipa_uc_rx_rsc.rx_ind_ring_base.paddr,
+			pdev->ipa_uc_rx_rsc.rx2_ind_ring_size,
+			pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.vaddr,
+			pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.paddr,
			cdf_get_dma_mem_context((&pdev->ipa_uc_rx_rsc.
-						 rx_ind_ring_base),
+						 rx2_ind_ring_base),
						memctx));
	}
 
-	if (pdev->ipa_uc_rx_rsc.rx_ipa_prc_done_idx.vaddr) {
+	if (pdev->ipa_uc_rx_rsc.rx2_ipa_prc_done_idx.vaddr) {
		cdf_os_mem_free_consistent(
			pdev->osdev,
			4,
			pdev->ipa_uc_rx_rsc.
			rx_ipa_prc_done_idx.vaddr,
-			pdev->ipa_uc_rx_rsc.rx_ipa_prc_done_idx.paddr,
+			pdev->ipa_uc_rx_rsc.rx2_ipa_prc_done_idx.paddr,
			cdf_get_dma_mem_context((&pdev->ipa_uc_rx_rsc.
						 rx_ipa_prc_done_idx),
						memctx));
	}
-	if (pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.vaddr) {
+}
+#else
+void htt_rx_ipa_uc_free_wdi2_rsc(struct htt_pdev_t *pdev)
+{
+	return;
+}
+#endif
+
+int htt_rx_ipa_uc_detach(struct htt_pdev_t *pdev)
+{
+	if (pdev->ipa_uc_rx_rsc.rx_ind_ring_base.vaddr) {
		cdf_os_mem_free_consistent(
			pdev->osdev,
-			pdev->ipa_uc_rx_rsc.rx2_ind_ring_size,
-			pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.vaddr,
-			pdev->ipa_uc_rx_rsc.rx2_ind_ring_base.paddr,
+			pdev->ipa_uc_rx_rsc.rx_ind_ring_size,
+			pdev->ipa_uc_rx_rsc.rx_ind_ring_base.vaddr,
+			pdev->ipa_uc_rx_rsc.rx_ind_ring_base.paddr,
			cdf_get_dma_mem_context((&pdev->ipa_uc_rx_rsc.
-						 rx2_ind_ring_base),
+						 rx_ind_ring_base),
						memctx));
	}
 
-	if (pdev->ipa_uc_rx_rsc.rx2_ipa_prc_done_idx.vaddr) {
+	if (pdev->ipa_uc_rx_rsc.rx_ipa_prc_done_idx.vaddr) {
		cdf_os_mem_free_consistent(
			pdev->osdev,
			4,
			pdev->ipa_uc_rx_rsc.
			rx_ipa_prc_done_idx.vaddr,
-			pdev->ipa_uc_rx_rsc.rx2_ipa_prc_done_idx.paddr,
+			pdev->ipa_uc_rx_rsc.rx_ipa_prc_done_idx.paddr,
			cdf_get_dma_mem_context((&pdev->ipa_uc_rx_rsc.
						 rx2_ipa_prc_done_idx),
						memctx));
	}
+
+	htt_rx_ipa_uc_free_wdi2_rsc(pdev);
	return 0;
 }
 #endif /* IPA_OFFLOAD */