msm: camera: memmgr: Add support to disable DelayedUnmap
Add an interface for the UMD to optionally disable the Delayed Unmap
feature for a given buffer mapping. Delayed Unmap remains enabled by
default unless the UMD explicitly asks for it to be disabled.

CRs-Fixed: 2580128
Change-Id: I66f87a9dbdfc4d9cecdc02eb24c1c670c9985cae
Signed-off-by: Pavan Kumar Chilamkurthi <pchilamk@codeaurora.org>
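For context, a minimal, hypothetical UMD-side sketch of how a client could opt out of Delayed Unmap for one buffer: the new CAM_MEM_FLAG_DISABLE_DELAYED_UNMAP bit (defined in the UAPI hunk below) is OR-ed into the flags word of the mapping request. The request struct and helper are placeholders rather than the real ioctl plumbing; only the flag values come from the UAPI header.

#include <stdint.h>

/* CAM_MEM_FLAG_* bits from the cam_req_mgr UAPI; the second one is added by this change. */
#define CAM_MEM_FLAG_HW_READ_WRITE          (1 << 0)
#define CAM_MEM_FLAG_DISABLE_DELAYED_UNMAP  (1 << 13)

/* Hypothetical stand-in for the real map/alloc command structure. */
struct umd_map_request {
	int32_t  fd;     /* dma-buf fd to map */
	uint32_t flags;  /* CAM_MEM_FLAG_* bits */
};

/*
 * Delayed Unmap stays enabled by default; setting the new flag asks the
 * SMMU layer to skip DMA_ATTR_DELAYED_UNMAP so the IOVA is released as
 * soon as the buffer is unmapped.
 */
static void build_map_request(struct umd_map_request *req, int buf_fd,
	int disable_delayed_unmap)
{
	req->fd = buf_fd;
	req->flags = CAM_MEM_FLAG_HW_READ_WRITE;
	if (disable_delayed_unmap)
		req->flags |= CAM_MEM_FLAG_DISABLE_DELAYED_UNMAP;
}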
@@ -505,12 +505,16 @@ static int cam_mem_util_map_hw_va(uint32_t flags,
 	int i;
 	int rc = -1;
 	int dir = cam_mem_util_get_dma_dir(flags);
+	bool dis_delayed_unmap = false;
 
 	if (dir < 0) {
 		CAM_ERR(CAM_MEM, "fail to map DMA direction, dir=%d", dir);
 		return dir;
 	}
 
+	if (flags & CAM_MEM_FLAG_DISABLE_DELAYED_UNMAP)
+		dis_delayed_unmap = true;
+
 	CAM_DBG(CAM_MEM,
 		"map_hw_va : fd = %d, flags = 0x%x, dir=%d, num_hdls=%d",
 		fd, flags, dir, num_hdls);
@@ -534,6 +538,7 @@ static int cam_mem_util_map_hw_va(uint32_t flags,
 	for (i = 0; i < num_hdls; i++) {
 		rc = cam_smmu_map_user_iova(mmu_hdls[i],
 			fd,
+			dis_delayed_unmap,
 			dir,
 			(dma_addr_t *)hw_vaddr,
 			len,
@@ -215,8 +215,9 @@ static struct cam_dma_buff_info *cam_smmu_find_mapping_by_virt_address(int idx,
 	dma_addr_t virt_addr);
 
 static int cam_smmu_map_buffer_and_add_to_list(int idx, int ion_fd,
-	enum dma_data_direction dma_dir, dma_addr_t *paddr_ptr,
-	size_t *len_ptr, enum cam_smmu_region_id region_id);
+	bool dis_delayed_unmap, enum dma_data_direction dma_dir,
+	dma_addr_t *paddr_ptr, size_t *len_ptr,
+	enum cam_smmu_region_id region_id);
 
 static int cam_smmu_map_kernel_buffer_and_add_to_list(int idx,
 	struct dma_buf *buf, enum dma_data_direction dma_dir,
@@ -1676,7 +1677,7 @@ EXPORT_SYMBOL(cam_smmu_release_sec_heap);
 static int cam_smmu_map_buffer_validate(struct dma_buf *buf,
 	int idx, enum dma_data_direction dma_dir, dma_addr_t *paddr_ptr,
 	size_t *len_ptr, enum cam_smmu_region_id region_id,
-	struct cam_dma_buff_info **mapping_info)
+	bool dis_delayed_unmap, struct cam_dma_buff_info **mapping_info)
 {
 	struct dma_buf_attachment *attach = NULL;
 	struct sg_table *table = NULL;
@@ -1751,7 +1752,8 @@ static int cam_smmu_map_buffer_validate(struct dma_buf *buf,
 		}
 		iommu_cb_set.cb_info[idx].shared_mapping_size += *len_ptr;
 	} else if (region_id == CAM_SMMU_REGION_IO) {
-		attach->dma_map_attrs |= DMA_ATTR_DELAYED_UNMAP;
+		if (!dis_delayed_unmap)
+			attach->dma_map_attrs |= DMA_ATTR_DELAYED_UNMAP;
 
 		table = dma_buf_map_attachment(attach, dma_dir);
 		if (IS_ERR_OR_NULL(table)) {
@@ -1769,8 +1771,9 @@ static int cam_smmu_map_buffer_validate(struct dma_buf *buf,
 			goto err_unmap_sg;
 		}
 
-		CAM_DBG(CAM_SMMU, "iova=%pK, region_id=%d, paddr=%pK, len=%d",
-			iova, region_id, *paddr_ptr, *len_ptr);
+		CAM_DBG(CAM_SMMU,
+			"iova=%pK, region_id=%d, paddr=%pK, len=%d, dma_map_attrs=%d",
+			iova, region_id, *paddr_ptr, *len_ptr, attach->dma_map_attrs);
 
 		if (table->sgl) {
 			CAM_DBG(CAM_SMMU,
@@ -1838,8 +1841,9 @@ err_out:
 
 
 static int cam_smmu_map_buffer_and_add_to_list(int idx, int ion_fd,
-	enum dma_data_direction dma_dir, dma_addr_t *paddr_ptr,
-	size_t *len_ptr, enum cam_smmu_region_id region_id)
+	bool dis_delayed_unmap, enum dma_data_direction dma_dir,
+	dma_addr_t *paddr_ptr, size_t *len_ptr,
+	enum cam_smmu_region_id region_id)
 {
 	int rc = -1;
 	struct cam_dma_buff_info *mapping_info = NULL;
@@ -1849,7 +1853,7 @@ static int cam_smmu_map_buffer_and_add_to_list(int idx, int ion_fd,
 	buf = dma_buf_get(ion_fd);
 
 	rc = cam_smmu_map_buffer_validate(buf, idx, dma_dir, paddr_ptr, len_ptr,
-		region_id, &mapping_info);
+		region_id, dis_delayed_unmap, &mapping_info);
 
 	if (rc) {
 		CAM_ERR(CAM_SMMU, "buffer validation failure");
@@ -1873,7 +1877,7 @@ static int cam_smmu_map_kernel_buffer_and_add_to_list(int idx,
 	struct cam_dma_buff_info *mapping_info = NULL;
 
 	rc = cam_smmu_map_buffer_validate(buf, idx, dma_dir, paddr_ptr, len_ptr,
-		region_id, &mapping_info);
+		region_id, false, &mapping_info);
 
 	if (rc) {
 		CAM_ERR(CAM_SMMU, "buffer validation failure");
@@ -1910,6 +1914,11 @@ static int cam_smmu_unmap_buf_and_remove_from_list(
 		return -EINVAL;
 	}
 
+	CAM_DBG(CAM_SMMU,
+		"region_id=%d, paddr=%pK, len=%d, dma_map_attrs=%d",
+		mapping_info->region_id, mapping_info->paddr, mapping_info->len,
+		mapping_info->attach->dma_map_attrs);
+
 	if (mapping_info->region_id == CAM_SMMU_REGION_SHARED) {
 		CAM_DBG(CAM_SMMU,
 			"Removing SHARED buffer paddr = %pK, len = %zu",
@@ -1938,7 +1947,6 @@ static int cam_smmu_unmap_buf_and_remove_from_list(
 		iommu_cb_set.cb_info[idx].shared_mapping_size -=
 			mapping_info->len;
 	} else if (mapping_info->region_id == CAM_SMMU_REGION_IO) {
-		mapping_info->attach->dma_map_attrs |= DMA_ATTR_DELAYED_UNMAP;
 		iommu_cb_set.cb_info[idx].io_mapping_size -= mapping_info->len;
 	}
 
@@ -2691,7 +2699,7 @@ static int cam_smmu_map_iova_validate_params(int handle,
 	return rc;
 }
 
-int cam_smmu_map_user_iova(int handle, int ion_fd,
+int cam_smmu_map_user_iova(int handle, int ion_fd, bool dis_delayed_unmap,
 	enum cam_smmu_map_dir dir, dma_addr_t *paddr_ptr,
 	size_t *len_ptr, enum cam_smmu_region_id region_id)
 {
@@ -2742,8 +2750,8 @@ int cam_smmu_map_user_iova(int handle, int ion_fd,
 		goto get_addr_end;
 	}
 
-	rc = cam_smmu_map_buffer_and_add_to_list(idx, ion_fd, dma_dir,
-		paddr_ptr, len_ptr, region_id);
+	rc = cam_smmu_map_buffer_and_add_to_list(idx, ion_fd,
+		dis_delayed_unmap, dma_dir, paddr_ptr, len_ptr, region_id);
 	if (rc < 0) {
 		CAM_ERR(CAM_SMMU,
 			"mapping or add list fail, idx=%d, fd=%d, region=%d, rc=%d",
@@ -98,6 +98,8 @@ int cam_smmu_ops(int handle, enum cam_smmu_ops_param op);
  *
  * @param handle: Handle to identify the CAM SMMU client (VFE, CPP, FD etc.)
  * @param ion_fd: ION handle identifying the memory buffer.
+ * @param dis_delayed_unmap: Whether to disable Delayed Unmap feature
+ *                           for this mapping
  * @dir : Mapping direction: which will traslate toDMA_BIDIRECTIONAL,
  * DMA_TO_DEVICE or DMA_FROM_DEVICE
  * @dma_addr : Pointer to physical address where mapped address will be
@@ -107,9 +109,8 @@ int cam_smmu_ops(int handle, enum cam_smmu_ops_param op);
  * @len_ptr : Length of buffer mapped returned by CAM SMMU driver.
  * @return Status of operation. Negative in case of error. Zero otherwise.
  */
-int cam_smmu_map_user_iova(int handle,
-	int ion_fd, enum cam_smmu_map_dir dir,
-	dma_addr_t *dma_addr, size_t *len_ptr,
+int cam_smmu_map_user_iova(int handle, int ion_fd, bool dis_delayed_unmap,
+	enum cam_smmu_map_dir dir, dma_addr_t *dma_addr, size_t *len_ptr,
 	enum cam_smmu_region_id region_id);
 
 /**
@@ -276,6 +276,7 @@ struct cam_req_mgr_link_control {
 #define CAM_MEM_FLAG_CACHE (1<<10)
 #define CAM_MEM_FLAG_HW_SHARED_ACCESS (1<<11)
 #define CAM_MEM_FLAG_CDSP_OUTPUT (1<<12)
+#define CAM_MEM_FLAG_DISABLE_DELAYED_UNMAP (1<<13)
 
 #define CAM_MEM_MMU_MAX_HANDLE 16
 
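For reference, a hedged kernel-side sketch of a caller using the updated cam_smmu_map_user_iova() signature. The function name, parameter order, and CAM_SMMU_REGION_IO come from the change above; the CAM_SMMU_MAP_RW direction value, the header include, and the surrounding helper are illustrative assumptions.

#include <linux/types.h>
#include "cam_smmu_api.h"	/* assumed header declaring cam_smmu_map_user_iova() */

/* Illustrative caller: map a user buffer into the IO region with Delayed Unmap disabled. */
static int example_map_io_buffer(int smmu_hdl, int buf_fd,
	dma_addr_t *iova, size_t *len)
{
	/*
	 * dis_delayed_unmap = true: DMA_ATTR_DELAYED_UNMAP is not set on the
	 * dma-buf attachment, so the IOVA is released at unmap time.
	 */
	return cam_smmu_map_user_iova(smmu_hdl, buf_fd, true,
		CAM_SMMU_MAP_RW, iova, len, CAM_SMMU_REGION_IO);
}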