@@ -196,6 +196,7 @@ static void cam_mem_put_slot(int32_t idx)
 	mutex_lock(&tbl.m_lock);
 	mutex_lock(&tbl.bufq[idx].q_lock);
 	tbl.bufq[idx].active = false;
+	tbl.bufq[idx].is_internal = false;
 	mutex_unlock(&tbl.bufq[idx].q_lock);
 	mutex_destroy(&tbl.bufq[idx].q_lock);
 	clear_bit(idx, tbl.bitmap);
@@ -540,7 +541,8 @@ static int cam_mem_util_map_hw_va(uint32_t flags,
 	int fd,
 	dma_addr_t *hw_vaddr,
 	size_t *len,
-	enum cam_smmu_region_id region)
+	enum cam_smmu_region_id region,
+	bool is_internal)
 {
 	int i;
 	int rc = -1;
@@ -582,7 +584,8 @@ static int cam_mem_util_map_hw_va(uint32_t flags,
 				dir,
 				(dma_addr_t *)hw_vaddr,
 				len,
-				region);
+				region,
+				is_internal);
 			if (rc < 0) {
 				CAM_ERR(CAM_MEM,
@@ -675,7 +678,8 @@ int cam_mem_mgr_alloc_and_map(struct cam_mem_mgr_alloc_cmd *cmd)
 			fd,
 			&hw_vaddr,
 			&len,
-			region);
+			region,
+			true);
 		if (rc) {
 			CAM_ERR(CAM_MEM,
@@ -691,6 +695,7 @@ int cam_mem_mgr_alloc_and_map(struct cam_mem_mgr_alloc_cmd *cmd)
 	tbl.bufq[idx].dma_buf = NULL;
 	tbl.bufq[idx].flags = cmd->flags;
 	tbl.bufq[idx].buf_handle = GET_MEM_HANDLE(idx, fd);
+	tbl.bufq[idx].is_internal = true;
 	if (cmd->flags & CAM_MEM_FLAG_PROTECTED_MODE)
 		CAM_MEM_MGR_SET_SECURE_HDL(tbl.bufq[idx].buf_handle, true);
 
@@ -733,6 +738,23 @@ slot_fail:
 	return rc;
 }
 
+static bool cam_mem_util_is_map_internal(int32_t fd)
+{
+	uint32_t i;
+	bool is_internal = false;
+
+	mutex_lock(&tbl.m_lock);
+	for_each_set_bit(i, tbl.bitmap, tbl.bits) {
+		if (tbl.bufq[i].fd == fd) {
+			is_internal = tbl.bufq[i].is_internal;
+			break;
+		}
+	}
+	mutex_unlock(&tbl.m_lock);
+
+	return is_internal;
+}
+
 int cam_mem_mgr_map(struct cam_mem_mgr_map_cmd *cmd)
 {
 	int32_t idx;
@@ -740,6 +762,7 @@ int cam_mem_mgr_map(struct cam_mem_mgr_map_cmd *cmd)
 	struct dma_buf *dmabuf;
 	dma_addr_t hw_vaddr = 0;
 	size_t len = 0;
+	bool is_internal = false;
 
 	if (!atomic_read(&cam_mem_mgr_state)) {
 		CAM_ERR(CAM_MEM, "failed. mem_mgr not initialized");
@@ -769,6 +792,8 @@ int cam_mem_mgr_map(struct cam_mem_mgr_map_cmd *cmd)
 		return -EINVAL;
 	}
 
+	is_internal = cam_mem_util_is_map_internal(cmd->fd);
+
 	if ((cmd->flags & CAM_MEM_FLAG_HW_READ_WRITE) ||
 		(cmd->flags & CAM_MEM_FLAG_PROTECTED_MODE)) {
 		rc = cam_mem_util_map_hw_va(cmd->flags,
@@ -777,7 +802,8 @@ int cam_mem_mgr_map(struct cam_mem_mgr_map_cmd *cmd)
 			cmd->fd,
 			&hw_vaddr,
 			&len,
-			CAM_SMMU_REGION_IO);
+			CAM_SMMU_REGION_IO,
+			is_internal);
 		if (rc) {
 			CAM_ERR(CAM_MEM,
 				"Failed in map_hw_va, flags=0x%x, fd=%d, region=%d, num_hdl=%d, rc=%d",
@@ -813,6 +839,7 @@ int cam_mem_mgr_map(struct cam_mem_mgr_map_cmd *cmd)
 	memcpy(tbl.bufq[idx].hdls, cmd->mmu_hdls,
 		sizeof(int32_t) * cmd->num_hdl);
 	tbl.bufq[idx].is_imported = true;
+	tbl.bufq[idx].is_internal = is_internal;
 	mutex_unlock(&tbl.bufq[idx].q_lock);
 
 	cmd->out.buf_handle = tbl.bufq[idx].buf_handle;
@@ -939,6 +966,7 @@ static int cam_mem_mgr_cleanup_table(void)
 		tbl.bufq[i].num_hdl = 0;
 		tbl.bufq[i].dma_buf = NULL;
 		tbl.bufq[i].active = false;
+		tbl.bufq[i].is_internal = false;
 		mutex_unlock(&tbl.bufq[i].q_lock);
 		mutex_destroy(&tbl.bufq[i].q_lock);
 	}
@@ -1034,6 +1062,7 @@ static int cam_mem_util_unmap(int32_t idx,
 	tbl.bufq[idx].fd = -1;
 	tbl.bufq[idx].dma_buf = NULL;
 	tbl.bufq[idx].is_imported = false;
+	tbl.bufq[idx].is_internal = false;
 	tbl.bufq[idx].len = 0;
 	tbl.bufq[idx].num_hdl = 0;
 	tbl.bufq[idx].active = false;
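
The core of the change is the fd-to-is_internal lookup in the new cam_mem_util_is_map_internal() helper: when userspace maps an fd, the driver checks whether that same fd already backs a slot it allocated itself, and defaults to "not internal" (an imported buffer) when no slot matches. Below is a minimal standalone sketch of that lookup pattern, not the driver code itself: the buf_slot struct, BUFQ_SIZE, and the pthread mutex are stand-ins for the driver's bufq table, allocation bitmap, and tbl.m_lock, and the linear scan with an active check stands in for for_each_set_bit().

```c
/* Hypothetical userspace model of the fd -> is_internal lookup added by
 * this patch. Names and layout are simplified stand-ins, not the real
 * cam_mem_mgr structures. */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

#define BUFQ_SIZE 8

struct buf_slot {
	bool active;       /* slot in use; models the allocation bitmap bit */
	int fd;            /* dma-buf fd backing this slot */
	bool is_internal;  /* set when the slot came from alloc_and_map */
};

static struct buf_slot bufq[BUFQ_SIZE];
static pthread_mutex_t m_lock = PTHREAD_MUTEX_INITIALIZER;

/* Mirrors cam_mem_util_is_map_internal(): scan active slots under the
 * table lock; an fd with no matching slot defaults to "not internal". */
static bool is_map_internal(int fd)
{
	bool is_internal = false;

	pthread_mutex_lock(&m_lock);
	for (int i = 0; i < BUFQ_SIZE; i++) {
		if (bufq[i].active && bufq[i].fd == fd) {
			is_internal = bufq[i].is_internal;
			break;
		}
	}
	pthread_mutex_unlock(&m_lock);

	return is_internal;
}

int main(void)
{
	/* One slot populated as if cam_mem_mgr_alloc_and_map() had run,
	 * which in the patch sets is_internal = true unconditionally. */
	bufq[2] = (struct buf_slot){ .active = true, .fd = 42,
				     .is_internal = true };

	printf("fd 42 internal: %d\n", is_map_internal(42)); /* 1 */
	printf("fd 99 internal: %d\n", is_map_internal(99)); /* 0: imported */
	return 0;
}
```

The resolved flag is then threaded through cam_mem_util_map_hw_va() down to the SMMU mapping call, presumably so that layer can tell a re-map of a driver-allocated buffer apart from a genuinely imported one; every teardown path (put_slot, unmap, cleanup_table) clears the flag so a recycled slot never inherits a stale value.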