  1. // SPDX-License-Identifier: GPL-2.0-only
  2. /*
  3. * Copyright (c) 2017-2021, The Linux Foundation. All rights reserved.
  4. * Copyright (c) 2022 Qualcomm Innovation Center, Inc. All rights reserved.
  5. */
  6. #include <linux/debugfs.h>
  7. #include <linux/videodev2.h>
  8. #include <linux/slab.h>
  9. #include <linux/uaccess.h>
  10. #include <media/cam_sync.h>
  11. #include <media/cam_defs.h>
  12. #include <media/cam_icp.h>
  13. #include "cam_node.h"
  14. #include "cam_context.h"
  15. #include "cam_context_utils.h"
  16. #include "cam_icp_context.h"
  17. #include "cam_req_mgr_util.h"
  18. #include "cam_mem_mgr.h"
  19. #include "cam_trace.h"
  20. #include "cam_debug_util.h"
  21. #include "cam_packet_util.h"
  22. #include "cam_req_mgr_dev.h"
  23. #include "cam_icp_hw_mgr_intf.h"
  24. static const char icp_dev_name[] = "cam-icp";
  25. static int cam_icp_context_dump_active_request(void *data, void *args)
  26. {
  27. struct cam_context *ctx = (struct cam_context *)data;
  28. struct cam_ctx_request *req = NULL;
  29. struct cam_ctx_request *req_temp = NULL;
  30. struct cam_hw_dump_pf_args *pf_args = (struct cam_hw_dump_pf_args *)args;
  31. int rc = 0;
  32. if (!ctx || !pf_args) {
  33. CAM_ERR(CAM_ICP, "Invalid ctx %pK or pf args %pK",
  34. ctx, pf_args);
  35. return -EINVAL;
  36. }
  37. CAM_INFO(CAM_ICP, "iommu fault for icp ctx %d state %d",
  38. ctx->ctx_id, ctx->state);
  39. list_for_each_entry_safe(req, req_temp,
  40. &ctx->active_req_list, list) {
  41. CAM_INFO(CAM_ICP, "Active req_id: %llu ctx_id: %u",
  42. req->request_id, ctx->ctx_id);
  43. rc = cam_context_dump_pf_info_to_hw(ctx, pf_args, &req->pf_data);
  44. if (rc)
  45. CAM_ERR(CAM_ICP, "Failed to dump pf info ctx_id: %u state: %d",
  46. ctx->ctx_id, ctx->state);
  47. }
  48. /*
  49. * Faulted ctx found. Since IPE/BPS instances are shared among contexts,
  50. * faulted ctx is found if and only if the context contains
  51. * faulted buffer
  52. */
  53. if (pf_args->pf_context_info.ctx_found) {
  54. /* Send PF notification to UMD if PF found on current CTX */
  55. rc = cam_context_send_pf_evt(ctx, pf_args);
  56. if (rc)
  57. CAM_ERR(CAM_ICP,
  58. "Failed to notify PF event to userspace rc: %d", rc);
  59. }
  60. return rc;
  61. }
  62. static int cam_icp_context_mini_dump(void *priv, void *args)
  63. {
  64. int rc;
  65. struct cam_context *ctx;
  66. if (!priv || !args) {
  67. CAM_ERR(CAM_ICP, "Invalid priv %pK args %pK", priv, args);
  68. return -EINVAL;
  69. }
  70. ctx = (struct cam_context *)priv;
  71. rc = cam_context_mini_dump(ctx, args);
  72. if (rc)
  73. CAM_ERR(CAM_ICP, "ctx [id: %u name: %s] Mini Dump failed rc %d", ctx->dev_name,
  74. ctx->ctx_id, rc);
  75. return rc;
  76. }
  77. static int __cam_icp_acquire_dev_in_available(struct cam_context *ctx,
  78. struct cam_acquire_dev_cmd *cmd)
  79. {
  80. int rc;
  81. rc = cam_context_acquire_dev_to_hw(ctx, cmd);
  82. if (!rc) {
  83. ctx->state = CAM_CTX_ACQUIRED;
  84. trace_cam_context_state("ICP", ctx);
  85. }
  86. return rc;
  87. }
  88. static int __cam_icp_release_dev_in_acquired(struct cam_context *ctx,
  89. struct cam_release_dev_cmd *cmd)
  90. {
  91. int rc;
  92. cam_common_release_evt_params(ctx->dev_hdl);
  93. rc = cam_context_release_dev_to_hw(ctx, cmd);
  94. if (rc)
  95. CAM_ERR(CAM_ICP, "Unable to release device");
  96. ctx->state = CAM_CTX_AVAILABLE;
  97. trace_cam_context_state("ICP", ctx);
  98. return rc;
  99. }
  100. static int __cam_icp_start_dev_in_acquired(struct cam_context *ctx,
  101. struct cam_start_stop_dev_cmd *cmd)
  102. {
  103. int rc;
  104. rc = cam_context_start_dev_to_hw(ctx, cmd);
  105. if (!rc) {
  106. ctx->state = CAM_CTX_READY;
  107. trace_cam_context_state("ICP", ctx);
  108. }
  109. return rc;
  110. }
  111. static int __cam_icp_dump_dev_in_ready(
  112. struct cam_context *ctx,
  113. struct cam_dump_req_cmd *cmd)
  114. {
  115. int rc;
  116. rc = cam_context_dump_dev_to_hw(ctx, cmd);
  117. if (rc)
  118. CAM_ERR(CAM_ICP, "Failed to dump device");
  119. return rc;
  120. }
  121. static int __cam_icp_flush_dev_in_ready(struct cam_context *ctx,
  122. struct cam_flush_dev_cmd *cmd)
  123. {
  124. int rc;
  125. rc = cam_context_flush_dev_to_hw(ctx, cmd);
  126. if (rc)
  127. CAM_ERR(CAM_ICP, "Failed to flush device");
  128. return rc;
  129. }
/*
 * Validate a userspace config packet and route it to the hw manager.
 *
 * The packet handle from @cmd is mapped to a kernel VA, bounds-checked
 * against the buffer length, validated, and then dispatched: IPE/BPS
 * settings opcodes go through the immediate config path, everything
 * else through the prepare (request) path.
 *
 * Return: 0 on success, negative error code otherwise.
 */
static int __cam_icp_config_dev_in_ready(struct cam_context *ctx,
	struct cam_config_dev_cmd *cmd)
{
	int rc;
	size_t len;
	uintptr_t packet_addr;
	struct cam_packet *packet;
	size_t remain_len = 0;

	/* Map the packet handle; len receives the full buffer size */
	rc = cam_mem_get_cpu_buf((int32_t) cmd->packet_handle,
		&packet_addr, &len);
	if (rc) {
		CAM_ERR(CAM_ICP, "[%s][%d] Can not get packet address",
			ctx->dev_name, ctx->ctx_id);
		rc = -EINVAL;
		return rc;
	}
	remain_len = len;
	/*
	 * Reject buffers too small for a header and offsets that would not
	 * leave at least sizeof(struct cam_packet) bytes after the offset.
	 */
	if ((len < sizeof(struct cam_packet)) ||
		(cmd->offset >= (len - sizeof(struct cam_packet)))) {
		CAM_ERR(CAM_CTXT,
			"Invalid offset, len: %zu cmd offset: %llu sizeof packet: %zu",
			len, cmd->offset, sizeof(struct cam_packet));
		return -EINVAL;
	}
	remain_len -= (size_t)cmd->offset;
	packet = (struct cam_packet *) ((uint8_t *)packet_addr +
		(uint32_t)cmd->offset);
	/* Deep-validate header/payload sizes against the remaining length */
	rc = cam_packet_util_validate_packet(packet, remain_len);
	if (rc) {
		CAM_ERR(CAM_CTXT, "Invalid packet params, remain length: %zu",
			remain_len);
		return rc;
	}
	/*
	 * IPE/BPS settings packets are consumed immediately (config path);
	 * all other opcodes are queued via the prepare path.
	 */
	if (((packet->header.op_code & 0xff) ==
		CAM_ICP_OPCODE_IPE_SETTINGS) ||
		((packet->header.op_code & 0xff) ==
		CAM_ICP_OPCODE_BPS_SETTINGS))
		rc = cam_context_config_dev_to_hw(ctx, cmd);
	else
		rc = cam_context_prepare_dev_to_hw(ctx, cmd);
	if (rc)
		CAM_ERR(CAM_ICP, "Failed to prepare device");
	/*
	 * NOTE(review): no cam_mem_put_cpu_buf() on any exit path. If this
	 * tree's cam_mem_get_cpu_buf() takes a buffer refcount, every
	 * return above leaks it -- confirm against cam_mem_mgr and add
	 * matching puts if so.
	 */
	return rc;
}
  174. static int __cam_icp_stop_dev_in_ready(struct cam_context *ctx,
  175. struct cam_start_stop_dev_cmd *cmd)
  176. {
  177. int rc;
  178. rc = cam_context_stop_dev_to_hw(ctx);
  179. if (rc)
  180. CAM_ERR(CAM_ICP, "Failed to stop device");
  181. ctx->state = CAM_CTX_ACQUIRED;
  182. trace_cam_context_state("ICP", ctx);
  183. return rc;
  184. }
  185. static int __cam_icp_release_dev_in_ready(struct cam_context *ctx,
  186. struct cam_release_dev_cmd *cmd)
  187. {
  188. int rc;
  189. rc = __cam_icp_stop_dev_in_ready(ctx, NULL);
  190. if (rc)
  191. CAM_ERR(CAM_ICP, "Failed to stop device");
  192. rc = __cam_icp_release_dev_in_acquired(ctx, cmd);
  193. if (rc)
  194. CAM_ERR(CAM_ICP, "Failed to release device");
  195. return rc;
  196. }
  197. static uint32_t get_error_code(uint32_t err_type)
  198. {
  199. switch (err_type) {
  200. case CAM_ICP_HW_ERROR_NO_MEM:
  201. return CAM_REQ_MGR_ICP_NO_MEMORY;
  202. case CAM_ICP_HW_ERROR_SYSTEM_FAILURE:
  203. return CAM_REQ_MGR_ICP_SYSTEM_FAILURE;
  204. default:
  205. return 0;
  206. }
  207. }
  208. static int __cam_icp_notify_v4l2_err_evt(struct cam_context *ctx,
  209. uint32_t err_type, uint32_t err_code, uint64_t request_id)
  210. {
  211. struct cam_req_mgr_message req_msg = {0};
  212. int rc;
  213. req_msg.session_hdl = ctx->session_hdl;
  214. req_msg.u.err_msg.device_hdl = ctx->dev_hdl;
  215. req_msg.u.err_msg.error_type = err_type;
  216. req_msg.u.err_msg.link_hdl = ctx->link_hdl;
  217. req_msg.u.err_msg.request_id = request_id;
  218. req_msg.u.err_msg.resource_size = 0x0;
  219. req_msg.u.err_msg.error_code = err_code;
  220. rc = cam_req_mgr_notify_message(&req_msg, V4L_EVENT_CAM_REQ_MGR_ERROR,
  221. V4L_EVENT_CAM_REQ_MGR_EVENT);
  222. if (rc)
  223. CAM_ERR(CAM_ICP,
  224. "Error in notifying the error time for req id:%lld ctx %u",
  225. request_id,
  226. ctx->ctx_id);
  227. CAM_INFO(CAM_ICP,
  228. "CTX: [%s][%d] notifying error to userspace err type: %d, err code: %u, req id: %llu",
  229. ctx->dev_name, ctx->ctx_id, err_type, err_code, request_id);
  230. return rc;
  231. }
  232. static int cam_icp_ctx_handle_fatal_error(void *ctx, void *err_evt_data)
  233. {
  234. struct cam_icp_hw_error_evt_data *err_evt;
  235. uint32_t err_code = 0;
  236. int rc;
  237. err_evt = (struct cam_icp_hw_error_evt_data *)err_evt_data;
  238. err_code = get_error_code(err_evt->err_type);
  239. rc = __cam_icp_notify_v4l2_err_evt(ctx, CAM_REQ_MGR_ERROR_TYPE_RECOVERY,
  240. err_code, err_evt->req_id);
  241. return rc;
  242. }
  243. static int cam_icp_ctx_handle_buf_done_in_ready(void *ctx, void *done_evt_data)
  244. {
  245. struct cam_icp_hw_buf_done_evt_data *buf_done;
  246. buf_done = (struct cam_icp_hw_buf_done_evt_data *)done_evt_data;
  247. return cam_context_buf_done_from_hw(ctx, buf_done->buf_done_data, buf_done->evt_id);
  248. }
  249. static int cam_icp_ctx_handle_error_inducement(void *ctx, void *inject_evt_arg)
  250. {
  251. return cam_context_apply_evt_injection(ctx, inject_evt_arg);
  252. }
  253. static int __cam_icp_ctx_handle_hw_event(void *ctx,
  254. uint32_t evt_id, void *evt_data)
  255. {
  256. int rc;
  257. if (!ctx || !evt_data) {
  258. CAM_ERR(CAM_ICP, "Invalid ctx %s and event data %s",
  259. CAM_IS_NULL_TO_STR(ctx), CAM_IS_NULL_TO_STR(evt_data));
  260. return -EINVAL;
  261. }
  262. switch (evt_id) {
  263. case CAM_ICP_EVT_ID_BUF_DONE:
  264. rc = cam_icp_ctx_handle_buf_done_in_ready(ctx, evt_data);
  265. break;
  266. case CAM_ICP_EVT_ID_ERROR:
  267. rc = cam_icp_ctx_handle_fatal_error(ctx, evt_data);
  268. break;
  269. case CAM_ICP_EVT_ID_INJECT_EVENT:
  270. rc = cam_icp_ctx_handle_error_inducement(ctx, evt_data);
  271. break;
  272. default:
  273. CAM_ERR(CAM_ICP, "Invalid event id: %u", evt_id);
  274. rc = -EINVAL;
  275. }
  276. return rc;
  277. }
/*
 * Validate an event-notification injection request before it is sent
 * to the hw manager.
 *
 * Error events must carry a recovery-class error type; page-fault
 * events are only allowed when the context's SMMU context bank has
 * non-fatal faults enabled (a real PF would otherwise be fatal).
 *
 * Return: 0 if the injection is acceptable, negative error otherwise.
 */
static int cam_icp_context_validate_event_notify_injection(struct cam_context *ctx,
	struct cam_hw_inject_evt_param *evt_params)
{
	int rc = 0;
	uint32_t evt_type;
	uint64_t req_id;

	req_id = evt_params->req_id;
	evt_type = evt_params->u.evt_notify.evt_notify_type;

	switch (evt_type) {
	case V4L_EVENT_CAM_REQ_MGR_ERROR: {
		struct cam_hw_inject_err_evt_param *err_evt_params =
			&evt_params->u.evt_notify.u.err_evt_params;

		/* Only recovery-class error types may be injected */
		switch (err_evt_params->err_type) {
		case CAM_REQ_MGR_ERROR_TYPE_RECOVERY:
		case CAM_REQ_MGR_ERROR_TYPE_FULL_RECOVERY:
			break;
		default:
			CAM_ERR(CAM_ICP,
				"Invalid error type: %u for error event injection err code: %u req id: %llu ctx id: %u dev hdl: %d",
				err_evt_params->err_type, err_evt_params->err_code,
				req_id, ctx->ctx_id, ctx->dev_hdl);
			return -EINVAL;
		}

		CAM_INFO(CAM_ICP,
			"Inject ERR evt: err code: %u err type: %u req id: %llu ctx id: %u dev hdl: %d",
			err_evt_params->err_code, err_evt_params->err_type,
			req_id, ctx->ctx_id, ctx->dev_hdl);
		break;
	}
	case V4L_EVENT_CAM_REQ_MGR_PF_ERROR: {
		struct cam_hw_inject_pf_evt_param *pf_evt_params =
			&evt_params->u.evt_notify.u.pf_evt_params;
		bool non_fatal_en;

		/* PF injection requires non-fatal faults on the img cb */
		rc = cam_smmu_is_cb_non_fatal_fault_en(ctx->img_iommu_hdl, &non_fatal_en);
		if (rc) {
			CAM_ERR(CAM_ICP,
				"Fail to query whether device's cb has non-fatal enabled rc: %d",
				rc);
			return rc;
		}

		if (!non_fatal_en) {
			CAM_ERR(CAM_ICP,
				"Fail to inject page fault event notification. Page fault is fatal for ICP");
			return -EINVAL;
		}

		CAM_INFO(CAM_ICP,
			"Inject PF evt: req_id: %llu ctx id: %u dev hdl: %d ctx found: %hhu",
			req_id, ctx->ctx_id, ctx->dev_hdl, pf_evt_params->ctx_found);
		break;
	}
	default:
		CAM_ERR(CAM_ICP, "Event notification type not supported: %u", evt_type);
		rc = -EINVAL;
	}

	return rc;
}
  334. static int cam_icp_context_inject_evt(void *context, void *evt_args)
  335. {
  336. struct cam_context *ctx = context;
  337. struct cam_hw_inject_evt_param *evt_params = NULL;
  338. struct cam_hw_inject_buffer_error_param *buf_err_params = NULL;
  339. int rc = 0;
  340. if (!ctx || !evt_args) {
  341. CAM_ERR(CAM_ICP,
  342. "invalid params ctx %s event args %s",
  343. CAM_IS_NULL_TO_STR(ctx), CAM_IS_NULL_TO_STR(evt_args));
  344. return -EINVAL;
  345. }
  346. evt_params = (struct cam_hw_inject_evt_param *)evt_args;
  347. if (evt_params->inject_id == CAM_COMMON_EVT_INJECT_BUFFER_ERROR_TYPE) {
  348. buf_err_params = &evt_params->u.buf_err_evt;
  349. if (buf_err_params->sync_error > CAM_SYNC_ICP_EVENT_START ||
  350. buf_err_params->sync_error < CAM_SYNC_ICP_EVENT_END) {
  351. CAM_INFO(CAM_ICP, "Inject buffer sync error %u ctx id: %u req id %llu",
  352. buf_err_params->sync_error, ctx->ctx_id, evt_params->req_id);
  353. } else {
  354. CAM_ERR(CAM_ICP, "Invalid buffer sync error %u ctx id: %u req id %llu",
  355. buf_err_params->sync_error, ctx->ctx_id, evt_params->req_id);
  356. return -EINVAL;
  357. }
  358. } else {
  359. rc = cam_icp_context_validate_event_notify_injection(ctx, evt_params);
  360. if (rc) {
  361. CAM_ERR(CAM_ICP,
  362. "Event notification injection failed validation rc: %d", rc);
  363. return -EINVAL;
  364. }
  365. }
  366. if (ctx->hw_mgr_intf->hw_inject_evt)
  367. ctx->hw_mgr_intf->hw_inject_evt(ctx->ctxt_to_hw_map, evt_args);
  368. return rc;
  369. }
/*
 * Per-state ops table for the ICP context, indexed by enum cam_ctx_state.
 * Only the ioctls legal in each state are wired up; everything else is
 * left NULL so the context core rejects it.
 */
static struct cam_ctx_ops
	cam_icp_ctx_state_machine[CAM_CTX_STATE_MAX] = {
	/* Uninit */
	{
		.ioctl_ops = {},
		.crm_ops = {},
		.irq_ops = NULL,
	},
	/* Available */
	{
		.ioctl_ops = {
			.acquire_dev = __cam_icp_acquire_dev_in_available,
		},
		.crm_ops = {},
		.irq_ops = NULL,
		.mini_dump_ops = cam_icp_context_mini_dump,
	},
	/* Acquired */
	{
		.ioctl_ops = {
			.release_dev = __cam_icp_release_dev_in_acquired,
			.start_dev = __cam_icp_start_dev_in_acquired,
			.config_dev = __cam_icp_config_dev_in_ready,
			.flush_dev = __cam_icp_flush_dev_in_ready,
			.dump_dev = __cam_icp_dump_dev_in_ready,
		},
		.crm_ops = {},
		.irq_ops = __cam_icp_ctx_handle_hw_event,
		.pagefault_ops = cam_icp_context_dump_active_request,
		.mini_dump_ops = cam_icp_context_mini_dump,
		.evt_inject_ops = cam_icp_context_inject_evt,
	},
	/* Ready */
	{
		.ioctl_ops = {
			.stop_dev = __cam_icp_stop_dev_in_ready,
			.release_dev = __cam_icp_release_dev_in_ready,
			.config_dev = __cam_icp_config_dev_in_ready,
			.flush_dev = __cam_icp_flush_dev_in_ready,
			.dump_dev = __cam_icp_dump_dev_in_ready,
		},
		.crm_ops = {},
		.irq_ops = __cam_icp_ctx_handle_hw_event,
		.pagefault_ops = cam_icp_context_dump_active_request,
		.mini_dump_ops = cam_icp_context_mini_dump,
		.evt_inject_ops = cam_icp_context_inject_evt,
	},
	/* Flushed */
	{
		.ioctl_ops = {},
	},
	/* Activated */
	{
		.ioctl_ops = {},
		.crm_ops = {},
		.irq_ops = NULL,
		.pagefault_ops = cam_icp_context_dump_active_request,
		.mini_dump_ops = cam_icp_context_mini_dump,
		.evt_inject_ops = cam_icp_context_inject_evt,
	},
};
  431. int cam_icp_context_init(struct cam_icp_context *ctx,
  432. struct cam_hw_mgr_intf *hw_intf, uint32_t ctx_id, int img_iommu_hdl)
  433. {
  434. int rc;
  435. if ((!ctx) || (!ctx->base) || (!hw_intf)) {
  436. CAM_ERR(CAM_ICP, "Invalid params: %pK %pK", ctx, hw_intf);
  437. rc = -EINVAL;
  438. goto err;
  439. }
  440. rc = cam_context_init(ctx->base, icp_dev_name, CAM_ICP, ctx_id,
  441. NULL, hw_intf, ctx->req_base, CAM_CTX_ICP_REQ_MAX, img_iommu_hdl);
  442. if (rc) {
  443. CAM_ERR(CAM_ICP, "Camera Context Base init failed");
  444. goto err;
  445. }
  446. ctx->base->state_machine = cam_icp_ctx_state_machine;
  447. ctx->base->ctx_priv = ctx;
  448. ctx->base->max_hw_update_entries = CAM_CTX_CFG_MAX;
  449. ctx->base->max_in_map_entries = CAM_CTX_CFG_MAX;
  450. ctx->base->max_out_map_entries = CAM_CTX_CFG_MAX;
  451. ctx->ctxt_to_hw_map = NULL;
  452. err:
  453. return rc;
  454. }
  455. int cam_icp_context_deinit(struct cam_icp_context *ctx)
  456. {
  457. if ((!ctx) || (!ctx->base)) {
  458. CAM_ERR(CAM_ICP, "Invalid params: %pK", ctx);
  459. return -EINVAL;
  460. }
  461. cam_context_deinit(ctx->base);
  462. memset(ctx, 0, sizeof(*ctx));
  463. return 0;
  464. }