/* include/crypto/engine.h */
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Crypto engine API
 *
 * Copyright (c) 2016 Baolin Wang <[email protected]>
 */
#ifndef _CRYPTO_ENGINE_H
#define _CRYPTO_ENGINE_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kthread.h>
#include <linux/spinlock.h>
#include <linux/types.h>

#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/akcipher.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <crypto/kpp.h>

/* Forward declaration only: a pointer to struct device suffices here. */
struct device;

/* Maximum length (including the NUL terminator) of an engine name. */
#define ENGINE_NAME_LEN	30
/*
 * struct crypto_engine - crypto hardware engine
 * @name: the engine name
 * @idling: the engine is entering idle state
 * @busy: request pump is busy
 * @running: the engine is running
 * @retry_support: indication that the hardware allows re-execution
 * of a failed backlog request; such a request is requeued on the
 * crypto-engine, in head position to keep order
 * @list: link with the global crypto engine list
 * @queue_lock: spinlock to synchronise access to request queue
 * @queue: the crypto queue of the engine
 * @dev: the device the engine is attached to
 * @rt: whether this queue is set to run as a realtime task
 * @prepare_crypt_hardware: a request will soon arrive from the queue
 * so the subsystem requests the driver to prepare the hardware
 * by issuing this call
 * @unprepare_crypt_hardware: there are currently no more requests on the
 * queue so the subsystem notifies the driver that it may relax the
 * hardware by issuing this call
 * @do_batch_requests: execute a batch of requests. Depends on multiple
 * requests support.
 * @kworker: kthread worker struct for request pump
 * @pump_requests: work struct for scheduling work to the request pump
 * @priv_data: the engine private data
 * @cur_req: the current request which is on processing
 */
struct crypto_engine {
	char			name[ENGINE_NAME_LEN];
	bool			idling;
	bool			busy;
	bool			running;

	bool			retry_support;

	struct list_head	list;
	spinlock_t		queue_lock;
	struct crypto_queue	queue;
	struct device		*dev;

	bool			rt;

	int (*prepare_crypt_hardware)(struct crypto_engine *engine);
	int (*unprepare_crypt_hardware)(struct crypto_engine *engine);
	int (*do_batch_requests)(struct crypto_engine *engine);

	struct kthread_worker		*kworker;
	struct kthread_work		pump_requests;

	void				*priv_data;
	struct crypto_async_request	*cur_req;
};
/*
 * struct crypto_engine_op - crypto hardware engine operations
 * @prepare_request: do some preparation if needed before handling the current request
 * @unprepare_request: undo any work done by prepare_request()
 * @do_one_request: do encryption for current request
 */
struct crypto_engine_op {
	int (*prepare_request)(struct crypto_engine *engine,
			       void *areq);
	int (*unprepare_request)(struct crypto_engine *engine,
				 void *areq);
	int (*do_one_request)(struct crypto_engine *engine,
			      void *areq);
};
/*
 * struct crypto_engine_ctx - engine operations embedded in a transform context
 * @op: the callbacks the crypto engine invokes for this transform
 *
 * NOTE(review): the engine presumably locates @op through the transform's
 * context, so drivers likely need to place this at a known offset in their
 * ctx — confirm against the crypto-engine core before relying on layout.
 */
struct crypto_engine_ctx {
	struct crypto_engine_op op;
};
/*
 * Transfer helpers: queue a request of the given algorithm type on @engine
 * for asynchronous processing by the engine's request pump.
 */
int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine,
					   struct aead_request *req);
int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine,
					       struct akcipher_request *req);
int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine,
					   struct ahash_request *req);
int crypto_transfer_kpp_request_to_engine(struct crypto_engine *engine,
					  struct kpp_request *req);
int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
					       struct skcipher_request *req);

/*
 * Finalize helpers: called by the driver to report completion of a request
 * previously handed to @engine; @err is 0 on success or a negative errno.
 */
void crypto_finalize_aead_request(struct crypto_engine *engine,
				  struct aead_request *req, int err);
void crypto_finalize_akcipher_request(struct crypto_engine *engine,
				      struct akcipher_request *req, int err);
void crypto_finalize_hash_request(struct crypto_engine *engine,
				  struct ahash_request *req, int err);
void crypto_finalize_kpp_request(struct crypto_engine *engine,
				 struct kpp_request *req, int err);
void crypto_finalize_skcipher_request(struct crypto_engine *engine,
				      struct skcipher_request *req, int err);

/* Start/stop the engine's request-processing pump. */
int crypto_engine_start(struct crypto_engine *engine);
int crypto_engine_stop(struct crypto_engine *engine);

/*
 * Lifecycle: allocate/initialise an engine bound to @dev, optionally with
 * backlog-retry support, a batch callback, realtime pumping and a custom
 * queue length; crypto_engine_exit() tears the engine down.
 */
struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt);
struct crypto_engine *crypto_engine_alloc_init_and_set(struct device *dev,
						       bool retry_support,
						       int (*cbk_do_batch)(struct crypto_engine *engine),
						       bool rt, int qlen);
int crypto_engine_exit(struct crypto_engine *engine);

#endif /* _CRYPTO_ENGINE_H */