algapi.h

/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/align.h>
#include <linux/crypto.h>
#include <linux/kconfig.h>
#include <linux/list.h>
#include <linux/types.h>

#include <asm/unaligned.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE	160
#define MAX_ALGAPI_ALIGNMASK	63
#define MAX_CIPHER_BLOCKSIZE	16
#define MAX_CIPHER_ALIGNMASK	15
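
/*
 * Usage sketch: these limits exist so callers can use fixed-size
 * on-stack buffers instead of per-algorithm allocations. A common
 * pattern (mirroring crypto/cipher.c) is to over-allocate by the
 * worst-case alignmask and align the pointer by hand; "alignmask"
 * below is assumed to come from crypto_tfm_alg_alignmask().
 */
#if 0
        u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
        u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

        /* tmp now points at MAX_CIPHER_BLOCKSIZE suitably aligned bytes */
#endif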
struct crypto_aead;
struct crypto_instance;
struct module;
struct notifier_block;
struct rtattr;
struct seq_file;
struct sk_buff;

struct crypto_type {
        unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
        unsigned int (*extsize)(struct crypto_alg *alg);
        int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
        int (*init_tfm)(struct crypto_tfm *tfm);
        void (*show)(struct seq_file *m, struct crypto_alg *alg);
        int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
        void (*free)(struct crypto_instance *inst);

        unsigned int type;
        unsigned int maskclear;
        unsigned int maskset;
        unsigned int tfmsize;
};
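
/*
 * Sketch of how a frontend might fill in struct crypto_type. All
 * "example" names (including CRYPTO_ALG_TYPE_EXAMPLE and struct
 * crypto_example) are hypothetical; real frontends such as shash,
 * skcipher and aead each define their own static instance.
 */
#if 0
static const struct crypto_type crypto_example_type = {
        .extsize        = example_extsize,      /* ctx size beyond the tfm */
        .init_tfm       = example_init_tfm,     /* per-tfm initialisation */
        .maskclear      = ~CRYPTO_ALG_TYPE_MASK,
        .maskset        = CRYPTO_ALG_TYPE_MASK,
        .type           = CRYPTO_ALG_TYPE_EXAMPLE,      /* hypothetical */
        .tfmsize        = offsetof(struct crypto_example, base),
};
#endif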
struct crypto_instance {
        struct crypto_alg alg;

        struct crypto_template *tmpl;

        union {
                /* Node in list of instances after registration. */
                struct hlist_node list;
                /* List of attached spawns before registration. */
                struct crypto_spawn *spawns;
        };

        void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
        struct list_head list;
        struct hlist_head instances;
        struct module *module;

        int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

        char name[CRYPTO_MAX_ALG_NAME];
};
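
/*
 * Template definition sketch; the "example" names are hypothetical and
 * THIS_MODULE assumes <linux/module.h>. ->create() parses the rtattr
 * arguments, builds a crypto_instance around the inner algorithm(s),
 * and finishes with crypto_register_instance().
 */
#if 0
static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        /* parse tb, allocate an instance, grab inner algorithms
         * (spawns), then crypto_register_instance(tmpl, inst) */
        return -ENOSYS;         /* placeholder body */
}

static struct crypto_template example_tmpl = {
        .name   = "example",
        .create = example_create,
        .module = THIS_MODULE,
};

/* registered/unregistered from module init/exit via
 * crypto_register_template() and crypto_unregister_template() */
#endif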
struct crypto_spawn {
        struct list_head list;
        struct crypto_alg *alg;
        union {
                /* Back pointer to instance after registration. */
                struct crypto_instance *inst;
                /* Spawn list pointer prior to registration. */
                struct crypto_spawn *next;
        };
        const struct crypto_type *frontend;
        u32 mask;
        bool dead;
        bool registered;
};

struct crypto_queue {
        struct list_head list;
        struct list_head *backlog;

        unsigned int qlen;
        unsigned int max_qlen;
};

struct scatter_walk {
        struct scatterlist *sg;
        unsigned int offset;
};

struct crypto_attr_alg {
        char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
        u32 type;
        u32 mask;
};

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
int crypto_register_templates(struct crypto_template *tmpls, int count);
void crypto_unregister_template(struct crypto_template *tmpl);
void crypto_unregister_templates(struct crypto_template *tmpls, int count);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
                             struct crypto_instance *inst);
void crypto_unregister_instance(struct crypto_instance *inst);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
                      const char *name, u32 type, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
                                    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);
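
/*
 * Spawn usage sketch, as it might appear inside a template's
 * ->create(). The bare struct crypto_spawn (rather than a typed
 * wrapper such as crypto_skcipher_spawn) and the surrounding "inst",
 * "tb" and "mask" variables are illustrative assumptions.
 */
#if 0
        struct crypto_spawn *spawn = crypto_instance_ctx(inst);
        int err;

        err = crypto_grab_spawn(spawn, inst, crypto_attr_alg_name(tb[1]),
                                0, mask);
        if (err)
                return err;

        /* on any later failure path, release the reference: */
        crypto_drop_spawn(spawn);
#endif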
struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
const char *crypto_attr_alg_name(struct rtattr *rta);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
                        struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
                           struct crypto_async_request *request);
void crypto_enqueue_request_head(struct crypto_queue *queue,
                                 struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
        return queue->qlen;
}
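
/*
 * Queue usage sketch: a driver typically owns one crypto_queue plus a
 * lock around it (locking is the caller's responsibility and omitted
 * here); max_qlen of 128 is an arbitrary example value.
 */
#if 0
        static struct crypto_queue queue;

        crypto_init_queue(&queue, 128);

        /* returns -EINPROGRESS when queued; when full, -ENOSPC unless
         * the submitter set CRYPTO_TFM_REQ_MAY_BACKLOG, in which case
         * the request is backlogged and -EBUSY is returned */
        err = crypto_enqueue_request(&queue, req);

        /* later, from the processing context: */
        req = crypto_dequeue_request(&queue);
#endif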
void crypto_inc(u8 *a, unsigned int size);
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size);

static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
{
        if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
            __builtin_constant_p(size) &&
            (size % sizeof(unsigned long)) == 0) {
                unsigned long *d = (unsigned long *)dst;
                unsigned long *s = (unsigned long *)src;
                unsigned long l;

                while (size > 0) {
                        l = get_unaligned(d) ^ get_unaligned(s++);
                        put_unaligned(l, d++);
                        size -= sizeof(unsigned long);
                }
        } else {
                __crypto_xor(dst, dst, src, size);
        }
}

static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2,
                                  unsigned int size)
{
        if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
            __builtin_constant_p(size) &&
            (size % sizeof(unsigned long)) == 0) {
                unsigned long *d = (unsigned long *)dst;
                unsigned long *s1 = (unsigned long *)src1;
                unsigned long *s2 = (unsigned long *)src2;
                unsigned long l;

                while (size > 0) {
                        l = get_unaligned(s1++) ^ get_unaligned(s2++);
                        put_unaligned(l, d++);
                        size -= sizeof(unsigned long);
                }
        } else {
                __crypto_xor(dst, src1, src2, size);
        }
}
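
/*
 * XOR helper usage sketch, e.g. for a stream-cipher style operation;
 * the buffer names are hypothetical and 16 is just an example length.
 */
#if 0
        u8 buf[16], pt[16], ct[16], keystream[16];

        crypto_xor(buf, keystream, sizeof(buf));        /* buf ^= keystream */
        crypto_xor_cpy(ct, pt, keystream, sizeof(ct));  /* ct = pt ^ keystream */
#endif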
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
        return PTR_ALIGN(crypto_tfm_ctx(tfm),
                         crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
        struct crypto_tfm *tfm)
{
        return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
        return inst->__ctx;
}

static inline struct crypto_async_request *crypto_get_backlog(
        struct crypto_queue *queue)
{
        return queue->backlog == &queue->list ? NULL :
               container_of(queue->backlog, struct crypto_async_request, list);
}

static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
{
        return (algt->type ^ off) & algt->mask & off;
}

/*
 * When an algorithm uses another algorithm (e.g., if it's an instance of a
 * template), these are the flags that should always be set on the "outer"
 * algorithm if any "inner" algorithm has them set.
 */
#define CRYPTO_ALG_INHERITED_FLAGS      \
        (CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK | \
         CRYPTO_ALG_ALLOCATES_MEMORY)

/*
 * Given the type and mask that specify the flags restrictions on a template
 * instance being created, return the mask that should be passed to
 * crypto_grab_*() (along with type=0) to honor any request the user made to
 * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
 */
static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
{
        return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
}
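
/*
 * Sketch of the intended call sequence in a template's ->create();
 * error handling is abbreviated, IS_ERR/PTR_ERR assume <linux/err.h>,
 * and "tb", "spawn" and "inst" are assumed to exist in the caller.
 */
#if 0
        struct crypto_attr_type *algt;
        u32 mask;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        mask = crypto_algt_inherited_mask(algt);

        /* type == 0: constrain the inner algorithm only via the mask */
        err = crypto_grab_spawn(spawn, inst, crypto_attr_alg_name(tb[1]),
                                0, mask);
#endif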
noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *                 timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
        return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}
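
/*
 * Typical crypto_memneq() use: comparing an authentication tag in
 * constant time during decryption (a sketch; the tag variables are
 * illustrative).
 */
#if 0
        if (crypto_memneq(computed_tag, received_tag, taglen))
                return -EBADMSG;        /* never memcmp() secrets */
#endif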
int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
        CRYPTO_MSG_ALG_REQUEST,
        CRYPTO_MSG_ALG_REGISTER,
        CRYPTO_MSG_ALG_LOADED,
};

static inline void crypto_request_complete(struct crypto_async_request *req,
                                           int err)
{
        crypto_completion_t complete = req->complete;

        complete(req, err);
}
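
/*
 * Completion sketch: a driver finishing one request and then kicking
 * any backlogged submitter, which was parked with -EBUSY and must be
 * notified with -EINPROGRESS before its final completion. "queue" is
 * assumed to be the driver's own crypto_queue from the sketch above.
 */
#if 0
        struct crypto_async_request *backlog;

        backlog = crypto_get_backlog(&queue);
        if (backlog)
                crypto_request_complete(backlog, -EINPROGRESS);

        crypto_request_complete(req, 0);        /* report success */
#endif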
#endif  /* _CRYPTO_ALGAPI_H */