/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <[email protected]>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <crypto/skcipher.h>
#include <linux/list.h>
#include <linux/types.h>

struct aead_request;
struct rtattr;

struct skcipher_instance {
        void (*free)(struct skcipher_instance *inst);
        union {
                struct {
                        char head[offsetof(struct skcipher_alg, base)];
                        struct crypto_instance base;
                } s;
                struct skcipher_alg alg;
        };
};
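/*
 * The anonymous union above lets a template view its instance either as
 * a bare struct crypto_instance (via @s.base) or as a complete
 * struct skcipher_alg (via @alg): the @head padding is sized so that
 * @s.base lines up with @alg.base, making both views alias the same
 * embedded crypto_instance.
 */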
struct crypto_skcipher_spawn {
        struct crypto_spawn base;
};

struct skcipher_walk {
        union {
                struct {
                        struct page *page;
                        unsigned long offset;
                } phys;

                struct {
                        u8 *page;
                        void *addr;
                } virt;
        } src, dst;

        struct scatter_walk in;
        unsigned int nbytes;

        struct scatter_walk out;
        unsigned int total;

        struct list_head buffers;

        u8 *page;
        u8 *buffer;
        u8 *oiv;
        void *iv;

        unsigned int ivsize;

        int flags;
        unsigned int blocksize;
        unsigned int stride;
        unsigned int alignmask;
};

static inline struct crypto_instance *skcipher_crypto_instance(
        struct skcipher_instance *inst)
{
        return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
        struct crypto_skcipher *skcipher)
{
        return container_of(crypto_skcipher_alg(skcipher),
                            struct skcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
        return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
        req->base.complete(&req->base, err);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
                         struct crypto_instance *inst,
                         const char *name, u32 type, u32 mask);

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
        crypto_drop_spawn(&spawn->base);
}

static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
        struct crypto_skcipher_spawn *spawn)
{
        return container_of(spawn->base.alg, struct skcipher_alg, base);
}

static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
        struct crypto_skcipher_spawn *spawn)
{
        return crypto_skcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
        struct crypto_skcipher_spawn *spawn)
{
        return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_skcipher_set_reqsize(
        struct crypto_skcipher *skcipher, unsigned int reqsize)
{
        skcipher->reqsize = reqsize;
}
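/*
 * Example (an illustrative sketch; my_req_ctx and my_init_tfm are
 * hypothetical names, not part of this API): a driver typically reserves
 * its per-request context from its ->init() callback so that
 * skcipher_request_ctx() later returns usable memory:
 *
 *      static int my_init_tfm(struct crypto_skcipher *tfm)
 *      {
 *              crypto_skcipher_set_reqsize(tfm, sizeof(struct my_req_ctx));
 *              return 0;
 *      }
 */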
int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
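/*
 * Example (an illustrative sketch; my_algs is a hypothetical array of
 * struct skcipher_alg): drivers commonly register all their algorithms
 * in one call at module init and mirror it on exit:
 *
 *      static int __init my_module_init(void)
 *      {
 *              return crypto_register_skciphers(my_algs, ARRAY_SIZE(my_algs));
 *      }
 *
 *      static void __exit my_module_exit(void)
 *      {
 *              crypto_unregister_skciphers(my_algs, ARRAY_SIZE(my_algs));
 *      }
 */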
int skcipher_register_instance(struct crypto_template *tmpl,
                               struct skcipher_instance *inst);

int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
                       struct skcipher_request *req,
                       bool atomic);
int skcipher_walk_async(struct skcipher_walk *walk,
                        struct skcipher_request *req);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
                               struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
                               struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);

static inline void skcipher_walk_abort(struct skcipher_walk *walk)
{
        skcipher_walk_done(walk, -ECANCELED);
}
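/*
 * Example (an illustrative sketch; my_encrypt and my_crypt_blocks are
 * hypothetical names): the usual shape of an skcipher ->encrypt()
 * built on the walk API. skcipher_walk_virt() maps the request's
 * scatterlists; each iteration sees a contiguous span of walk.nbytes
 * bytes, only whole blocks are processed, and skcipher_walk_done() is
 * told how many bytes were left unprocessed:
 *
 *      static int my_encrypt(struct skcipher_request *req)
 *      {
 *              struct skcipher_walk walk;
 *              int err;
 *
 *              err = skcipher_walk_virt(&walk, req, false);
 *              while (walk.nbytes) {
 *                      unsigned int n;
 *
 *                      n = walk.nbytes - walk.nbytes % walk.blocksize;
 *                      my_crypt_blocks(walk.dst.virt.addr,
 *                                      walk.src.virt.addr, n, walk.iv);
 *                      err = skcipher_walk_done(&walk, walk.nbytes - n);
 *              }
 *              return err;
 *      }
 */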
static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
        return crypto_tfm_ctx(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
        return req->__ctx;
}

static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
        return req->base.flags;
}

static inline unsigned int crypto_skcipher_alg_min_keysize(
        struct skcipher_alg *alg)
{
        return alg->min_keysize;
}

static inline unsigned int crypto_skcipher_alg_max_keysize(
        struct skcipher_alg *alg)
{
        return alg->max_keysize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
        struct skcipher_alg *alg)
{
        return alg->walksize;
}

/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal if the concern does not
 * apply).
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
        struct crypto_skcipher *tfm)
{
        return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}

/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
        struct crypto_cipher *cipher;   /* underlying block cipher */
};

static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
        struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

        return ctx->cipher;
}

struct skcipher_instance *skcipher_alloc_instance_simple(
        struct crypto_template *tmpl, struct rtattr **tb);

static inline struct crypto_alg *skcipher_ialg_simple(
        struct skcipher_instance *inst)
{
        struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);

        return crypto_spawn_cipher_alg(spawn);
}
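/*
 * Example (an illustrative sketch modelled on the style of crypto/cbc.c;
 * the my_mode_* names are hypothetical): a simple mode template only has
 * to allocate the instance, fill in its encrypt/decrypt handlers and
 * register it:
 *
 *      static int my_mode_create(struct crypto_template *tmpl,
 *                                struct rtattr **tb)
 *      {
 *              struct skcipher_instance *inst;
 *              int err;
 *
 *              inst = skcipher_alloc_instance_simple(tmpl, tb);
 *              if (IS_ERR(inst))
 *                      return PTR_ERR(inst);
 *
 *              inst->alg.encrypt = my_mode_encrypt;
 *              inst->alg.decrypt = my_mode_decrypt;
 *
 *              err = skcipher_register_instance(tmpl, inst);
 *              if (err)
 *                      inst->free(inst);
 *              return err;
 *      }
 */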
#endif  /* _CRYPTO_INTERNAL_SKCIPHER_H */