
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * SHA-256, as specified in
 * http://csrc.nist.gov/groups/STM/cavp/documents/shs/sha256-384-512.pdf
 *
 * SHA-256 code by Jean-Luc Cooke <[email protected]>.
 *
 * Copyright (c) Jean-Luc Cooke <[email protected]>
 * Copyright (c) Andrew McDonald <[email protected]>
 * Copyright (c) 2002 James Morris <[email protected]>
 * Copyright (c) 2014 Red Hat Inc.
 */

#include <linux/bitops.h>
#include <linux/export.h>
#include <linux/module.h>
#include <linux/string.h>
#include <crypto/sha2.h>
#include <asm/unaligned.h>
#include <trace/hooks/fips140.h>
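
/*
 * Added commentary (not in the upstream file): SHA256_K holds the 64 round
 * constants from FIPS 180-4, i.e. the first 32 bits of the fractional parts
 * of the cube roots of the first 64 prime numbers.
 */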
static const u32 SHA256_K[] = {
        0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
        0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
        0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
        0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
        0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc,
        0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
        0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7,
        0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
        0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
        0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
        0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3,
        0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
        0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5,
        0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
        0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
        0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2,
};

/* Ch and Maj are the "choice" and "majority" functions from FIPS 180-4. */
static inline u32 Ch(u32 x, u32 y, u32 z)
{
        return z ^ (x & (y ^ z));
}

static inline u32 Maj(u32 x, u32 y, u32 z)
{
        return (x & y) | (z & (x | y));
}

/* e0/e1 are the big-sigma functions, s0/s1 the small-sigma functions. */
#define e0(x) (ror32(x, 2) ^ ror32(x, 13) ^ ror32(x, 22))
#define e1(x) (ror32(x, 6) ^ ror32(x, 11) ^ ror32(x, 25))
#define s0(x) (ror32(x, 7) ^ ror32(x, 18) ^ (x >> 3))
#define s1(x) (ror32(x, 17) ^ ror32(x, 19) ^ (x >> 10))

/* LOAD_OP reads one big-endian 32-bit message word from the input block. */
static inline void LOAD_OP(int I, u32 *W, const u8 *input)
{
        W[I] = get_unaligned_be32((__u32 *)input + I);
}

/* BLEND_OP expands the message schedule: W[16..63] from earlier words. */
static inline void BLEND_OP(int I, u32 *W)
{
        W[I] = s1(W[I-2]) + W[I-7] + s0(W[I-15]) + W[I-16];
}

#define SHA256_ROUND(i, a, b, c, d, e, f, g, h) do {            \
        u32 t1, t2;                                             \
        t1 = h + e1(e) + Ch(e, f, g) + SHA256_K[i] + W[i];      \
        t2 = e0(a) + Maj(a, b, c);                              \
        d += t1;                                                \
        h = t1 + t2;                                            \
} while (0)
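
/*
 * Added commentary (not in the upstream file): sha256_transform() unrolls
 * the 64 compression rounds eight at a time. Rather than shuffling the
 * working variables a..h after every round, each SHA256_ROUND invocation
 * passes them in a rotated order, so after eight rounds the names line up
 * with their original roles again.
 */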
static void sha256_transform(u32 *state, const u8 *input, u32 *W)
{
        u32 a, b, c, d, e, f, g, h;
        int i;

        /* load the input */
        for (i = 0; i < 16; i += 8) {
                LOAD_OP(i + 0, W, input);
                LOAD_OP(i + 1, W, input);
                LOAD_OP(i + 2, W, input);
                LOAD_OP(i + 3, W, input);
                LOAD_OP(i + 4, W, input);
                LOAD_OP(i + 5, W, input);
                LOAD_OP(i + 6, W, input);
                LOAD_OP(i + 7, W, input);
        }

        /* now blend */
        for (i = 16; i < 64; i += 8) {
                BLEND_OP(i + 0, W);
                BLEND_OP(i + 1, W);
                BLEND_OP(i + 2, W);
                BLEND_OP(i + 3, W);
                BLEND_OP(i + 4, W);
                BLEND_OP(i + 5, W);
                BLEND_OP(i + 6, W);
                BLEND_OP(i + 7, W);
        }

        /* load the state into our registers */
        a = state[0]; b = state[1]; c = state[2]; d = state[3];
        e = state[4]; f = state[5]; g = state[6]; h = state[7];

        /* now iterate */
        for (i = 0; i < 64; i += 8) {
                SHA256_ROUND(i + 0, a, b, c, d, e, f, g, h);
                SHA256_ROUND(i + 1, h, a, b, c, d, e, f, g);
                SHA256_ROUND(i + 2, g, h, a, b, c, d, e, f);
                SHA256_ROUND(i + 3, f, g, h, a, b, c, d, e);
                SHA256_ROUND(i + 4, e, f, g, h, a, b, c, d);
                SHA256_ROUND(i + 5, d, e, f, g, h, a, b, c);
                SHA256_ROUND(i + 6, c, d, e, f, g, h, a, b);
                SHA256_ROUND(i + 7, b, c, d, e, f, g, h, a);
        }

        state[0] += a; state[1] += b; state[2] += c; state[3] += d;
        state[4] += e; state[5] += f; state[6] += g; state[7] += h;
}

void sha256_update(struct sha256_state *sctx, const u8 *data, unsigned int len)
{
        unsigned int partial, done;
        const u8 *src;
        u32 W[64];

        partial = sctx->count & 0x3f;
        sctx->count += len;
        done = 0;
        src = data;

        if ((partial + len) > 63) {
                if (partial) {
                        /*
                         * Fill the partially buffered block up to 64 bytes
                         * and process it first; 'done' starts at -partial so
                         * that after the first 'done += 64' below it points
                         * just past the bytes already consumed from 'data'.
                         */
                        done = -partial;
                        memcpy(sctx->buf + partial, data, done + 64);
                        src = sctx->buf;
                }

                do {
                        sha256_transform(sctx->state, src, W);
                        done += 64;
                        src = data + done;
                } while (done + 63 < len);

                memzero_explicit(W, sizeof(W));

                partial = 0;
        }
        memcpy(sctx->buf + partial, src, len - done);
}
EXPORT_SYMBOL(sha256_update);

void sha224_update(struct sha256_state *sctx, const u8 *data, unsigned int len)
{
        sha256_update(sctx, data, len);
}
EXPORT_SYMBOL(sha224_update);

static void __sha256_final(struct sha256_state *sctx, u8 *out, int digest_words)
{
        __be32 *dst = (__be32 *)out;
        __be64 bits;
        unsigned int index, pad_len;
        int i;
        static const u8 padding[64] = { 0x80, };

        /* Save number of bits */
        bits = cpu_to_be64(sctx->count << 3);

        /* Pad out to 56 mod 64. */
        index = sctx->count & 0x3f;
        pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
        sha256_update(sctx, padding, pad_len);

        /* Append length (before padding) */
        sha256_update(sctx, (const u8 *)&bits, sizeof(bits));

        /* Store state in digest */
        for (i = 0; i < digest_words; i++)
                put_unaligned_be32(sctx->state[i], &dst[i]);

        /* Zeroize sensitive information. */
        memzero_explicit(sctx, sizeof(*sctx));
}

void sha256_final(struct sha256_state *sctx, u8 *out)
{
        __sha256_final(sctx, out, 8);
}
EXPORT_SYMBOL(sha256_final);

void sha224_final(struct sha256_state *sctx, u8 *out)
{
        __sha256_final(sctx, out, 7);
}
EXPORT_SYMBOL(sha224_final);
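
/*
 * Illustrative sketch (added, not part of the upstream file): how a kernel
 * caller might use the incremental interface above when data arrives in
 * pieces. The function name and the example message are hypothetical; the
 * block is compiled out with #if 0 and assumes only the declarations in
 * <crypto/sha2.h>.
 */
#if 0
static void sha256_streaming_example(void)
{
        static const u8 part1[] = "hello ";
        static const u8 part2[] = "world";
        struct sha256_state sctx;
        u8 digest[SHA256_DIGEST_SIZE];

        sha256_init(&sctx);
        sha256_update(&sctx, part1, sizeof(part1) - 1);
        sha256_update(&sctx, part2, sizeof(part2) - 1);
        sha256_final(&sctx, digest);
        /* digest now holds the 32-byte SHA-256 of "hello world" */
}
#endif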

void sha256(const u8 *data, unsigned int len, u8 *out)
{
        struct sha256_state sctx;
#ifndef __DISABLE_EXPORTS
        int hook_inuse = 0;

        /*
         * Android vendor hook (see <trace/hooks/fips140.h>): if a registered
         * handler computes the digest, skip the built-in implementation.
         */
        trace_android_vh_sha256(data, len, out, &hook_inuse);
        if (hook_inuse)
                return;
#endif

        sha256_init(&sctx);
        sha256_update(&sctx, data, len);
        sha256_final(&sctx, out);
}
EXPORT_SYMBOL(sha256);
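
/*
 * Illustrative sketch (added, not part of the upstream file): one-shot use
 * of sha256() above. The function name is hypothetical and the block is
 * compiled out with #if 0. For reference, SHA-256("abc") begins
 * ba7816bf 8f01cfea ... (the standard FIPS 180-4 test vector).
 */
#if 0
static void sha256_oneshot_example(void)
{
        static const u8 msg[] = "abc";
        u8 digest[SHA256_DIGEST_SIZE];

        sha256(msg, sizeof(msg) - 1, digest);
        /* digest[0..31] now holds the SHA-256 of "abc" */
}
#endif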

MODULE_LICENSE("GPL");