/* (extraction artifact removed: web-viewer file header and line-number gutter) */
  1. // SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
  2. /* Copyright (C) 2016-2022 Jason A. Donenfeld <[email protected]>. All Rights Reserved.
  3. *
  4. * SipHash: a fast short-input PRF
  5. * https://131002.net/siphash/
  6. *
  7. * This implementation is specifically for SipHash2-4 for a secure PRF
  8. * and HalfSipHash1-3/SipHash1-3 for an insecure PRF only suitable for
  9. * hashtables.
  10. */
  11. #include <linux/siphash.h>
  12. #include <asm/unaligned.h>
  13. #if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
  14. #include <linux/dcache.h>
  15. #include <asm/word-at-a-time.h>
  16. #endif
/* One round of the SipHash permutation over the 64-bit state words v0..v3. */
#define SIPROUND SIPHASH_PERMUTATION(v0, v1, v2, v3)
/*
 * Shared prologue: declares the four state words seeded from the SipHash
 * constants, mixes in the 128-bit key, and pre-loads the final block 'b'
 * with the input length in its top byte (len mod 256, per the spec).
 * Expands to declarations, so it must appear before any statements.
 */
#define PREAMBLE(len) \
	u64 v0 = SIPHASH_CONST_0; \
	u64 v1 = SIPHASH_CONST_1; \
	u64 v2 = SIPHASH_CONST_2; \
	u64 v3 = SIPHASH_CONST_3; \
	u64 b = ((u64)(len)) << 56; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];
/*
 * Shared epilogue: absorbs the final block with two compression rounds,
 * flips v2, runs the four finalization rounds (SipHash-2-4), and returns
 * the state folded down to 64 bits.
 */
#define POSTAMBLE \
	v3 ^= b; \
	SIPROUND; \
	SIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/*
 * SipHash-2-4 core for 8-byte-aligned buffers. Only built when unaligned
 * loads are not cheap on this architecture; otherwise __siphash_unaligned
 * serves all callers.
 */
u64 __siphash_aligned(const void *data, size_t len, const siphash_key_t *key)
{
	/* end points just past the last complete 8-byte word */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	PREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		/* two compression rounds per input word (SipHash-2-x) */
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/* Load the 1-7 tail bytes in one word access; load_unaligned_zeropad
	 * tolerates reads that run past the end of the mapping, and the
	 * bytemask discards the bytes beyond 'left'. */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Byte-wise tail; note data == end at this point. */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_aligned);
#endif
  72. u64 __siphash_unaligned(const void *data, size_t len, const siphash_key_t *key)
  73. {
  74. const u8 *end = data + len - (len % sizeof(u64));
  75. const u8 left = len & (sizeof(u64) - 1);
  76. u64 m;
  77. PREAMBLE(len)
  78. for (; data != end; data += sizeof(u64)) {
  79. m = get_unaligned_le64(data);
  80. v3 ^= m;
  81. SIPROUND;
  82. SIPROUND;
  83. v0 ^= m;
  84. }
  85. #if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
  86. if (left)
  87. b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
  88. bytemask_from_count(left)));
  89. #else
  90. switch (left) {
  91. case 7: b |= ((u64)end[6]) << 48; fallthrough;
  92. case 6: b |= ((u64)end[5]) << 40; fallthrough;
  93. case 5: b |= ((u64)end[4]) << 32; fallthrough;
  94. case 4: b |= get_unaligned_le32(end); break;
  95. case 3: b |= ((u64)end[2]) << 16; fallthrough;
  96. case 2: b |= get_unaligned_le16(end); break;
  97. case 1: b |= end[0];
  98. }
  99. #endif
  100. POSTAMBLE
  101. }
  102. EXPORT_SYMBOL(__siphash_unaligned);
/**
 * siphash_1u64 - compute 64-bit siphash PRF value of a u64
 * @first: first u64
 * @key: the siphash key
 */
u64 siphash_1u64(const u64 first, const siphash_key_t *key)
{
	PREAMBLE(8)
	/* absorb the single word with two compression rounds */
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u64);
/**
 * siphash_2u64 - compute 64-bit siphash PRF value of 2 u64
 * @first: first u64
 * @second: second u64
 * @key: the siphash key
 */
u64 siphash_2u64(const u64 first, const u64 second, const siphash_key_t *key)
{
	PREAMBLE(16)
	/* each input word gets two compression rounds (SipHash-2-4) */
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_2u64);
/**
 * siphash_3u64 - compute 64-bit siphash PRF value of 3 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @key: the siphash key
 */
u64 siphash_3u64(const u64 first, const u64 second, const u64 third,
		 const siphash_key_t *key)
{
	PREAMBLE(24)
	/* each input word gets two compression rounds (SipHash-2-4) */
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u64);
/**
 * siphash_4u64 - compute 64-bit siphash PRF value of 4 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @forth: forth u64
 * @key: the siphash key
 */
u64 siphash_4u64(const u64 first, const u64 second, const u64 third,
		 const u64 forth, const siphash_key_t *key)
{
	PREAMBLE(32)
	/* each input word gets two compression rounds (SipHash-2-4) */
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	v3 ^= forth;
	SIPROUND;
	SIPROUND;
	v0 ^= forth;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_4u64);
/**
 * siphash_1u32 - compute 64-bit siphash PRF value of a u32
 * @first: first u32
 * @key: the siphash key
 */
u64 siphash_1u32(const u32 first, const siphash_key_t *key)
{
	PREAMBLE(4)
	/* a lone u32 fits entirely in the final block alongside the length */
	b |= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u32);
/**
 * siphash_3u32 - compute 64-bit siphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the siphash key
 */
u64 siphash_3u32(const u32 first, const u32 second, const u32 third,
		 const siphash_key_t *key)
{
	/* pack the first two u32 into one 64-bit word, little-endian order */
	u64 combined = (u64)second << 32 | first;
	PREAMBLE(12)
	v3 ^= combined;
	SIPROUND;
	SIPROUND;
	v0 ^= combined;
	/* the odd trailing u32 rides in the final block */
	b |= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u32);
#if BITS_PER_LONG == 64
/* Note that on 64-bit, we make HalfSipHash1-3 actually be SipHash1-3, for
 * performance reasons. On 32-bit, below, we actually implement HalfSipHash1-3.
 */
#define HSIPROUND SIPROUND
#define HPREAMBLE(len) PREAMBLE(len)
/*
 * SipHash-1-3 epilogue: one compression round for the final block and
 * three finalization rounds, then fold the state to 64 bits (truncated
 * to u32 by the callers' return type).
 */
#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/*
 * Hashtable-only PRF core (SipHash-1-3 on 64-bit) for 8-byte-aligned
 * buffers. Only built when unaligned loads are not cheap.
 */
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	/* end points just past the last complete 8-byte word */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		/* one compression round per word (SipHash-1-x) */
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/* one word-at-a-time load of the tail, masked to 'left' bytes */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* byte-wise tail; data == end here */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif
  262. u32 __hsiphash_unaligned(const void *data, size_t len,
  263. const hsiphash_key_t *key)
  264. {
  265. const u8 *end = data + len - (len % sizeof(u64));
  266. const u8 left = len & (sizeof(u64) - 1);
  267. u64 m;
  268. HPREAMBLE(len)
  269. for (; data != end; data += sizeof(u64)) {
  270. m = get_unaligned_le64(data);
  271. v3 ^= m;
  272. HSIPROUND;
  273. v0 ^= m;
  274. }
  275. #if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
  276. if (left)
  277. b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
  278. bytemask_from_count(left)));
  279. #else
  280. switch (left) {
  281. case 7: b |= ((u64)end[6]) << 48; fallthrough;
  282. case 6: b |= ((u64)end[5]) << 40; fallthrough;
  283. case 5: b |= ((u64)end[4]) << 32; fallthrough;
  284. case 4: b |= get_unaligned_le32(end); break;
  285. case 3: b |= ((u64)end[2]) << 16; fallthrough;
  286. case 2: b |= get_unaligned_le16(end); break;
  287. case 1: b |= end[0];
  288. }
  289. #endif
  290. HPOSTAMBLE
  291. }
  292. EXPORT_SYMBOL(__hsiphash_unaligned);
/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	/* a lone u32 fits entirely in the final block alongside the length */
	b |= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);
/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	/* pack both u32 into one 64-bit word, little-endian order */
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(8)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);
/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	/* pack the first two u32 into one 64-bit word, little-endian order */
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(12)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	/* the odd trailing u32 rides in the final block */
	b |= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);
/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: forth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 forth, const hsiphash_key_t *key)
{
	/* pack pairs of u32 into 64-bit words, little-endian order */
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(16)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	combined = (u64)forth << 32 | third;
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#else
/* 32-bit build: the genuine HalfSipHash-1-3, operating on a 4x32-bit state. */
#define HSIPROUND HSIPHASH_PERMUTATION(v0, v1, v2, v3)
/*
 * HalfSipHash prologue: 32-bit state words seeded from the HalfSipHash
 * constants, mixed with the 64-bit key; 'b' carries the input length
 * (mod 256) in its top byte.
 */
#define HPREAMBLE(len) \
	u32 v0 = HSIPHASH_CONST_0; \
	u32 v1 = HSIPHASH_CONST_1; \
	u32 v2 = HSIPHASH_CONST_2; \
	u32 v3 = HSIPHASH_CONST_3; \
	u32 b = ((u32)(len)) << 24; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];
/*
 * HalfSipHash-1-3 epilogue: one compression round for the final block,
 * three finalization rounds, folded to 32 bits (v1 ^ v3 per the spec).
 */
#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return v1 ^ v3;
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/*
 * HalfSipHash-1-3 core for 4-byte-aligned buffers. Only built when
 * unaligned loads are not cheap on this architecture.
 */
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	/* end points just past the last complete 4-byte word */
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	u32 m;
	HPREAMBLE(len)
	for (; data != end; data += sizeof(u32)) {
		m = le32_to_cpup(data);
		/* one compression round per word */
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	/* byte-wise tail; data == end here */
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif
  406. u32 __hsiphash_unaligned(const void *data, size_t len,
  407. const hsiphash_key_t *key)
  408. {
  409. const u8 *end = data + len - (len % sizeof(u32));
  410. const u8 left = len & (sizeof(u32) - 1);
  411. u32 m;
  412. HPREAMBLE(len)
  413. for (; data != end; data += sizeof(u32)) {
  414. m = get_unaligned_le32(data);
  415. v3 ^= m;
  416. HSIPROUND;
  417. v0 ^= m;
  418. }
  419. switch (left) {
  420. case 3: b |= ((u32)end[2]) << 16; fallthrough;
  421. case 2: b |= get_unaligned_le16(end); break;
  422. case 1: b |= end[0];
  423. }
  424. HPOSTAMBLE
  425. }
  426. EXPORT_SYMBOL(__hsiphash_unaligned);
/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	/* a full 32-bit state word: absorb with one compression round */
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);
/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	HPREAMBLE(8)
	/* one compression round per input word (HalfSipHash-1-3) */
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);
/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	HPREAMBLE(12)
	/* one compression round per input word (HalfSipHash-1-3) */
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);
/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: forth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 forth, const hsiphash_key_t *key)
{
	HPREAMBLE(16)
	/* one compression round per input word (HalfSipHash-1-3) */
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	v3 ^= forth;
	HSIPROUND;
	v0 ^= forth;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#endif