/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * x86_64/AVX2 assembler optimized version of Serpent
 *
 * Copyright © 2012-2013 Jussi Kivilinna <[email protected]>
 *
 * Based on AVX assembler implementation of Serpent by:
 *  Copyright © 2012 Johannes Goetzfried
 *  <[email protected]>
 */

#include <linux/linkage.h>
#include <asm/frame.h>

#include "glue_helper-asm-avx2.S"

.file "serpent-avx2-asm_64.S"
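
/* Shuffle mask that byte-reverses a 128-bit value (vpshufb operand). */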
.section .rodata.cst16.bswap128_mask, "aM", @progbits, 16
.align 16
.Lbswap128_mask:
	.byte 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0

.text
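
/*
 * Register usage: sixteen blocks are processed as two bitsliced sets
 * of eight.  After read_blocks, RA1..RD1 hold words 0..3 of blocks
 * 0-7 (one word per dword lane) and RA2..RD2 the same for blocks
 * 8-15.  CTX points to the expanded key.  RNOT is kept all-ones so
 * that vpxor with it acts as bitwise NOT; RK0..RK3 hold broadcast
 * round-key words and double as temporaries around the transposes.
 */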
#define CTX %rdi

#define RNOT %ymm0
#define tp %ymm1

#define RA1 %ymm2
#define RA2 %ymm3
#define RB1 %ymm4
#define RB2 %ymm5
#define RC1 %ymm6
#define RC2 %ymm7
#define RD1 %ymm8
#define RD2 %ymm9
#define RE1 %ymm10
#define RE2 %ymm11

#define RK0 %ymm12
#define RK1 %ymm13
#define RK2 %ymm14
#define RK3 %ymm15

#define RK0x %xmm12
#define RK1x %xmm13
#define RK2x %xmm14
#define RK3x %xmm15
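
/*
 * Bitsliced Serpent S-boxes S0..S7 and their inverses SI0..SI7.  Each
 * one is split into an _1/_2 half so that callers can interleave other
 * work (round-key broadcasts, the second block set) between the
 * halves; tp and the x4 argument serve as scratch registers.
 */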
#define S0_1(x0, x1, x2, x3, x4) \
	vpor x0, x3, tp; \
	vpxor x3, x0, x0; \
	vpxor x2, x3, x4; \
	vpxor RNOT, x4, x4; \
	vpxor x1, tp, x3; \
	vpand x0, x1, x1; \
	vpxor x4, x1, x1; \
	vpxor x0, x2, x2;
#define S0_2(x0, x1, x2, x3, x4) \
	vpxor x3, x0, x0; \
	vpor x0, x4, x4; \
	vpxor x2, x0, x0; \
	vpand x1, x2, x2; \
	vpxor x2, x3, x3; \
	vpxor RNOT, x1, x1; \
	vpxor x4, x2, x2; \
	vpxor x2, x1, x1;

#define S1_1(x0, x1, x2, x3, x4) \
	vpxor x0, x1, tp; \
	vpxor x3, x0, x0; \
	vpxor RNOT, x3, x3; \
	vpand tp, x1, x4; \
	vpor tp, x0, x0; \
	vpxor x2, x3, x3; \
	vpxor x3, x0, x0; \
	vpxor x3, tp, x1;
#define S1_2(x0, x1, x2, x3, x4) \
	vpxor x4, x3, x3; \
	vpor x4, x1, x1; \
	vpxor x2, x4, x4; \
	vpand x0, x2, x2; \
	vpxor x1, x2, x2; \
	vpor x0, x1, x1; \
	vpxor RNOT, x0, x0; \
	vpxor x2, x0, x0; \
	vpxor x1, x4, x4;

#define S2_1(x0, x1, x2, x3, x4) \
	vpxor RNOT, x3, x3; \
	vpxor x0, x1, x1; \
	vpand x2, x0, tp; \
	vpxor x3, tp, tp; \
	vpor x0, x3, x3; \
	vpxor x1, x2, x2; \
	vpxor x1, x3, x3; \
	vpand tp, x1, x1;
#define S2_2(x0, x1, x2, x3, x4) \
	vpxor x2, tp, tp; \
	vpand x3, x2, x2; \
	vpor x1, x3, x3; \
	vpxor RNOT, tp, tp; \
	vpxor tp, x3, x3; \
	vpxor tp, x0, x4; \
	vpxor x2, tp, x0; \
	vpor x2, x1, x1;

#define S3_1(x0, x1, x2, x3, x4) \
	vpxor x3, x1, tp; \
	vpor x0, x3, x3; \
	vpand x0, x1, x4; \
	vpxor x2, x0, x0; \
	vpxor tp, x2, x2; \
	vpand x3, tp, x1; \
	vpxor x3, x2, x2; \
	vpor x4, x0, x0; \
	vpxor x3, x4, x4;
#define S3_2(x0, x1, x2, x3, x4) \
	vpxor x0, x1, x1; \
	vpand x3, x0, x0; \
	vpand x4, x3, x3; \
	vpxor x2, x3, x3; \
	vpor x1, x4, x4; \
	vpand x1, x2, x2; \
	vpxor x3, x4, x4; \
	vpxor x3, x0, x0; \
	vpxor x2, x3, x3;

#define S4_1(x0, x1, x2, x3, x4) \
	vpand x0, x3, tp; \
	vpxor x3, x0, x0; \
	vpxor x2, tp, tp; \
	vpor x3, x2, x2; \
	vpxor x1, x0, x0; \
	vpxor tp, x3, x4; \
	vpor x0, x2, x2; \
	vpxor x1, x2, x2;
#define S4_2(x0, x1, x2, x3, x4) \
	vpand x0, x1, x1; \
	vpxor x4, x1, x1; \
	vpand x2, x4, x4; \
	vpxor tp, x2, x2; \
	vpxor x0, x4, x4; \
	vpor x1, tp, x3; \
	vpxor RNOT, x1, x1; \
	vpxor x0, x3, x3;

#define S5_1(x0, x1, x2, x3, x4) \
	vpor x0, x1, tp; \
	vpxor tp, x2, x2; \
	vpxor RNOT, x3, x3; \
	vpxor x0, x1, x4; \
	vpxor x2, x0, x0; \
	vpand x4, tp, x1; \
	vpor x3, x4, x4; \
	vpxor x0, x4, x4;
#define S5_2(x0, x1, x2, x3, x4) \
	vpand x3, x0, x0; \
	vpxor x3, x1, x1; \
	vpxor x2, x3, x3; \
	vpxor x1, x0, x0; \
	vpand x4, x2, x2; \
	vpxor x2, x1, x1; \
	vpand x0, x2, x2; \
	vpxor x2, x3, x3;

#define S6_1(x0, x1, x2, x3, x4) \
	vpxor x0, x3, x3; \
	vpxor x2, x1, tp; \
	vpxor x0, x2, x2; \
	vpand x3, x0, x0; \
	vpor x3, tp, tp; \
	vpxor RNOT, x1, x4; \
	vpxor tp, x0, x0; \
	vpxor x2, tp, x1;
#define S6_2(x0, x1, x2, x3, x4) \
	vpxor x4, x3, x3; \
	vpxor x0, x4, x4; \
	vpand x0, x2, x2; \
	vpxor x1, x4, x4; \
	vpxor x3, x2, x2; \
	vpand x1, x3, x3; \
	vpxor x0, x3, x3; \
	vpxor x2, x1, x1;

#define S7_1(x0, x1, x2, x3, x4) \
	vpxor RNOT, x1, tp; \
	vpxor RNOT, x0, x0; \
	vpand x2, tp, x1; \
	vpxor x3, x1, x1; \
	vpor tp, x3, x3; \
	vpxor x2, tp, x4; \
	vpxor x3, x2, x2; \
	vpxor x0, x3, x3; \
	vpor x1, x0, x0;
#define S7_2(x0, x1, x2, x3, x4) \
	vpand x0, x2, x2; \
	vpxor x4, x0, x0; \
	vpxor x3, x4, x4; \
	vpand x0, x3, x3; \
	vpxor x1, x4, x4; \
	vpxor x4, x2, x2; \
	vpxor x1, x3, x3; \
	vpor x0, x4, x4; \
	vpxor x1, x4, x4;

#define SI0_1(x0, x1, x2, x3, x4) \
	vpxor x0, x1, x1; \
	vpor x1, x3, tp; \
	vpxor x1, x3, x4; \
	vpxor RNOT, x0, x0; \
	vpxor tp, x2, x2; \
	vpxor x0, tp, x3; \
	vpand x1, x0, x0; \
	vpxor x2, x0, x0;
#define SI0_2(x0, x1, x2, x3, x4) \
	vpand x3, x2, x2; \
	vpxor x4, x3, x3; \
	vpxor x3, x2, x2; \
	vpxor x3, x1, x1; \
	vpand x0, x3, x3; \
	vpxor x0, x1, x1; \
	vpxor x2, x0, x0; \
	vpxor x3, x4, x4;

#define SI1_1(x0, x1, x2, x3, x4) \
	vpxor x3, x1, x1; \
	vpxor x2, x0, tp; \
	vpxor RNOT, x2, x2; \
	vpor x1, x0, x4; \
	vpxor x3, x4, x4; \
	vpand x1, x3, x3; \
	vpxor x2, x1, x1; \
	vpand x4, x2, x2;
#define SI1_2(x0, x1, x2, x3, x4) \
	vpxor x1, x4, x4; \
	vpor x3, x1, x1; \
	vpxor tp, x3, x3; \
	vpxor tp, x2, x2; \
	vpor x4, tp, x0; \
	vpxor x4, x2, x2; \
	vpxor x0, x1, x1; \
	vpxor x1, x4, x4;

#define SI2_1(x0, x1, x2, x3, x4) \
	vpxor x1, x2, x2; \
	vpxor RNOT, x3, tp; \
	vpor x2, tp, tp; \
	vpxor x3, x2, x2; \
	vpxor x0, x3, x4; \
	vpxor x1, tp, x3; \
	vpor x2, x1, x1; \
	vpxor x0, x2, x2;
#define SI2_2(x0, x1, x2, x3, x4) \
	vpxor x4, x1, x1; \
	vpor x3, x4, x4; \
	vpxor x3, x2, x2; \
	vpxor x2, x4, x4; \
	vpand x1, x2, x2; \
	vpxor x3, x2, x2; \
	vpxor x4, x3, x3; \
	vpxor x0, x4, x4;

#define SI3_1(x0, x1, x2, x3, x4) \
	vpxor x1, x2, x2; \
	vpand x2, x1, tp; \
	vpxor x0, tp, tp; \
	vpor x1, x0, x0; \
	vpxor x3, x1, x4; \
	vpxor x3, x0, x0; \
	vpor tp, x3, x3; \
	vpxor x2, tp, x1;
#define SI3_2(x0, x1, x2, x3, x4) \
	vpxor x3, x1, x1; \
	vpxor x2, x0, x0; \
	vpxor x3, x2, x2; \
	vpand x1, x3, x3; \
	vpxor x0, x1, x1; \
	vpand x2, x0, x0; \
	vpxor x3, x4, x4; \
	vpxor x0, x3, x3; \
	vpxor x1, x0, x0;

#define SI4_1(x0, x1, x2, x3, x4) \
	vpxor x3, x2, x2; \
	vpand x1, x0, tp; \
	vpxor x2, tp, tp; \
	vpor x3, x2, x2; \
	vpxor RNOT, x0, x4; \
	vpxor tp, x1, x1; \
	vpxor x2, tp, x0; \
	vpand x4, x2, x2;
#define SI4_2(x0, x1, x2, x3, x4) \
	vpxor x0, x2, x2; \
	vpor x4, x0, x0; \
	vpxor x3, x0, x0; \
	vpand x2, x3, x3; \
	vpxor x3, x4, x4; \
	vpxor x1, x3, x3; \
	vpand x0, x1, x1; \
	vpxor x1, x4, x4; \
	vpxor x3, x0, x0;

#define SI5_1(x0, x1, x2, x3, x4) \
	vpor x2, x1, tp; \
	vpxor x1, x2, x2; \
	vpxor x3, tp, tp; \
	vpand x1, x3, x3; \
	vpxor x3, x2, x2; \
	vpor x0, x3, x3; \
	vpxor RNOT, x0, x0; \
	vpxor x2, x3, x3; \
	vpor x0, x2, x2;
#define SI5_2(x0, x1, x2, x3, x4) \
	vpxor tp, x1, x4; \
	vpxor x4, x2, x2; \
	vpand x0, x4, x4; \
	vpxor tp, x0, x0; \
	vpxor x3, tp, x1; \
	vpand x2, x0, x0; \
	vpxor x3, x2, x2; \
	vpxor x2, x0, x0; \
	vpxor x4, x2, x2; \
	vpxor x3, x4, x4;

#define SI6_1(x0, x1, x2, x3, x4) \
	vpxor x2, x0, x0; \
	vpand x3, x0, tp; \
	vpxor x3, x2, x2; \
	vpxor x2, tp, tp; \
	vpxor x1, x3, x3; \
	vpor x0, x2, x2; \
	vpxor x3, x2, x2; \
	vpand tp, x3, x3;
#define SI6_2(x0, x1, x2, x3, x4) \
	vpxor RNOT, tp, tp; \
	vpxor x1, x3, x3; \
	vpand x2, x1, x1; \
	vpxor tp, x0, x4; \
	vpxor x4, x3, x3; \
	vpxor x2, x4, x4; \
	vpxor x1, tp, x0; \
	vpxor x0, x2, x2;

#define SI7_1(x0, x1, x2, x3, x4) \
	vpand x0, x3, tp; \
	vpxor x2, x0, x0; \
	vpor x3, x2, x2; \
	vpxor x1, x3, x4; \
	vpxor RNOT, x0, x0; \
	vpor tp, x1, x1; \
	vpxor x0, x4, x4; \
	vpand x2, x0, x0; \
	vpxor x1, x0, x0;
#define SI7_2(x0, x1, x2, x3, x4) \
	vpand x2, x1, x1; \
	vpxor x2, tp, x3; \
	vpxor x3, x4, x4; \
	vpand x3, x2, x2; \
	vpor x0, x3, x3; \
	vpxor x4, x1, x1; \
	vpxor x4, x3, x3; \
	vpand x0, x4, x4; \
	vpxor x2, x4, x4;
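
/*
 * get_key(i, j, t) broadcasts 32-bit word j of expanded round key i
 * (at byte offset (4*i + j)*4 from CTX) into every dword lane of t.
 */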
#define get_key(i,j,t) \
	vpbroadcastd (4*(i)+(j))*4(CTX), t;
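
/* XOR the four words of round key i into both eight-block sets. */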
#define K2(x0, x1, x2, x3, x4, i) \
	get_key(i, 0, RK0); \
	get_key(i, 1, RK1); \
	get_key(i, 2, RK2); \
	get_key(i, 3, RK3); \
	vpxor RK0, x0 ## 1, x0 ## 1; \
	vpxor RK1, x1 ## 1, x1 ## 1; \
	vpxor RK2, x2 ## 1, x2 ## 1; \
	vpxor RK3, x3 ## 1, x3 ## 1; \
	vpxor RK0, x0 ## 2, x0 ## 2; \
	vpxor RK1, x1 ## 2, x1 ## 2; \
	vpxor RK2, x2 ## 2, x2 ## 2; \
	vpxor RK3, x3 ## 2, x3 ## 2;
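
/*
 * Serpent's linear transformation (rotate-left by 13 and 3, XOR/shift
 * mixing, rotates by 1 and 7, then 5 and 22) followed by XOR with
 * round key i.  The two eight-block sets are interleaved, and the
 * get_key broadcasts are hoisted between steps to hide load latency.
 */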
#define LK2(x0, x1, x2, x3, x4, i) \
	vpslld $13, x0 ## 1, x4 ## 1; \
	vpsrld $(32 - 13), x0 ## 1, x0 ## 1; \
	vpor x4 ## 1, x0 ## 1, x0 ## 1; \
	vpxor x0 ## 1, x1 ## 1, x1 ## 1; \
	vpslld $3, x2 ## 1, x4 ## 1; \
	vpsrld $(32 - 3), x2 ## 1, x2 ## 1; \
	vpor x4 ## 1, x2 ## 1, x2 ## 1; \
	vpxor x2 ## 1, x1 ## 1, x1 ## 1; \
	vpslld $13, x0 ## 2, x4 ## 2; \
	vpsrld $(32 - 13), x0 ## 2, x0 ## 2; \
	vpor x4 ## 2, x0 ## 2, x0 ## 2; \
	vpxor x0 ## 2, x1 ## 2, x1 ## 2; \
	vpslld $3, x2 ## 2, x4 ## 2; \
	vpsrld $(32 - 3), x2 ## 2, x2 ## 2; \
	vpor x4 ## 2, x2 ## 2, x2 ## 2; \
	vpxor x2 ## 2, x1 ## 2, x1 ## 2; \
	vpslld $1, x1 ## 1, x4 ## 1; \
	vpsrld $(32 - 1), x1 ## 1, x1 ## 1; \
	vpor x4 ## 1, x1 ## 1, x1 ## 1; \
	vpslld $3, x0 ## 1, x4 ## 1; \
	vpxor x2 ## 1, x3 ## 1, x3 ## 1; \
	vpxor x4 ## 1, x3 ## 1, x3 ## 1; \
	get_key(i, 1, RK1); \
	vpslld $1, x1 ## 2, x4 ## 2; \
	vpsrld $(32 - 1), x1 ## 2, x1 ## 2; \
	vpor x4 ## 2, x1 ## 2, x1 ## 2; \
	vpslld $3, x0 ## 2, x4 ## 2; \
	vpxor x2 ## 2, x3 ## 2, x3 ## 2; \
	vpxor x4 ## 2, x3 ## 2, x3 ## 2; \
	get_key(i, 3, RK3); \
	vpslld $7, x3 ## 1, x4 ## 1; \
	vpsrld $(32 - 7), x3 ## 1, x3 ## 1; \
	vpor x4 ## 1, x3 ## 1, x3 ## 1; \
	vpslld $7, x1 ## 1, x4 ## 1; \
	vpxor x1 ## 1, x0 ## 1, x0 ## 1; \
	vpxor x3 ## 1, x0 ## 1, x0 ## 1; \
	vpxor x3 ## 1, x2 ## 1, x2 ## 1; \
	vpxor x4 ## 1, x2 ## 1, x2 ## 1; \
	get_key(i, 0, RK0); \
	vpslld $7, x3 ## 2, x4 ## 2; \
	vpsrld $(32 - 7), x3 ## 2, x3 ## 2; \
	vpor x4 ## 2, x3 ## 2, x3 ## 2; \
	vpslld $7, x1 ## 2, x4 ## 2; \
	vpxor x1 ## 2, x0 ## 2, x0 ## 2; \
	vpxor x3 ## 2, x0 ## 2, x0 ## 2; \
	vpxor x3 ## 2, x2 ## 2, x2 ## 2; \
	vpxor x4 ## 2, x2 ## 2, x2 ## 2; \
	get_key(i, 2, RK2); \
	vpxor RK1, x1 ## 1, x1 ## 1; \
	vpxor RK3, x3 ## 1, x3 ## 1; \
	vpslld $5, x0 ## 1, x4 ## 1; \
	vpsrld $(32 - 5), x0 ## 1, x0 ## 1; \
	vpor x4 ## 1, x0 ## 1, x0 ## 1; \
	vpslld $22, x2 ## 1, x4 ## 1; \
	vpsrld $(32 - 22), x2 ## 1, x2 ## 1; \
	vpor x4 ## 1, x2 ## 1, x2 ## 1; \
	vpxor RK0, x0 ## 1, x0 ## 1; \
	vpxor RK2, x2 ## 1, x2 ## 1; \
	vpxor RK1, x1 ## 2, x1 ## 2; \
	vpxor RK3, x3 ## 2, x3 ## 2; \
	vpslld $5, x0 ## 2, x4 ## 2; \
	vpsrld $(32 - 5), x0 ## 2, x0 ## 2; \
	vpor x4 ## 2, x0 ## 2, x0 ## 2; \
	vpslld $22, x2 ## 2, x4 ## 2; \
	vpsrld $(32 - 22), x2 ## 2, x2 ## 2; \
	vpor x4 ## 2, x2 ## 2, x2 ## 2; \
	vpxor RK0, x0 ## 2, x0 ## 2; \
	vpxor RK2, x2 ## 2, x2 ## 2;
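
/*
 * Inverse of LK2 for decryption: XOR round key i first, then undo the
 * linear transformation (the same rotate amounts, applied in the
 * opposite direction and reverse order).
 */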
#define KL2(x0, x1, x2, x3, x4, i) \
	vpxor RK0, x0 ## 1, x0 ## 1; \
	vpxor RK2, x2 ## 1, x2 ## 1; \
	vpsrld $5, x0 ## 1, x4 ## 1; \
	vpslld $(32 - 5), x0 ## 1, x0 ## 1; \
	vpor x4 ## 1, x0 ## 1, x0 ## 1; \
	vpxor RK3, x3 ## 1, x3 ## 1; \
	vpxor RK1, x1 ## 1, x1 ## 1; \
	vpsrld $22, x2 ## 1, x4 ## 1; \
	vpslld $(32 - 22), x2 ## 1, x2 ## 1; \
	vpor x4 ## 1, x2 ## 1, x2 ## 1; \
	vpxor x3 ## 1, x2 ## 1, x2 ## 1; \
	vpxor RK0, x0 ## 2, x0 ## 2; \
	vpxor RK2, x2 ## 2, x2 ## 2; \
	vpsrld $5, x0 ## 2, x4 ## 2; \
	vpslld $(32 - 5), x0 ## 2, x0 ## 2; \
	vpor x4 ## 2, x0 ## 2, x0 ## 2; \
	vpxor RK3, x3 ## 2, x3 ## 2; \
	vpxor RK1, x1 ## 2, x1 ## 2; \
	vpsrld $22, x2 ## 2, x4 ## 2; \
	vpslld $(32 - 22), x2 ## 2, x2 ## 2; \
	vpor x4 ## 2, x2 ## 2, x2 ## 2; \
	vpxor x3 ## 2, x2 ## 2, x2 ## 2; \
	vpxor x3 ## 1, x0 ## 1, x0 ## 1; \
	vpslld $7, x1 ## 1, x4 ## 1; \
	vpxor x1 ## 1, x0 ## 1, x0 ## 1; \
	vpxor x4 ## 1, x2 ## 1, x2 ## 1; \
	vpsrld $1, x1 ## 1, x4 ## 1; \
	vpslld $(32 - 1), x1 ## 1, x1 ## 1; \
	vpor x4 ## 1, x1 ## 1, x1 ## 1; \
	vpxor x3 ## 2, x0 ## 2, x0 ## 2; \
	vpslld $7, x1 ## 2, x4 ## 2; \
	vpxor x1 ## 2, x0 ## 2, x0 ## 2; \
	vpxor x4 ## 2, x2 ## 2, x2 ## 2; \
	vpsrld $1, x1 ## 2, x4 ## 2; \
	vpslld $(32 - 1), x1 ## 2, x1 ## 2; \
	vpor x4 ## 2, x1 ## 2, x1 ## 2; \
	vpsrld $7, x3 ## 1, x4 ## 1; \
	vpslld $(32 - 7), x3 ## 1, x3 ## 1; \
	vpor x4 ## 1, x3 ## 1, x3 ## 1; \
	vpxor x0 ## 1, x1 ## 1, x1 ## 1; \
	vpslld $3, x0 ## 1, x4 ## 1; \
	vpxor x4 ## 1, x3 ## 1, x3 ## 1; \
	vpsrld $7, x3 ## 2, x4 ## 2; \
	vpslld $(32 - 7), x3 ## 2, x3 ## 2; \
	vpor x4 ## 2, x3 ## 2, x3 ## 2; \
	vpxor x0 ## 2, x1 ## 2, x1 ## 2; \
	vpslld $3, x0 ## 2, x4 ## 2; \
	vpxor x4 ## 2, x3 ## 2, x3 ## 2; \
	vpsrld $13, x0 ## 1, x4 ## 1; \
	vpslld $(32 - 13), x0 ## 1, x0 ## 1; \
	vpor x4 ## 1, x0 ## 1, x0 ## 1; \
	vpxor x2 ## 1, x1 ## 1, x1 ## 1; \
	vpxor x2 ## 1, x3 ## 1, x3 ## 1; \
	vpsrld $3, x2 ## 1, x4 ## 1; \
	vpslld $(32 - 3), x2 ## 1, x2 ## 1; \
	vpor x4 ## 1, x2 ## 1, x2 ## 1; \
	vpsrld $13, x0 ## 2, x4 ## 2; \
	vpslld $(32 - 13), x0 ## 2, x0 ## 2; \
	vpor x4 ## 2, x0 ## 2, x0 ## 2; \
	vpxor x2 ## 2, x1 ## 2, x1 ## 2; \
	vpxor x2 ## 2, x3 ## 2, x3 ## 2; \
	vpsrld $3, x2 ## 2, x4 ## 2; \
	vpslld $(32 - 3), x2 ## 2, x2 ## 2; \
	vpor x4 ## 2, x2 ## 2, x2 ## 2;
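
/*
 * S applies S-box SBOX to both eight-block sets.  SP does the same
 * while broadcasting the round-key words for round i between the
 * S-box halves, overlapping the key loads with S-box arithmetic.
 */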
#define S(SBOX, x0, x1, x2, x3, x4) \
	SBOX ## _1(x0 ## 1, x1 ## 1, x2 ## 1, x3 ## 1, x4 ## 1); \
	SBOX ## _2(x0 ## 1, x1 ## 1, x2 ## 1, x3 ## 1, x4 ## 1); \
	SBOX ## _1(x0 ## 2, x1 ## 2, x2 ## 2, x3 ## 2, x4 ## 2); \
	SBOX ## _2(x0 ## 2, x1 ## 2, x2 ## 2, x3 ## 2, x4 ## 2);

#define SP(SBOX, x0, x1, x2, x3, x4, i) \
	get_key(i, 0, RK0); \
	SBOX ## _1(x0 ## 1, x1 ## 1, x2 ## 1, x3 ## 1, x4 ## 1); \
	get_key(i, 2, RK2); \
	SBOX ## _2(x0 ## 1, x1 ## 1, x2 ## 1, x3 ## 1, x4 ## 1); \
	get_key(i, 3, RK3); \
	SBOX ## _1(x0 ## 2, x1 ## 2, x2 ## 2, x3 ## 2, x4 ## 2); \
	get_key(i, 1, RK1); \
	SBOX ## _2(x0 ## 2, x1 ## 2, x2 ## 2, x3 ## 2, x4 ## 2);
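
/*
 * 4x4 transpose of 32-bit words; with ymm operands the unpack
 * instructions work per 128-bit lane, so two independent 4x4
 * transposes run at once, converting between the in-memory block
 * layout and the bitsliced one-word-per-lane layout.
 */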
#define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \
	vpunpckldq x1, x0, t0; \
	vpunpckhdq x1, x0, t2; \
	vpunpckldq x3, x2, t1; \
	vpunpckhdq x3, x2, x3; \
	\
	vpunpcklqdq t1, t0, x0; \
	vpunpckhqdq t1, t0, x1; \
	vpunpcklqdq x3, t2, x2; \
	vpunpckhqdq x3, t2, x3;

#define read_blocks(x0, x1, x2, x3, t0, t1, t2) \
	transpose_4x4(x0, x1, x2, x3, t0, t1, t2)

#define write_blocks(x0, x1, x2, x3, t0, t1, t2) \
	transpose_4x4(x0, x1, x2, x3, t0, t1, t2)

.align 8
SYM_FUNC_START_LOCAL(__serpent_enc_blk16)
	/* input:
	 *	%rdi: ctx, CTX
	 *	RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2: plaintext
	 * output:
	 *	RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2: ciphertext
	 */
	vpcmpeqd RNOT, RNOT, RNOT;	/* RNOT := all-ones NOT mask */

	read_blocks(RA1, RB1, RC1, RD1, RK0, RK1, RK2);
	read_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
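
	/*
	 * 32 encryption rounds: S-box, then the linear transformation
	 * merged with the next round key (LK2).  The register arguments
	 * rotate from round to round so no move instructions are
	 * needed; the final round replaces LK2 with a plain key mix
	 * (K2) using key 32.
	 */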
	K2(RA, RB, RC, RD, RE, 0);
	S(S0, RA, RB, RC, RD, RE);	LK2(RC, RB, RD, RA, RE, 1);
	S(S1, RC, RB, RD, RA, RE);	LK2(RE, RD, RA, RC, RB, 2);
	S(S2, RE, RD, RA, RC, RB);	LK2(RB, RD, RE, RC, RA, 3);
	S(S3, RB, RD, RE, RC, RA);	LK2(RC, RA, RD, RB, RE, 4);
	S(S4, RC, RA, RD, RB, RE);	LK2(RA, RD, RB, RE, RC, 5);
	S(S5, RA, RD, RB, RE, RC);	LK2(RC, RA, RD, RE, RB, 6);
	S(S6, RC, RA, RD, RE, RB);	LK2(RD, RB, RA, RE, RC, 7);
	S(S7, RD, RB, RA, RE, RC);	LK2(RC, RA, RE, RD, RB, 8);
	S(S0, RC, RA, RE, RD, RB);	LK2(RE, RA, RD, RC, RB, 9);
	S(S1, RE, RA, RD, RC, RB);	LK2(RB, RD, RC, RE, RA, 10);
	S(S2, RB, RD, RC, RE, RA);	LK2(RA, RD, RB, RE, RC, 11);
	S(S3, RA, RD, RB, RE, RC);	LK2(RE, RC, RD, RA, RB, 12);
	S(S4, RE, RC, RD, RA, RB);	LK2(RC, RD, RA, RB, RE, 13);
	S(S5, RC, RD, RA, RB, RE);	LK2(RE, RC, RD, RB, RA, 14);
	S(S6, RE, RC, RD, RB, RA);	LK2(RD, RA, RC, RB, RE, 15);
	S(S7, RD, RA, RC, RB, RE);	LK2(RE, RC, RB, RD, RA, 16);
	S(S0, RE, RC, RB, RD, RA);	LK2(RB, RC, RD, RE, RA, 17);
	S(S1, RB, RC, RD, RE, RA);	LK2(RA, RD, RE, RB, RC, 18);
	S(S2, RA, RD, RE, RB, RC);	LK2(RC, RD, RA, RB, RE, 19);
	S(S3, RC, RD, RA, RB, RE);	LK2(RB, RE, RD, RC, RA, 20);
	S(S4, RB, RE, RD, RC, RA);	LK2(RE, RD, RC, RA, RB, 21);
	S(S5, RE, RD, RC, RA, RB);	LK2(RB, RE, RD, RA, RC, 22);
	S(S6, RB, RE, RD, RA, RC);	LK2(RD, RC, RE, RA, RB, 23);
	S(S7, RD, RC, RE, RA, RB);	LK2(RB, RE, RA, RD, RC, 24);
	S(S0, RB, RE, RA, RD, RC);	LK2(RA, RE, RD, RB, RC, 25);
	S(S1, RA, RE, RD, RB, RC);	LK2(RC, RD, RB, RA, RE, 26);
	S(S2, RC, RD, RB, RA, RE);	LK2(RE, RD, RC, RA, RB, 27);
	S(S3, RE, RD, RC, RA, RB);	LK2(RA, RB, RD, RE, RC, 28);
	S(S4, RA, RB, RD, RE, RC);	LK2(RB, RD, RE, RC, RA, 29);
	S(S5, RB, RD, RE, RC, RA);	LK2(RA, RB, RD, RC, RE, 30);
	S(S6, RA, RB, RD, RC, RE);	LK2(RD, RE, RB, RC, RA, 31);
	S(S7, RD, RE, RB, RC, RA);	K2(RA, RB, RC, RD, RE, 32);

	write_blocks(RA1, RB1, RC1, RD1, RK0, RK1, RK2);
	write_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);

	RET;
SYM_FUNC_END(__serpent_enc_blk16)

.align 8
SYM_FUNC_START_LOCAL(__serpent_dec_blk16)
	/* input:
	 *	%rdi: ctx, CTX
	 *	RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2: ciphertext
	 * output:
	 *	RC1, RD1, RB1, RE1, RC2, RD2, RB2, RE2: plaintext
	 */
	vpcmpeqd RNOT, RNOT, RNOT;

	read_blocks(RA1, RB1, RC1, RD1, RK0, RK1, RK2);
	read_blocks(RA2, RB2, RC2, RD2, RK0, RK1, RK2);
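
	/*
	 * Decryption runs the rounds in reverse: after removing key 32,
	 * each round applies the inverse S-box (SIn, with SP prefetching
	 * that round's key words), then KL2 XORs the round key and
	 * undoes the linear transformation, ending with a plain key mix
	 * for key 0.
	 */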
	K2(RA, RB, RC, RD, RE, 32);
	SP(SI7, RA, RB, RC, RD, RE, 31);	KL2(RB, RD, RA, RE, RC, 31);
	SP(SI6, RB, RD, RA, RE, RC, 30);	KL2(RA, RC, RE, RB, RD, 30);
	SP(SI5, RA, RC, RE, RB, RD, 29);	KL2(RC, RD, RA, RE, RB, 29);
	SP(SI4, RC, RD, RA, RE, RB, 28);	KL2(RC, RA, RB, RE, RD, 28);
	SP(SI3, RC, RA, RB, RE, RD, 27);	KL2(RB, RC, RD, RE, RA, 27);
	SP(SI2, RB, RC, RD, RE, RA, 26);	KL2(RC, RA, RE, RD, RB, 26);
	SP(SI1, RC, RA, RE, RD, RB, 25);	KL2(RB, RA, RE, RD, RC, 25);
	SP(SI0, RB, RA, RE, RD, RC, 24);	KL2(RE, RC, RA, RB, RD, 24);
	SP(SI7, RE, RC, RA, RB, RD, 23);	KL2(RC, RB, RE, RD, RA, 23);
	SP(SI6, RC, RB, RE, RD, RA, 22);	KL2(RE, RA, RD, RC, RB, 22);
	SP(SI5, RE, RA, RD, RC, RB, 21);	KL2(RA, RB, RE, RD, RC, 21);
	SP(SI4, RA, RB, RE, RD, RC, 20);	KL2(RA, RE, RC, RD, RB, 20);
	SP(SI3, RA, RE, RC, RD, RB, 19);	KL2(RC, RA, RB, RD, RE, 19);
	SP(SI2, RC, RA, RB, RD, RE, 18);	KL2(RA, RE, RD, RB, RC, 18);
	SP(SI1, RA, RE, RD, RB, RC, 17);	KL2(RC, RE, RD, RB, RA, 17);
	SP(SI0, RC, RE, RD, RB, RA, 16);	KL2(RD, RA, RE, RC, RB, 16);
	SP(SI7, RD, RA, RE, RC, RB, 15);	KL2(RA, RC, RD, RB, RE, 15);
	SP(SI6, RA, RC, RD, RB, RE, 14);	KL2(RD, RE, RB, RA, RC, 14);
	SP(SI5, RD, RE, RB, RA, RC, 13);	KL2(RE, RC, RD, RB, RA, 13);
	SP(SI4, RE, RC, RD, RB, RA, 12);	KL2(RE, RD, RA, RB, RC, 12);
	SP(SI3, RE, RD, RA, RB, RC, 11);	KL2(RA, RE, RC, RB, RD, 11);
	SP(SI2, RA, RE, RC, RB, RD, 10);	KL2(RE, RD, RB, RC, RA, 10);
	SP(SI1, RE, RD, RB, RC, RA, 9);		KL2(RA, RD, RB, RC, RE, 9);
	SP(SI0, RA, RD, RB, RC, RE, 8);		KL2(RB, RE, RD, RA, RC, 8);
	SP(SI7, RB, RE, RD, RA, RC, 7);		KL2(RE, RA, RB, RC, RD, 7);
	SP(SI6, RE, RA, RB, RC, RD, 6);		KL2(RB, RD, RC, RE, RA, 6);
	SP(SI5, RB, RD, RC, RE, RA, 5);		KL2(RD, RA, RB, RC, RE, 5);
	SP(SI4, RD, RA, RB, RC, RE, 4);		KL2(RD, RB, RE, RC, RA, 4);
	SP(SI3, RD, RB, RE, RC, RA, 3);		KL2(RE, RD, RA, RC, RB, 3);
	SP(SI2, RE, RD, RA, RC, RB, 2);		KL2(RD, RB, RC, RA, RE, 2);
	SP(SI1, RD, RB, RC, RA, RE, 1);		KL2(RE, RB, RC, RA, RD, 1);
	S(SI0, RE, RB, RC, RA, RD);		K2(RC, RD, RB, RE, RA, 0);

	write_blocks(RC1, RD1, RB1, RE1, RK0, RK1, RK2);
	write_blocks(RC2, RD2, RB2, RE2, RK0, RK1, RK2);

	RET;
SYM_FUNC_END(__serpent_dec_blk16)
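
/*
 * The SYM_FUNC_START entry points below are the 16-way helpers called
 * from the C glue code with a (ctx, dst, src) convention.  A sketch of
 * the expected declarations follows; the authoritative prototypes live
 * in the serpent-avx2 glue code, and the exact ctx type is assumed
 * here:
 *
 *	asmlinkage void serpent_ecb_enc_16way(const void *ctx, u8 *dst,
 *					      const u8 *src);
 *	asmlinkage void serpent_ecb_dec_16way(const void *ctx, u8 *dst,
 *					      const u8 *src);
 *	asmlinkage void serpent_cbc_dec_16way(const void *ctx, u8 *dst,
 *					      const u8 *src);
 */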
SYM_FUNC_START(serpent_ecb_enc_16way)
	/* input:
	 *	%rdi: ctx, CTX
	 *	%rsi: dst
	 *	%rdx: src
	 */
	FRAME_BEGIN

	vzeroupper;

	load_16way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);

	call __serpent_enc_blk16;

	store_16way(%rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);

	vzeroupper;

	FRAME_END
	RET;
SYM_FUNC_END(serpent_ecb_enc_16way)

SYM_FUNC_START(serpent_ecb_dec_16way)
	/* input:
	 *	%rdi: ctx, CTX
	 *	%rsi: dst
	 *	%rdx: src
	 */
	FRAME_BEGIN

	vzeroupper;

	load_16way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);

	call __serpent_dec_blk16;

	store_16way(%rsi, RC1, RD1, RB1, RE1, RC2, RD2, RB2, RE2);

	vzeroupper;

	FRAME_END
	RET;
SYM_FUNC_END(serpent_ecb_dec_16way)
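
/*
 * CBC decryption: the blocks are decrypted 16-way, then the
 * store_cbc_16way helper from glue_helper-asm-avx2.S completes the
 * CBC chaining by XORing the results with the preceding ciphertext
 * blocks still available at src; the first block's IV handling is
 * left to the C glue code.
 */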
SYM_FUNC_START(serpent_cbc_dec_16way)
	/* input:
	 *	%rdi: ctx, CTX
	 *	%rsi: dst
	 *	%rdx: src
	 */
	FRAME_BEGIN

	vzeroupper;

	load_16way(%rdx, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2);

	call __serpent_dec_blk16;

	store_cbc_16way(%rdx, %rsi, RC1, RD1, RB1, RE1, RC2, RD2, RB2, RE2,
			RK0);

	vzeroupper;

	FRAME_END
	RET;
SYM_FUNC_END(serpent_cbc_dec_16way)