// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for CRC32C optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/crc32c-intel.c
 *
 * Copyright (C) 2008 Intel Corporation
 * Authors: Austin Zhang <[email protected]>
 *          Kent Liu <[email protected]>
 */
#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/init.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/kernel.h>
#include <linux/crc32.h>
#include <crypto/internal/hash.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"
/*
 * Setting the seed allows arbitrary accumulators and flexible XOR policy.
 * If your algorithm starts with ~0, then XOR with ~0 before you set
 * the seed.
 */
  25. static int crc32c_sparc64_setkey(struct crypto_shash *hash, const u8 *key,
  26. unsigned int keylen)
  27. {
  28. u32 *mctx = crypto_shash_ctx(hash);
  29. if (keylen != sizeof(u32))
  30. return -EINVAL;
  31. *mctx = le32_to_cpup((__le32 *)key);
  32. return 0;
  33. }
  34. static int crc32c_sparc64_init(struct shash_desc *desc)
  35. {
  36. u32 *mctx = crypto_shash_ctx(desc->tfm);
  37. u32 *crcp = shash_desc_ctx(desc);
  38. *crcp = *mctx;
  39. return 0;
  40. }
  41. extern void crc32c_sparc64(u32 *crcp, const u64 *data, unsigned int len);
  42. static void crc32c_compute(u32 *crcp, const u64 *data, unsigned int len)
  43. {
  44. unsigned int asm_len;
  45. asm_len = len & ~7U;
  46. if (asm_len) {
  47. crc32c_sparc64(crcp, data, asm_len);
  48. data += asm_len / 8;
  49. len -= asm_len;
  50. }
  51. if (len)
  52. *crcp = __crc32c_le(*crcp, (const unsigned char *) data, len);
  53. }
  54. static int crc32c_sparc64_update(struct shash_desc *desc, const u8 *data,
  55. unsigned int len)
  56. {
  57. u32 *crcp = shash_desc_ctx(desc);
  58. crc32c_compute(crcp, (const u64 *) data, len);
  59. return 0;
  60. }
  61. static int __crc32c_sparc64_finup(u32 *crcp, const u8 *data, unsigned int len,
  62. u8 *out)
  63. {
  64. u32 tmp = *crcp;
  65. crc32c_compute(&tmp, (const u64 *) data, len);
  66. *(__le32 *) out = ~cpu_to_le32(tmp);
  67. return 0;
  68. }
  69. static int crc32c_sparc64_finup(struct shash_desc *desc, const u8 *data,
  70. unsigned int len, u8 *out)
  71. {
  72. return __crc32c_sparc64_finup(shash_desc_ctx(desc), data, len, out);
  73. }
  74. static int crc32c_sparc64_final(struct shash_desc *desc, u8 *out)
  75. {
  76. u32 *crcp = shash_desc_ctx(desc);
  77. *(__le32 *) out = ~cpu_to_le32p(crcp);
  78. return 0;
  79. }
  80. static int crc32c_sparc64_digest(struct shash_desc *desc, const u8 *data,
  81. unsigned int len, u8 *out)
  82. {
  83. return __crc32c_sparc64_finup(crypto_shash_ctx(desc->tfm), data, len,
  84. out);
  85. }
  86. static int crc32c_sparc64_cra_init(struct crypto_tfm *tfm)
  87. {
  88. u32 *key = crypto_tfm_ctx(tfm);
  89. *key = ~0;
  90. return 0;
  91. }
/* CRC32C is byte-oriented: block size 1, 32-bit (4-byte) digest. */
#define CHKSUM_BLOCK_SIZE	1
#define CHKSUM_DIGEST_SIZE	4

static struct shash_alg alg = {
	.setkey			=	crc32c_sparc64_setkey,
	.init			=	crc32c_sparc64_init,
	.update			=	crc32c_sparc64_update,
	.final			=	crc32c_sparc64_final,
	.finup			=	crc32c_sparc64_finup,
	.digest			=	crc32c_sparc64_digest,
	/* both per-request and per-tfm state are a single u32 accumulator */
	.descsize		=	sizeof(u32),
	.digestsize		=	CHKSUM_DIGEST_SIZE,
	.base			=	{
		.cra_name		=	"crc32c",
		.cra_driver_name	=	"crc32c-sparc64",
		.cra_priority		=	SPARC_CR_OPCODE_PRIORITY,
		/* setkey is optional; cra_init seeds the default ~0 */
		.cra_flags		=	CRYPTO_ALG_OPTIONAL_KEY,
		.cra_blocksize		=	CHKSUM_BLOCK_SIZE,
		.cra_ctxsize		=	sizeof(u32),
		/* data is fed to 64-bit asm loads; require 8-byte alignment */
		.cra_alignmask		=	7,
		.cra_module		=	THIS_MODULE,
		.cra_init		=	crc32c_sparc64_cra_init,
	}
};
  115. static bool __init sparc64_has_crc32c_opcode(void)
  116. {
  117. unsigned long cfr;
  118. if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
  119. return false;
  120. __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
  121. if (!(cfr & CFR_CRC32C))
  122. return false;
  123. return true;
  124. }
  125. static int __init crc32c_sparc64_mod_init(void)
  126. {
  127. if (sparc64_has_crc32c_opcode()) {
  128. pr_info("Using sparc64 crc32c opcode optimized CRC32C implementation\n");
  129. return crypto_register_shash(&alg);
  130. }
  131. pr_info("sparc64 crc32c opcode not available.\n");
  132. return -ENODEV;
  133. }
/* Module exit: drop the algorithm registered by mod_init. */
static void __exit crc32c_sparc64_mod_fini(void)
{
	crypto_unregister_shash(&alg);
}
module_init(crc32c_sparc64_mod_init);
module_exit(crc32c_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CRC32c (Castagnoli), sparc64 crc32c opcode accelerated");

MODULE_ALIAS_CRYPTO("crc32c");

/* Shared sparc crypto device-ID table (common to the opcode glue modules). */
#include "crop_devid.c"