/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM4 Cipher Algorithm, using ARMv8 Crypto Extensions
 * as specified in
 * https://tools.ietf.org/id/draft-ribose-cfrg-sm4-10.html
 *
 * Copyright (C) 2022, Alibaba Group.
 * Copyright (C) 2022 Tianjia Zhang <[email protected]>
 */

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/cpufeature.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/sm4.h>
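
/* Convert a byte count into the number of whole 16-byte SM4 blocks it holds. */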
#define BYTES2BLKS(nbytes)      ((nbytes) >> 4)
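
/* Low-level primitives implemented in assembly using the ARMv8 SM4 instructions. */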
asmlinkage void sm4_ce_expand_key(const u8 *key, u32 *rkey_enc, u32 *rkey_dec,
                                  const u32 *fk, const u32 *ck);
asmlinkage void sm4_ce_crypt_block(const u32 *rkey, u8 *dst, const u8 *src);
asmlinkage void sm4_ce_crypt(const u32 *rkey, u8 *dst, const u8 *src,
                             unsigned int nblks);
asmlinkage void sm4_ce_cbc_enc(const u32 *rkey, u8 *dst, const u8 *src,
                               u8 *iv, unsigned int nblks);
asmlinkage void sm4_ce_cbc_dec(const u32 *rkey, u8 *dst, const u8 *src,
                               u8 *iv, unsigned int nblks);
asmlinkage void sm4_ce_cfb_enc(const u32 *rkey, u8 *dst, const u8 *src,
                               u8 *iv, unsigned int nblks);
asmlinkage void sm4_ce_cfb_dec(const u32 *rkey, u8 *dst, const u8 *src,
                               u8 *iv, unsigned int nblks);
asmlinkage void sm4_ce_ctr_enc(const u32 *rkey, u8 *dst, const u8 *src,
                               u8 *iv, unsigned int nblks);

static int sm4_setkey(struct crypto_skcipher *tfm, const u8 *key,
                      unsigned int key_len)
{
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

        if (key_len != SM4_KEY_SIZE)
                return -EINVAL;

        sm4_ce_expand_key(key, ctx->rkey_enc, ctx->rkey_dec,
                          crypto_sm4_fk, crypto_sm4_ck);
        return 0;
}

static int sm4_ecb_do_crypt(struct skcipher_request *req, const u32 *rkey)
{
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                unsigned int nblks;

                kernel_neon_begin();

                nblks = BYTES2BLKS(nbytes);
                if (nblks) {
                        sm4_ce_crypt(rkey, dst, src, nblks);
                        nbytes -= nblks * SM4_BLOCK_SIZE;
                }

                kernel_neon_end();

                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int sm4_ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

        return sm4_ecb_do_crypt(req, ctx->rkey_enc);
}

static int sm4_ecb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

        return sm4_ecb_do_crypt(req, ctx->rkey_dec);
}

static int sm4_cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                unsigned int nblks;

                kernel_neon_begin();

                nblks = BYTES2BLKS(nbytes);
                if (nblks) {
                        sm4_ce_cbc_enc(ctx->rkey_enc, dst, src, walk.iv, nblks);
                        nbytes -= nblks * SM4_BLOCK_SIZE;
                }

                kernel_neon_end();

                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int sm4_cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                unsigned int nblks;

                kernel_neon_begin();

                nblks = BYTES2BLKS(nbytes);
                if (nblks) {
                        sm4_ce_cbc_dec(ctx->rkey_dec, dst, src, walk.iv, nblks);
                        nbytes -= nblks * SM4_BLOCK_SIZE;
                }

                kernel_neon_end();

                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int sm4_cfb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                unsigned int nblks;

                kernel_neon_begin();

                nblks = BYTES2BLKS(nbytes);
                if (nblks) {
                        sm4_ce_cfb_enc(ctx->rkey_enc, dst, src, walk.iv, nblks);
                        dst += nblks * SM4_BLOCK_SIZE;
                        src += nblks * SM4_BLOCK_SIZE;
                        nbytes -= nblks * SM4_BLOCK_SIZE;
                }

                /* tail */
                if (walk.nbytes == walk.total && nbytes > 0) {
                        u8 keystream[SM4_BLOCK_SIZE];

                        sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
                        crypto_xor_cpy(dst, src, keystream, nbytes);
                        nbytes = 0;
                }

                kernel_neon_end();

                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int sm4_cfb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                unsigned int nblks;

                kernel_neon_begin();

                nblks = BYTES2BLKS(nbytes);
                if (nblks) {
                        /*
                         * CFB runs the block cipher in the encryption
                         * direction for both encryption and decryption,
                         * so the encryption round keys are used here too.
                         */
                        sm4_ce_cfb_dec(ctx->rkey_enc, dst, src, walk.iv, nblks);
                        dst += nblks * SM4_BLOCK_SIZE;
                        src += nblks * SM4_BLOCK_SIZE;
                        nbytes -= nblks * SM4_BLOCK_SIZE;
                }

                /* tail */
                if (walk.nbytes == walk.total && nbytes > 0) {
                        u8 keystream[SM4_BLOCK_SIZE];

                        sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
                        crypto_xor_cpy(dst, src, keystream, nbytes);
                        nbytes = 0;
                }

                kernel_neon_end();

                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int sm4_ctr_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                unsigned int nblks;

                kernel_neon_begin();

                nblks = BYTES2BLKS(nbytes);
                if (nblks) {
                        sm4_ce_ctr_enc(ctx->rkey_enc, dst, src, walk.iv, nblks);
                        dst += nblks * SM4_BLOCK_SIZE;
                        src += nblks * SM4_BLOCK_SIZE;
                        nbytes -= nblks * SM4_BLOCK_SIZE;
                }

                /* tail */
                if (walk.nbytes == walk.total && nbytes > 0) {
                        u8 keystream[SM4_BLOCK_SIZE];

                        sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
                        crypto_inc(walk.iv, SM4_BLOCK_SIZE);
                        crypto_xor_cpy(dst, src, keystream, nbytes);
                        nbytes = 0;
                }

                kernel_neon_end();

                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}
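
/*
 * Priority 400 makes these implementations preferred over lower-priority
 * software implementations of the same algorithms.
 */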
static struct skcipher_alg sm4_algs[] = {
        {
                .base = {
                        .cra_name = "ecb(sm4)",
                        .cra_driver_name = "ecb-sm4-ce",
                        .cra_priority = 400,
                        .cra_blocksize = SM4_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct sm4_ctx),
                        .cra_module = THIS_MODULE,
                },
                .min_keysize = SM4_KEY_SIZE,
                .max_keysize = SM4_KEY_SIZE,
                .setkey = sm4_setkey,
                .encrypt = sm4_ecb_encrypt,
                .decrypt = sm4_ecb_decrypt,
        }, {
                .base = {
                        .cra_name = "cbc(sm4)",
                        .cra_driver_name = "cbc-sm4-ce",
                        .cra_priority = 400,
                        .cra_blocksize = SM4_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct sm4_ctx),
                        .cra_module = THIS_MODULE,
                },
                .min_keysize = SM4_KEY_SIZE,
                .max_keysize = SM4_KEY_SIZE,
                .ivsize = SM4_BLOCK_SIZE,
                .setkey = sm4_setkey,
                .encrypt = sm4_cbc_encrypt,
                .decrypt = sm4_cbc_decrypt,
        }, {
                .base = {
                        .cra_name = "cfb(sm4)",
                        .cra_driver_name = "cfb-sm4-ce",
                        .cra_priority = 400,
                        .cra_blocksize = 1,
                        .cra_ctxsize = sizeof(struct sm4_ctx),
                        .cra_module = THIS_MODULE,
                },
                .min_keysize = SM4_KEY_SIZE,
                .max_keysize = SM4_KEY_SIZE,
                .ivsize = SM4_BLOCK_SIZE,
                .chunksize = SM4_BLOCK_SIZE,
                .setkey = sm4_setkey,
                .encrypt = sm4_cfb_encrypt,
                .decrypt = sm4_cfb_decrypt,
        }, {
                .base = {
                        .cra_name = "ctr(sm4)",
                        .cra_driver_name = "ctr-sm4-ce",
                        .cra_priority = 400,
                        .cra_blocksize = 1,
                        .cra_ctxsize = sizeof(struct sm4_ctx),
                        .cra_module = THIS_MODULE,
                },
                .min_keysize = SM4_KEY_SIZE,
                .max_keysize = SM4_KEY_SIZE,
                .ivsize = SM4_BLOCK_SIZE,
                .chunksize = SM4_BLOCK_SIZE,
                .setkey = sm4_setkey,
                .encrypt = sm4_ctr_crypt,
                .decrypt = sm4_ctr_crypt,
        }
};

static int __init sm4_init(void)
{
        return crypto_register_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
}

static void __exit sm4_exit(void)
{
        crypto_unregister_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
}

module_cpu_feature_match(SM4, sm4_init);
module_exit(sm4_exit);

MODULE_DESCRIPTION("SM4 ECB/CBC/CFB/CTR using ARMv8 Crypto Extensions");
MODULE_ALIAS_CRYPTO("sm4-ce");
MODULE_ALIAS_CRYPTO("sm4");
MODULE_ALIAS_CRYPTO("ecb(sm4)");
MODULE_ALIAS_CRYPTO("cbc(sm4)");
MODULE_ALIAS_CRYPTO("cfb(sm4)");
MODULE_ALIAS_CRYPTO("ctr(sm4)");
MODULE_AUTHOR("Tianjia Zhang <[email protected]>");
MODULE_LICENSE("GPL v2");
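
/*
 * Illustrative sketch (not part of the driver): how another kernel user might
 * request the "ecb(sm4)" cipher registered above through the generic skcipher
 * API. The function name and buffer handling are made up for illustration;
 * only the crypto API calls themselves are real. Kept inside #if 0 so it is
 * never compiled.
 */
#if 0
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

/* @buf must be addressable by a scatterlist (i.e. not stack memory). */
static int sm4_ecb_example(const u8 key[SM4_KEY_SIZE],
                           u8 buf[SM4_BLOCK_SIZE])
{
        struct crypto_skcipher *tfm;
        struct skcipher_request *req;
        struct scatterlist sg;
        DECLARE_CRYPTO_WAIT(wait);
        int err;

        /* The crypto core picks the highest-priority "ecb(sm4)" provider. */
        tfm = crypto_alloc_skcipher("ecb(sm4)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_skcipher_setkey(tfm, key, SM4_KEY_SIZE);
        if (err)
                goto out_free_tfm;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        /* Encrypt one block in place and wait synchronously for completion. */
        sg_init_one(&sg, buf, SM4_BLOCK_SIZE);
        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                      crypto_req_done, &wait);
        skcipher_request_set_crypt(req, &sg, &sg, SM4_BLOCK_SIZE, NULL);
        err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

        skcipher_request_free(req);
out_free_tfm:
        crypto_free_skcipher(tfm);
        return err;
}
#endif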