Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto update from Herbert Xu:
 "Here is the crypto update for 4.2:

  API:
   - Convert RNG interface to new style.
   - New AEAD interface with one SG list for AD and plain/cipher text.
     All external AEAD users have been converted.
   - New asymmetric key interface (akcipher).

  Algorithms:
   - Chacha20, Poly1305 and RFC7539 support.
   - New RSA implementation.
   - Jitter RNG.
   - DRBG is now seeded with both /dev/random and Jitter RNG. If kernel
     pool isn't ready then DRBG will be reseeded when it is.
   - DRBG is now the default crypto API RNG, replacing krng.
   - 842 compression (previously part of powerpc nx driver).

  Drivers:
   - Accelerated SHA-512 for arm64.
   - New Marvell CESA driver that supports DMA and more algorithms.
   - Updated powerpc nx 842 support.
   - Added support for SEC1 hardware to talitos"

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (292 commits)
  crypto: marvell/cesa - remove COMPILE_TEST dependency
  crypto: algif_aead - Temporarily disable all AEAD algorithms
  crypto: af_alg - Forbid the use internal algorithms
  crypto: echainiv - Only hold RNG during initialisation
  crypto: seqiv - Add compatibility support without RNG
  crypto: eseqiv - Offer normal cipher functionality without RNG
  crypto: chainiv - Offer normal cipher functionality without RNG
  crypto: user - Add CRYPTO_MSG_DELRNG
  crypto: user - Move cryptouser.h to uapi
  crypto: rng - Do not free default RNG when it becomes unused
  crypto: skcipher - Allow givencrypt to be NULL
  crypto: sahara - propagate the error on clk_disable_unprepare() failure
  crypto: rsa - fix invalid select for AKCIPHER
  crypto: picoxcell - Update to the current clk API
  crypto: nx - Check for bogus firmware properties
  crypto: marvell/cesa - add DT bindings documentation
  crypto: marvell/cesa - add support for Kirkwood and Dove SoCs
  crypto: marvell/cesa - add support for Orion SoCs
  crypto: marvell/cesa - add allhwsupport module parameter
  crypto: marvell/cesa - add support for all armada SoCs
  ...
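Since the DRBG now stands behind the crypto API's default "stdrng" algorithm, callers that previously got krng transparently get the DRBG. A minimal kernel-side sketch of drawing bytes through that interface (illustrative only, not part of this merge; example_get_random is a hypothetical name, and error handling is abbreviated):

#include <linux/err.h>
#include <linux/types.h>
#include <crypto/rng.h>

/* Sketch: fetch random bytes from the default crypto API RNG
 * ("stdrng", now backed by the DRBG). Passing a NULL seed to
 * crypto_rng_reset() asks the API to seed from the kernel pool. */
static int example_get_random(u8 *buf, unsigned int len)
{
    struct crypto_rng *rng;
    int err;

    rng = crypto_alloc_rng("stdrng", 0, 0);
    if (IS_ERR(rng))
        return PTR_ERR(rng);

    err = crypto_rng_reset(rng, NULL, crypto_rng_seedsize(rng));
    if (!err)
        err = crypto_rng_get_bytes(rng, buf, len);

    crypto_free_rng(rng);
    return err;
}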
--- a/drivers/crypto/vmx/Kconfig
+++ b/drivers/crypto/vmx/Kconfig
@@ -1,6 +1,6 @@
 config CRYPTO_DEV_VMX_ENCRYPT
     tristate "Encryption acceleration support on P8 CPU"
-    depends on PPC64 && CRYPTO_DEV_VMX
+    depends on CRYPTO_DEV_VMX
     default y
     help
       Support for VMX cryptographic acceleration instructions on Power8 CPU.
--- a/drivers/crypto/vmx/Makefile
+++ b/drivers/crypto/vmx/Makefile
@@ -4,7 +4,7 @@ vmx-crypto-objs := vmx.o aesp8-ppc.o ghashp8-ppc.o aes.o aes_cbc.o aes_ctr.o gha
 ifeq ($(CONFIG_CPU_LITTLE_ENDIAN),y)
 TARGET := linux-ppc64le
 else
-TARGET := linux-pcc64
+TARGET := linux-ppc64
 endif
 
 quiet_cmd_perl = PERL $@
--- a/drivers/crypto/vmx/aes.c
+++ b/drivers/crypto/vmx/aes.c
@@ -30,116 +30,118 @@
 #include "aesp8-ppc.h"
 
 struct p8_aes_ctx {
     struct crypto_cipher *fallback;
     struct aes_key enc_key;
    struct aes_key dec_key;
 };
 
 static int p8_aes_init(struct crypto_tfm *tfm)
 {
     const char *alg;
     struct crypto_cipher *fallback;
     struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
 
     if (!(alg = crypto_tfm_alg_name(tfm))) {
         printk(KERN_ERR "Failed to get algorithm name.\n");
         return -ENOENT;
     }
 
-    fallback = crypto_alloc_cipher(alg, 0 ,CRYPTO_ALG_NEED_FALLBACK);
+    fallback = crypto_alloc_cipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
     if (IS_ERR(fallback)) {
-        printk(KERN_ERR "Failed to allocate transformation for '%s': %ld\n",
+        printk(KERN_ERR
+               "Failed to allocate transformation for '%s': %ld\n",
                alg, PTR_ERR(fallback));
         return PTR_ERR(fallback);
     }
     printk(KERN_INFO "Using '%s' as fallback implementation.\n",
            crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));
 
     crypto_cipher_set_flags(fallback,
-                            crypto_cipher_get_flags((struct crypto_cipher *) tfm));
+                            crypto_cipher_get_flags((struct
+                                                     crypto_cipher *)
+                                                    tfm));
     ctx->fallback = fallback;
 
     return 0;
 }
 
 static void p8_aes_exit(struct crypto_tfm *tfm)
 {
     struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
 
     if (ctx->fallback) {
         crypto_free_cipher(ctx->fallback);
         ctx->fallback = NULL;
     }
 }
 
 static int p8_aes_setkey(struct crypto_tfm *tfm, const u8 *key,
                          unsigned int keylen)
 {
     int ret;
     struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
 
     preempt_disable();
     pagefault_disable();
     enable_kernel_altivec();
     ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
     ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
     pagefault_enable();
     preempt_enable();
 
     ret += crypto_cipher_setkey(ctx->fallback, key, keylen);
     return ret;
 }
 
 static void p8_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
     struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
 
     if (in_interrupt()) {
         crypto_cipher_encrypt_one(ctx->fallback, dst, src);
     } else {
         preempt_disable();
         pagefault_disable();
         enable_kernel_altivec();
         aes_p8_encrypt(src, dst, &ctx->enc_key);
         pagefault_enable();
         preempt_enable();
     }
 }
 
 static void p8_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
     struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
 
     if (in_interrupt()) {
         crypto_cipher_decrypt_one(ctx->fallback, dst, src);
     } else {
         preempt_disable();
         pagefault_disable();
         enable_kernel_altivec();
         aes_p8_decrypt(src, dst, &ctx->dec_key);
         pagefault_enable();
         preempt_enable();
     }
 }
 
 struct crypto_alg p8_aes_alg = {
     .cra_name = "aes",
     .cra_driver_name = "p8_aes",
     .cra_module = THIS_MODULE,
     .cra_priority = 1000,
     .cra_type = NULL,
     .cra_flags = CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_NEED_FALLBACK,
     .cra_alignmask = 0,
     .cra_blocksize = AES_BLOCK_SIZE,
     .cra_ctxsize = sizeof(struct p8_aes_ctx),
     .cra_init = p8_aes_init,
     .cra_exit = p8_aes_exit,
     .cra_cipher = {
         .cia_min_keysize = AES_MIN_KEY_SIZE,
         .cia_max_keysize = AES_MAX_KEY_SIZE,
         .cia_setkey = p8_aes_setkey,
         .cia_encrypt = p8_aes_encrypt,
         .cia_decrypt = p8_aes_decrypt,
     },
 };
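Because p8_aes registers the plain "aes" name at cra_priority 1000, ordinary crypto API users pick it up automatically on POWER8. A minimal sketch of such a caller (hypothetical function, error handling abbreviated, demo key only):

#include <linux/crypto.h>
#include <crypto/aes.h>

/* Sketch: one-block AES via the crypto API. With this driver loaded
 * on POWER8, "aes" resolves to p8_aes (priority 1000); elsewhere it
 * resolves to whichever other implementation is registered. */
static void example_aes_one_block(void)
{
    struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
    u8 key[AES_MIN_KEY_SIZE] = { 0 };   /* AES-128 demo key */
    u8 in[AES_BLOCK_SIZE] = { 0 }, out[AES_BLOCK_SIZE];

    if (IS_ERR(tfm))
        return;
    crypto_cipher_setkey(tfm, key, sizeof(key));
    crypto_cipher_encrypt_one(tfm, out, in);
    crypto_free_cipher(tfm);
}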
--- a/drivers/crypto/vmx/aes_cbc.c
+++ b/drivers/crypto/vmx/aes_cbc.c
@@ -31,160 +31,168 @@
 #include "aesp8-ppc.h"
 
 struct p8_aes_cbc_ctx {
     struct crypto_blkcipher *fallback;
     struct aes_key enc_key;
     struct aes_key dec_key;
 };
 
 static int p8_aes_cbc_init(struct crypto_tfm *tfm)
 {
     const char *alg;
     struct crypto_blkcipher *fallback;
     struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
 
     if (!(alg = crypto_tfm_alg_name(tfm))) {
         printk(KERN_ERR "Failed to get algorithm name.\n");
         return -ENOENT;
     }
 
-    fallback = crypto_alloc_blkcipher(alg, 0 ,CRYPTO_ALG_NEED_FALLBACK);
+    fallback =
+        crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
     if (IS_ERR(fallback)) {
-        printk(KERN_ERR "Failed to allocate transformation for '%s': %ld\n",
+        printk(KERN_ERR
+               "Failed to allocate transformation for '%s': %ld\n",
                alg, PTR_ERR(fallback));
         return PTR_ERR(fallback);
     }
     printk(KERN_INFO "Using '%s' as fallback implementation.\n",
            crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));
 
-    crypto_blkcipher_set_flags(fallback,
-                               crypto_blkcipher_get_flags((struct crypto_blkcipher *) tfm));
+    crypto_blkcipher_set_flags(
+        fallback,
+        crypto_blkcipher_get_flags((struct crypto_blkcipher *)tfm));
     ctx->fallback = fallback;
 
     return 0;
 }
 
 static void p8_aes_cbc_exit(struct crypto_tfm *tfm)
 {
     struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
 
     if (ctx->fallback) {
         crypto_free_blkcipher(ctx->fallback);
         ctx->fallback = NULL;
     }
 }
 
 static int p8_aes_cbc_setkey(struct crypto_tfm *tfm, const u8 *key,
                              unsigned int keylen)
 {
     int ret;
     struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
 
     preempt_disable();
     pagefault_disable();
     enable_kernel_altivec();
     ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
     ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
     pagefault_enable();
     preempt_enable();
 
     ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
     return ret;
 }
 
 static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
-                              struct scatterlist *dst, struct scatterlist *src,
-                              unsigned int nbytes)
+                              struct scatterlist *dst,
+                              struct scatterlist *src, unsigned int nbytes)
 {
     int ret;
     struct blkcipher_walk walk;
-    struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(
-        crypto_blkcipher_tfm(desc->tfm));
+    struct p8_aes_cbc_ctx *ctx =
+        crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
     struct blkcipher_desc fallback_desc = {
         .tfm = ctx->fallback,
         .info = desc->info,
         .flags = desc->flags
     };
 
     if (in_interrupt()) {
-        ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes);
+        ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src,
+                                       nbytes);
     } else {
         preempt_disable();
         pagefault_disable();
         enable_kernel_altivec();
 
         blkcipher_walk_init(&walk, dst, src, nbytes);
         ret = blkcipher_walk_virt(desc, &walk);
         while ((nbytes = walk.nbytes)) {
-            aes_p8_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
-                               nbytes & AES_BLOCK_MASK, &ctx->enc_key, walk.iv, 1);
+            aes_p8_cbc_encrypt(walk.src.virt.addr,
+                               walk.dst.virt.addr,
+                               nbytes & AES_BLOCK_MASK,
+                               &ctx->enc_key, walk.iv, 1);
             nbytes &= AES_BLOCK_SIZE - 1;
             ret = blkcipher_walk_done(desc, &walk, nbytes);
         }
 
         pagefault_enable();
         preempt_enable();
     }
 
     return ret;
 }
 
 static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
-                              struct scatterlist *dst, struct scatterlist *src,
-                              unsigned int nbytes)
+                              struct scatterlist *dst,
+                              struct scatterlist *src, unsigned int nbytes)
 {
     int ret;
     struct blkcipher_walk walk;
-    struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(
-        crypto_blkcipher_tfm(desc->tfm));
+    struct p8_aes_cbc_ctx *ctx =
+        crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
     struct blkcipher_desc fallback_desc = {
         .tfm = ctx->fallback,
         .info = desc->info,
         .flags = desc->flags
     };
 
     if (in_interrupt()) {
-        ret = crypto_blkcipher_decrypt(&fallback_desc, dst, src, nbytes);
+        ret = crypto_blkcipher_decrypt(&fallback_desc, dst, src,
+                                       nbytes);
     } else {
         preempt_disable();
         pagefault_disable();
         enable_kernel_altivec();
 
         blkcipher_walk_init(&walk, dst, src, nbytes);
         ret = blkcipher_walk_virt(desc, &walk);
         while ((nbytes = walk.nbytes)) {
-            aes_p8_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
-                               nbytes & AES_BLOCK_MASK, &ctx->dec_key, walk.iv, 0);
+            aes_p8_cbc_encrypt(walk.src.virt.addr,
+                               walk.dst.virt.addr,
+                               nbytes & AES_BLOCK_MASK,
+                               &ctx->dec_key, walk.iv, 0);
             nbytes &= AES_BLOCK_SIZE - 1;
             ret = blkcipher_walk_done(desc, &walk, nbytes);
         }
 
         pagefault_enable();
         preempt_enable();
     }
 
     return ret;
 }
 
 struct crypto_alg p8_aes_cbc_alg = {
     .cra_name = "cbc(aes)",
     .cra_driver_name = "p8_aes_cbc",
     .cra_module = THIS_MODULE,
     .cra_priority = 1000,
     .cra_type = &crypto_blkcipher_type,
     .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
     .cra_alignmask = 0,
     .cra_blocksize = AES_BLOCK_SIZE,
     .cra_ctxsize = sizeof(struct p8_aes_cbc_ctx),
     .cra_init = p8_aes_cbc_init,
     .cra_exit = p8_aes_cbc_exit,
     .cra_blkcipher = {
         .ivsize = 0,
         .min_keysize = AES_MIN_KEY_SIZE,
         .max_keysize = AES_MAX_KEY_SIZE,
         .setkey = p8_aes_cbc_setkey,
         .encrypt = p8_aes_cbc_encrypt,
         .decrypt = p8_aes_cbc_decrypt,
     },
 };
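The encrypt and decrypt paths above share the walk idiom of the old blkcipher API; a commented skeleton as a reading aid (not new driver code):

/* Skeleton of the blkcipher walk loop used above:
 *
 *   blkcipher_walk_init(&walk, dst, src, nbytes);
 *   ret = blkcipher_walk_virt(desc, &walk);       maps first chunk
 *   while ((nbytes = walk.nbytes)) {
 *           process nbytes & AES_BLOCK_MASK bytes of walk.src/walk.dst
 *           nbytes &= AES_BLOCK_SIZE - 1;         sub-block leftover
 *           ret = blkcipher_walk_done(desc, &walk, nbytes);
 *   }
 *
 * blkcipher_walk_done() consumes what was processed, carries the
 * leftover forward, and maps the next contiguous chunk, so the
 * scatterlists never need to be linearized. */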
--- a/drivers/crypto/vmx/aes_ctr.c
+++ b/drivers/crypto/vmx/aes_ctr.c
@@ -30,138 +30,147 @@
 #include "aesp8-ppc.h"
 
 struct p8_aes_ctr_ctx {
     struct crypto_blkcipher *fallback;
     struct aes_key enc_key;
 };
 
 static int p8_aes_ctr_init(struct crypto_tfm *tfm)
 {
     const char *alg;
     struct crypto_blkcipher *fallback;
     struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
     if (!(alg = crypto_tfm_alg_name(tfm))) {
         printk(KERN_ERR "Failed to get algorithm name.\n");
         return -ENOENT;
     }
 
-    fallback = crypto_alloc_blkcipher(alg, 0 ,CRYPTO_ALG_NEED_FALLBACK);
+    fallback =
+        crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
     if (IS_ERR(fallback)) {
-        printk(KERN_ERR "Failed to allocate transformation for '%s': %ld\n",
+        printk(KERN_ERR
+               "Failed to allocate transformation for '%s': %ld\n",
                alg, PTR_ERR(fallback));
         return PTR_ERR(fallback);
     }
     printk(KERN_INFO "Using '%s' as fallback implementation.\n",
            crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));
 
-    crypto_blkcipher_set_flags(fallback,
-                               crypto_blkcipher_get_flags((struct crypto_blkcipher *) tfm));
+    crypto_blkcipher_set_flags(
+        fallback,
+        crypto_blkcipher_get_flags((struct crypto_blkcipher *)tfm));
     ctx->fallback = fallback;
 
     return 0;
 }
 
 static void p8_aes_ctr_exit(struct crypto_tfm *tfm)
 {
     struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
     if (ctx->fallback) {
         crypto_free_blkcipher(ctx->fallback);
         ctx->fallback = NULL;
     }
 }
 
 static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
                              unsigned int keylen)
 {
     int ret;
     struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
     pagefault_disable();
     enable_kernel_altivec();
     ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
     pagefault_enable();
 
     ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
     return ret;
 }
 
 static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
                              struct blkcipher_walk *walk)
 {
     u8 *ctrblk = walk->iv;
     u8 keystream[AES_BLOCK_SIZE];
     u8 *src = walk->src.virt.addr;
     u8 *dst = walk->dst.virt.addr;
     unsigned int nbytes = walk->nbytes;
 
     pagefault_disable();
     enable_kernel_altivec();
     aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
     pagefault_enable();
 
     crypto_xor(keystream, src, nbytes);
     memcpy(dst, keystream, nbytes);
     crypto_inc(ctrblk, AES_BLOCK_SIZE);
 }
 
 static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
-                            struct scatterlist *dst, struct scatterlist *src,
-                            unsigned int nbytes)
+                            struct scatterlist *dst,
+                            struct scatterlist *src, unsigned int nbytes)
 {
     int ret;
     struct blkcipher_walk walk;
-    struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(
-        crypto_blkcipher_tfm(desc->tfm));
+    struct p8_aes_ctr_ctx *ctx =
+        crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
     struct blkcipher_desc fallback_desc = {
         .tfm = ctx->fallback,
         .info = desc->info,
         .flags = desc->flags
     };
 
     if (in_interrupt()) {
-        ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes);
+        ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src,
+                                       nbytes);
     } else {
         blkcipher_walk_init(&walk, dst, src, nbytes);
         ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
         while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
             pagefault_disable();
             enable_kernel_altivec();
-            aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr, walk.dst.virt.addr,
-                                        (nbytes & AES_BLOCK_MASK)/AES_BLOCK_SIZE, &ctx->enc_key, walk.iv);
+            aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
+                                        walk.dst.virt.addr,
+                                        (nbytes &
+                                         AES_BLOCK_MASK) /
+                                        AES_BLOCK_SIZE,
+                                        &ctx->enc_key,
+                                        walk.iv);
             pagefault_enable();
 
             crypto_inc(walk.iv, AES_BLOCK_SIZE);
             nbytes &= AES_BLOCK_SIZE - 1;
             ret = blkcipher_walk_done(desc, &walk, nbytes);
         }
         if (walk.nbytes) {
             p8_aes_ctr_final(ctx, &walk);
             ret = blkcipher_walk_done(desc, &walk, 0);
         }
     }
 
     return ret;
 }
 
 struct crypto_alg p8_aes_ctr_alg = {
     .cra_name = "ctr(aes)",
     .cra_driver_name = "p8_aes_ctr",
     .cra_module = THIS_MODULE,
     .cra_priority = 1000,
     .cra_type = &crypto_blkcipher_type,
     .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
     .cra_alignmask = 0,
     .cra_blocksize = 1,
     .cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
     .cra_init = p8_aes_ctr_init,
     .cra_exit = p8_aes_ctr_exit,
     .cra_blkcipher = {
         .ivsize = 0,
         .min_keysize = AES_MIN_KEY_SIZE,
         .max_keysize = AES_MAX_KEY_SIZE,
         .setkey = p8_aes_ctr_setkey,
         .encrypt = p8_aes_ctr_crypt,
         .decrypt = p8_aes_ctr_crypt,
     },
 };
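p8_aes_ctr_final() above is what lets cra_blocksize be 1: CTR is a stream mode, so a trailing partial block is handled by encrypting the counter once and XORing only the bytes that remain. A standalone illustration, where aes_encrypt_block() is a hypothetical stand-in for aes_p8_encrypt():

#include <stddef.h>

/* Hypothetical single-block AES primitive, assumed to exist. */
extern void aes_encrypt_block(const unsigned char in[16],
                              unsigned char out[16]);

/* Illustration only: CTR tail handling for n < 16 bytes. */
static void ctr_tail(const unsigned char ctr[16], const unsigned char *src,
                     unsigned char *dst, size_t n)
{
    unsigned char keystream[16];
    size_t i;

    aes_encrypt_block(ctr, keystream);      /* keystream = E_k(counter) */
    for (i = 0; i < n; i++)
        dst[i] = src[i] ^ keystream[i];     /* XOR only the n tail bytes */
}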
--- a/drivers/crypto/vmx/aesp8-ppc.h
+++ b/drivers/crypto/vmx/aesp8-ppc.h
@@ -4,17 +4,18 @@
 #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))
 
 struct aes_key {
     u8 key[AES_MAX_KEYLENGTH];
     int rounds;
 };
 
 int aes_p8_set_encrypt_key(const u8 *userKey, const int bits,
                            struct aes_key *key);
 int aes_p8_set_decrypt_key(const u8 *userKey, const int bits,
                            struct aes_key *key);
 void aes_p8_encrypt(const u8 *in, u8 *out, const struct aes_key *key);
-void aes_p8_decrypt(const u8 *in, u8 *out,const struct aes_key *key);
+void aes_p8_decrypt(const u8 *in, u8 *out, const struct aes_key *key);
 void aes_p8_cbc_encrypt(const u8 *in, u8 *out, size_t len,
                         const struct aes_key *key, u8 *iv, const int enc);
 void aes_p8_ctr32_encrypt_blocks(const u8 *in, u8 *out,
-                                 size_t len, const struct aes_key *key, const u8 *iv);
+                                 size_t len, const struct aes_key *key,
+                                 const u8 *iv);
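AES_BLOCK_MASK above is the usual round-down-to-block-size trick: with AES_BLOCK_SIZE = 16 the mask is ~15, so len & AES_BLOCK_MASK is the largest multiple of 16 not exceeding len. A worked example:

/* AES_BLOCK_SIZE = 16, AES_BLOCK_MASK = ~15. For len = 70:
 *   70 & AES_BLOCK_MASK       == 64   (four whole blocks)
 *   70 & (AES_BLOCK_SIZE - 1) ==  6   (tail bytes, handled separately)
 * Together the two expressions split any length into bulk plus
 * remainder, which is exactly how the CBC and CTR loops above carve
 * up walk.nbytes. */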
--- a/drivers/crypto/vmx/ghash.c
+++ b/drivers/crypto/vmx/ghash.c
@@ -39,184 +39,188 @@
 void gcm_init_p8(u128 htable[16], const u64 Xi[2]);
 void gcm_gmult_p8(u64 Xi[2], const u128 htable[16]);
 void gcm_ghash_p8(u64 Xi[2], const u128 htable[16],
-                  const u8 *in,size_t len);
+                  const u8 *in, size_t len);
 
 struct p8_ghash_ctx {
     u128 htable[16];
     struct crypto_shash *fallback;
 };
 
 struct p8_ghash_desc_ctx {
     u64 shash[2];
     u8 buffer[GHASH_DIGEST_SIZE];
     int bytes;
     struct shash_desc fallback_desc;
 };
 
 static int p8_ghash_init_tfm(struct crypto_tfm *tfm)
 {
     const char *alg;
     struct crypto_shash *fallback;
     struct crypto_shash *shash_tfm = __crypto_shash_cast(tfm);
     struct p8_ghash_ctx *ctx = crypto_tfm_ctx(tfm);
 
     if (!(alg = crypto_tfm_alg_name(tfm))) {
         printk(KERN_ERR "Failed to get algorithm name.\n");
         return -ENOENT;
     }
 
-    fallback = crypto_alloc_shash(alg, 0 ,CRYPTO_ALG_NEED_FALLBACK);
+    fallback = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
     if (IS_ERR(fallback)) {
-        printk(KERN_ERR "Failed to allocate transformation for '%s': %ld\n",
+        printk(KERN_ERR
+               "Failed to allocate transformation for '%s': %ld\n",
                alg, PTR_ERR(fallback));
         return PTR_ERR(fallback);
     }
     printk(KERN_INFO "Using '%s' as fallback implementation.\n",
            crypto_tfm_alg_driver_name(crypto_shash_tfm(fallback)));
 
     crypto_shash_set_flags(fallback,
-                           crypto_shash_get_flags((struct crypto_shash *) tfm));
+                           crypto_shash_get_flags((struct crypto_shash
+                                                   *) tfm));
     ctx->fallback = fallback;
 
     shash_tfm->descsize = sizeof(struct p8_ghash_desc_ctx)
         + crypto_shash_descsize(fallback);
 
     return 0;
 }
 
 static void p8_ghash_exit_tfm(struct crypto_tfm *tfm)
 {
     struct p8_ghash_ctx *ctx = crypto_tfm_ctx(tfm);
 
     if (ctx->fallback) {
         crypto_free_shash(ctx->fallback);
         ctx->fallback = NULL;
     }
 }
 
 static int p8_ghash_init(struct shash_desc *desc)
 {
     struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
     struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
 
     dctx->bytes = 0;
     memset(dctx->shash, 0, GHASH_DIGEST_SIZE);
     dctx->fallback_desc.tfm = ctx->fallback;
     dctx->fallback_desc.flags = desc->flags;
     return crypto_shash_init(&dctx->fallback_desc);
 }
 
 static int p8_ghash_setkey(struct crypto_shash *tfm, const u8 *key,
                            unsigned int keylen)
 {
     struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(tfm));
 
     if (keylen != GHASH_KEY_LEN)
         return -EINVAL;
 
     preempt_disable();
     pagefault_disable();
     enable_kernel_altivec();
     enable_kernel_fp();
     gcm_init_p8(ctx->htable, (const u64 *) key);
     pagefault_enable();
     preempt_enable();
     return crypto_shash_setkey(ctx->fallback, key, keylen);
 }
 
 static int p8_ghash_update(struct shash_desc *desc,
                            const u8 *src, unsigned int srclen)
 {
     unsigned int len;
     struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
     struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
 
     if (IN_INTERRUPT) {
-        return crypto_shash_update(&dctx->fallback_desc, src, srclen);
+        return crypto_shash_update(&dctx->fallback_desc, src,
+                                   srclen);
     } else {
         if (dctx->bytes) {
             if (dctx->bytes + srclen < GHASH_DIGEST_SIZE) {
-                memcpy(dctx->buffer + dctx->bytes, src, srclen);
+                memcpy(dctx->buffer + dctx->bytes, src,
+                       srclen);
                 dctx->bytes += srclen;
                 return 0;
             }
             memcpy(dctx->buffer + dctx->bytes, src,
                    GHASH_DIGEST_SIZE - dctx->bytes);
             preempt_disable();
             pagefault_disable();
             enable_kernel_altivec();
             enable_kernel_fp();
-            gcm_ghash_p8(dctx->shash, ctx->htable, dctx->buffer,
-                         GHASH_DIGEST_SIZE);
+            gcm_ghash_p8(dctx->shash, ctx->htable,
+                         dctx->buffer, GHASH_DIGEST_SIZE);
             pagefault_enable();
             preempt_enable();
             src += GHASH_DIGEST_SIZE - dctx->bytes;
             srclen -= GHASH_DIGEST_SIZE - dctx->bytes;
             dctx->bytes = 0;
         }
         len = srclen & ~(GHASH_DIGEST_SIZE - 1);
         if (len) {
             preempt_disable();
             pagefault_disable();
             enable_kernel_altivec();
             enable_kernel_fp();
             gcm_ghash_p8(dctx->shash, ctx->htable, src, len);
             pagefault_enable();
             preempt_enable();
             src += len;
             srclen -= len;
         }
         if (srclen) {
             memcpy(dctx->buffer, src, srclen);
             dctx->bytes = srclen;
         }
         return 0;
     }
 }
 
 static int p8_ghash_final(struct shash_desc *desc, u8 *out)
 {
     int i;
     struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
     struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
 
     if (IN_INTERRUPT) {
         return crypto_shash_final(&dctx->fallback_desc, out);
     } else {
         if (dctx->bytes) {
             for (i = dctx->bytes; i < GHASH_DIGEST_SIZE; i++)
                 dctx->buffer[i] = 0;
             preempt_disable();
             pagefault_disable();
             enable_kernel_altivec();
             enable_kernel_fp();
-            gcm_ghash_p8(dctx->shash, ctx->htable, dctx->buffer,
-                         GHASH_DIGEST_SIZE);
+            gcm_ghash_p8(dctx->shash, ctx->htable,
+                         dctx->buffer, GHASH_DIGEST_SIZE);
             pagefault_enable();
             preempt_enable();
             dctx->bytes = 0;
         }
         memcpy(out, dctx->shash, GHASH_DIGEST_SIZE);
         return 0;
     }
 }
 
 struct shash_alg p8_ghash_alg = {
     .digestsize = GHASH_DIGEST_SIZE,
     .init = p8_ghash_init,
     .update = p8_ghash_update,
     .final = p8_ghash_final,
     .setkey = p8_ghash_setkey,
     .descsize = sizeof(struct p8_ghash_desc_ctx),
     .base = {
         .cra_name = "ghash",
         .cra_driver_name = "p8_ghash",
         .cra_priority = 1000,
         .cra_flags = CRYPTO_ALG_TYPE_SHASH | CRYPTO_ALG_NEED_FALLBACK,
         .cra_blocksize = GHASH_BLOCK_SIZE,
         .cra_ctxsize = sizeof(struct p8_ghash_ctx),
         .cra_module = THIS_MODULE,
         .cra_init = p8_ghash_init_tfm,
         .cra_exit = p8_ghash_exit_tfm,
     },
 };
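p8_ghash_update() above is the standard block-buffering dance for a 16-byte-block hash; a reading aid with the AltiVec bracketing stripped away:

/* The update path runs in three phases:
 *   1. if dctx->buffer holds a partial block, top it up from src and
 *      hash the completed block with gcm_ghash_p8();
 *   2. hash the whole-block middle, srclen & ~(GHASH_DIGEST_SIZE - 1)
 *      bytes, directly from src in one gcm_ghash_p8() call;
 *   3. copy the remaining tail (< 16 bytes) into dctx->buffer for the
 *      next update(), or for final(), which zero-pads and hashes it.
 */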
--- a/drivers/crypto/vmx/vmx.c
+++ b/drivers/crypto/vmx/vmx.c
@@ -32,57 +32,57 @@ extern struct crypto_alg p8_aes_alg;
 extern struct crypto_alg p8_aes_cbc_alg;
 extern struct crypto_alg p8_aes_ctr_alg;
 static struct crypto_alg *algs[] = {
     &p8_aes_alg,
     &p8_aes_cbc_alg,
     &p8_aes_ctr_alg,
     NULL,
 };
 
 int __init p8_init(void)
 {
     int ret = 0;
     struct crypto_alg **alg_it;
 
     if (!(cur_cpu_spec->cpu_user_features2 & PPC_FEATURE2_VEC_CRYPTO))
         return -ENODEV;
 
     for (alg_it = algs; *alg_it; alg_it++) {
         ret = crypto_register_alg(*alg_it);
         printk(KERN_INFO "crypto_register_alg '%s' = %d\n",
                (*alg_it)->cra_name, ret);
         if (ret) {
             for (alg_it--; alg_it >= algs; alg_it--)
                 crypto_unregister_alg(*alg_it);
             break;
         }
     }
     if (ret)
         return ret;
 
     ret = crypto_register_shash(&p8_ghash_alg);
     if (ret) {
         for (alg_it = algs; *alg_it; alg_it++)
             crypto_unregister_alg(*alg_it);
     }
     return ret;
 }
 
 void __exit p8_exit(void)
 {
     struct crypto_alg **alg_it;
 
     for (alg_it = algs; *alg_it; alg_it++) {
         printk(KERN_INFO "Removing '%s'\n", (*alg_it)->cra_name);
         crypto_unregister_alg(*alg_it);
     }
     crypto_unregister_shash(&p8_ghash_alg);
 }
 
 module_init(p8_init);
 module_exit(p8_exit);
 
 MODULE_AUTHOR("Marcelo Cerri<mhcerri@br.ibm.com>");
-MODULE_DESCRIPTION("IBM VMX cryptogaphic acceleration instructions support on Power 8");
+MODULE_DESCRIPTION("IBM VMX cryptographic acceleration instructions "
+                   "support on Power 8");
 MODULE_LICENSE("GPL");
 MODULE_VERSION("1.0.0");
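The registration logic in p8_init() above is the usual all-or-nothing unwind, summarized here as a reading aid:

/* Register each algorithm in order; on the first failure, walk back
 * down the array unregistering whatever already succeeded, so a
 * half-loaded module never leaves stale algorithms behind. The shash
 * registration at the end gets the same treatment, and p8_exit()
 * performs the full walk unconditionally. */

On a POWER8 machine with PPC_FEATURE2_VEC_CRYPTO set, the registered implementations should then be visible in /proc/crypto under the driver names p8_aes, p8_aes_cbc, p8_aes_ctr and p8_ghash.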