Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto update from Herbert Xu:
 "Here is the crypto update for 3.9:

  - Added accelerated implementation of crc32 using pclmulqdq.
  - Added test vector for fcrypt.
  - Added support for OMAP4/AM33XX cipher and hash.
  - Fixed loose crypto_user input checks.
  - Misc fixes"

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (43 commits)
  crypto: user - ensure user supplied strings are nul-terminated
  crypto: user - fix empty string test in report API
  crypto: user - fix info leaks in report API
  crypto: caam - Added property fsl,sec-era in SEC4.0 device tree binding.
  crypto: use ERR_CAST
  crypto: atmel-aes - adjust duplicate test
  crypto: crc32-pclmul - Kill warning on x86-32
  crypto: x86/twofish - assembler clean-ups: use ENTRY/ENDPROC, localize jump labels
  crypto: x86/sha1 - assembler clean-ups: use ENTRY/ENDPROC
  crypto: x86/serpent - use ENTRY/ENDPROC for assember functions and localize jump targets
  crypto: x86/salsa20 - assembler cleanup, use ENTRY/ENDPROC for assember functions and rename ECRYPT_* to salsa20_*
  crypto: x86/ghash - assembler clean-up: use ENDPROC at end of assember functions
  crypto: x86/crc32c - assembler clean-up: use ENTRY/ENDPROC
  crypto: cast6-avx: use ENTRY()/ENDPROC() for assembler functions
  crypto: cast5-avx: use ENTRY()/ENDPROC() for assembler functions and localize jump targets
  crypto: camellia-x86_64/aes-ni: use ENTRY()/ENDPROC() for assembler functions and localize jump targets
  crypto: blowfish-x86_64: use ENTRY()/ENDPROC() for assembler functions and localize jump targets
  crypto: aesni-intel - add ENDPROC statements for assembler functions
  crypto: x86/aes - assembler clean-ups: use ENTRY/ENDPROC, localize jump targets
  crypto: testmgr - add test vector for fcrypt
  ...
@@ -353,6 +353,27 @@ config CRYPTO_CRC32C_SPARC64
 	  CRC32c CRC algorithm implemented using sparc64 crypto instructions,
 	  when available.
 
+config CRYPTO_CRC32
+	tristate "CRC32 CRC algorithm"
+	select CRYPTO_HASH
+	select CRC32
+	help
+	  CRC-32-IEEE 802.3 cyclic redundancy-check algorithm.
+	  Shash crypto api wrappers to crc32_le function.
+
+config CRYPTO_CRC32_PCLMUL
+	tristate "CRC32 PCLMULQDQ hardware acceleration"
+	depends on X86
+	select CRYPTO_HASH
+	select CRC32
+	help
+	  From Intel Westmere and AMD Bulldozer processor with SSE4.2
+	  and PCLMULQDQ supported, the processor will support
+	  CRC32 PCLMULQDQ implementation using hardware accelerated PCLMULQDQ
+	  instruction. This option will create 'crc32-plcmul' module,
+	  which will enable any routine to use the CRC-32-IEEE 802.3 checksum
+	  and gain better performance as compared with the table implementation.
+
 config CRYPTO_GHASH
 	tristate "GHASH digest algorithm"
 	select CRYPTO_GF128MUL
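Both options register an shash with cra_name "crc32": the generic wrapper as driver "crc32-table" at priority 100 (see crypto/crc32.c below) and the accelerated one as "crc32-pclmul" at a higher priority, so a caller normally just asks for "crc32" and gets the fastest loaded implementation. A minimal, hypothetical caller sketch follows; the function name, buffer handling, and error paths are assumptions for illustration, not part of this merge:

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

/* Hypothetical example: hash 'buf' with whichever "crc32" implementation
 * the crypto core selects by priority (crc32-table or crc32-pclmul).
 */
static int crc32_via_shash(const u8 *buf, unsigned int len, __le32 *out)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	__le32 seed = cpu_to_le32(0);	/* crc32_setkey() expects a little-endian u32 */
	int err;

	tfm = crypto_alloc_shash("crc32", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_setkey(tfm, (const u8 *)&seed, sizeof(seed));
	if (err)
		goto out_free_tfm;

	desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		err = -ENOMEM;
		goto out_free_tfm;
	}
	desc->tfm = tfm;
	desc->flags = 0;

	err = crypto_shash_digest(desc, buf, len, (u8 *)out);	/* little-endian result */

	kfree(desc);
out_free_tfm:
	crypto_free_shash(tfm);
	return err;
}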
@@ -81,6 +81,7 @@ obj-$(CONFIG_CRYPTO_DEFLATE) += deflate.o
 obj-$(CONFIG_CRYPTO_ZLIB) += zlib.o
 obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o
 obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o
+obj-$(CONFIG_CRYPTO_CRC32) += crc32.o
 obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o authencesn.o
 obj-$(CONFIG_CRYPTO_LZO) += lzo.o
 obj-$(CONFIG_CRYPTO_842) += 842.o
@@ -388,9 +388,9 @@ static int crypto_ablkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
 {
 	struct crypto_report_blkcipher rblkcipher;
 
-	snprintf(rblkcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "ablkcipher");
-	snprintf(rblkcipher.geniv, CRYPTO_MAX_ALG_NAME, "%s",
-		 alg->cra_ablkcipher.geniv ?: "<default>");
+	strncpy(rblkcipher.type, "ablkcipher", sizeof(rblkcipher.type));
+	strncpy(rblkcipher.geniv, alg->cra_ablkcipher.geniv ?: "<default>",
+		sizeof(rblkcipher.geniv));
 
 	rblkcipher.blocksize = alg->cra_blocksize;
 	rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize;
@@ -469,9 +469,9 @@ static int crypto_givcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
 {
 	struct crypto_report_blkcipher rblkcipher;
 
-	snprintf(rblkcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "givcipher");
-	snprintf(rblkcipher.geniv, CRYPTO_MAX_ALG_NAME, "%s",
-		 alg->cra_ablkcipher.geniv ?: "<built-in>");
+	strncpy(rblkcipher.type, "givcipher", sizeof(rblkcipher.type));
+	strncpy(rblkcipher.geniv, alg->cra_ablkcipher.geniv ?: "<built-in>",
+		sizeof(rblkcipher.geniv));
 
 	rblkcipher.blocksize = alg->cra_blocksize;
 	rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize;
@@ -117,9 +117,8 @@ static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
 	struct crypto_report_aead raead;
 	struct aead_alg *aead = &alg->cra_aead;
 
-	snprintf(raead.type, CRYPTO_MAX_ALG_NAME, "%s", "aead");
-	snprintf(raead.geniv, CRYPTO_MAX_ALG_NAME, "%s",
-		 aead->geniv ?: "<built-in>");
+	strncpy(raead.type, "aead", sizeof(raead.type));
+	strncpy(raead.geniv, aead->geniv ?: "<built-in>", sizeof(raead.geniv));
 
 	raead.blocksize = alg->cra_blocksize;
 	raead.maxauthsize = aead->maxauthsize;
@@ -203,8 +202,8 @@ static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
 	struct crypto_report_aead raead;
 	struct aead_alg *aead = &alg->cra_aead;
 
-	snprintf(raead.type, CRYPTO_MAX_ALG_NAME, "%s", "nivaead");
-	snprintf(raead.geniv, CRYPTO_MAX_ALG_NAME, "%s", aead->geniv);
+	strncpy(raead.type, "nivaead", sizeof(raead.type));
+	strncpy(raead.geniv, aead->geniv, sizeof(raead.geniv));
 
 	raead.blocksize = alg->cra_blocksize;
 	raead.maxauthsize = aead->maxauthsize;
@@ -282,18 +281,16 @@ struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
 	int err;
 
 	algt = crypto_get_attr_type(tb);
-	err = PTR_ERR(algt);
 	if (IS_ERR(algt))
-		return ERR_PTR(err);
+		return ERR_CAST(algt);
 
 	if ((algt->type ^ (CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV)) &
 	    algt->mask)
 		return ERR_PTR(-EINVAL);
 
 	name = crypto_attr_alg_name(tb[1]);
-	err = PTR_ERR(name);
 	if (IS_ERR(name))
-		return ERR_PTR(err);
+		return ERR_CAST(name);
 
 	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 	if (!inst)
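The ERR_CAST() conversions in this and the following files replace the PTR_ERR()/ERR_PTR() round trip with a single re-typing of the error pointer, which is also why the now-unused err variable disappears from several of the converted functions. A small illustration with a hypothetical helper (widget_from_name() is not kernel code, only a sketch of the pattern):

#include <linux/err.h>
#include <linux/slab.h>

struct widget { int id; };

/* Hypothetical: 'name' may itself be an error-encoded pointer coming from a
 * lookup helper; ERR_CAST() carries that error across the return-type change.
 */
static struct widget *widget_from_name(const char *name)
{
	if (IS_ERR(name))
		return ERR_CAST(name);	/* was: err = PTR_ERR(name); return ERR_PTR(err); */

	return kzalloc(sizeof(struct widget), GFP_KERNEL);
}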
@@ -404,7 +404,7 @@ static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg)
 {
 	struct crypto_report_hash rhash;
 
-	snprintf(rhash.type, CRYPTO_MAX_ALG_NAME, "%s", "ahash");
+	strncpy(rhash.type, "ahash", sizeof(rhash.type));
 
 	rhash.blocksize = alg->cra_blocksize;
 	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;
@@ -749,12 +749,10 @@ struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
 				    u32 type, u32 mask)
 {
 	const char *name;
-	int err;
 
 	name = crypto_attr_alg_name(rta);
-	err = PTR_ERR(name);
 	if (IS_ERR(name))
-		return ERR_PTR(err);
+		return ERR_CAST(name);
 
 	return crypto_find_alg(name, frontend, type, mask);
 }
@@ -592,9 +592,8 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
 	int err;
 
 	algt = crypto_get_attr_type(tb);
-	err = PTR_ERR(algt);
 	if (IS_ERR(algt))
-		return ERR_PTR(err);
+		return ERR_CAST(algt);
 
 	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
 		return ERR_PTR(-EINVAL);
@@ -715,9 +715,8 @@ static struct crypto_instance *crypto_authenc_esn_alloc(struct rtattr **tb)
 	int err;
 
 	algt = crypto_get_attr_type(tb);
-	err = PTR_ERR(algt);
 	if (IS_ERR(algt))
-		return ERR_PTR(err);
+		return ERR_CAST(algt);
 
 	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
 		return ERR_PTR(-EINVAL);
@@ -499,9 +499,9 @@ static int crypto_blkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
 {
 	struct crypto_report_blkcipher rblkcipher;
 
-	snprintf(rblkcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "blkcipher");
-	snprintf(rblkcipher.geniv, CRYPTO_MAX_ALG_NAME, "%s",
-		 alg->cra_blkcipher.geniv ?: "<default>");
+	strncpy(rblkcipher.type, "blkcipher", sizeof(rblkcipher.type));
+	strncpy(rblkcipher.geniv, alg->cra_blkcipher.geniv ?: "<default>",
+		sizeof(rblkcipher.geniv));
 
 	rblkcipher.blocksize = alg->cra_blocksize;
 	rblkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
@@ -588,18 +588,16 @@ struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
 	int err;
 
 	algt = crypto_get_attr_type(tb);
-	err = PTR_ERR(algt);
 	if (IS_ERR(algt))
-		return ERR_PTR(err);
+		return ERR_CAST(algt);
 
 	if ((algt->type ^ (CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV)) &
 	    algt->mask)
 		return ERR_PTR(-EINVAL);
 
 	name = crypto_attr_alg_name(tb[1]);
-	err = PTR_ERR(name);
 	if (IS_ERR(name))
-		return ERR_PTR(err);
+		return ERR_CAST(name);
 
 	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 	if (!inst)
crypto/ccm.c

@@ -484,18 +484,16 @@ static struct crypto_instance *crypto_ccm_alloc_common(struct rtattr **tb,
 	int err;
 
 	algt = crypto_get_attr_type(tb);
-	err = PTR_ERR(algt);
 	if (IS_ERR(algt))
-		return ERR_PTR(err);
+		return ERR_CAST(algt);
 
 	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
 		return ERR_PTR(-EINVAL);
 
 	cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER,
 				       CRYPTO_ALG_TYPE_MASK);
-	err = PTR_ERR(cipher);
 	if (IS_ERR(cipher))
-		return ERR_PTR(err);
+		return ERR_CAST(cipher);
 
 	err = -EINVAL;
 	if (cipher->cra_blocksize != 16)
@@ -573,15 +571,13 @@ out_put_cipher:
 
 static struct crypto_instance *crypto_ccm_alloc(struct rtattr **tb)
 {
-	int err;
 	const char *cipher_name;
 	char ctr_name[CRYPTO_MAX_ALG_NAME];
 	char full_name[CRYPTO_MAX_ALG_NAME];
 
 	cipher_name = crypto_attr_alg_name(tb[1]);
-	err = PTR_ERR(cipher_name);
 	if (IS_ERR(cipher_name))
-		return ERR_PTR(err);
+		return ERR_CAST(cipher_name);
 
 	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
 		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
@@ -612,20 +608,17 @@ static struct crypto_template crypto_ccm_tmpl = {
 
 static struct crypto_instance *crypto_ccm_base_alloc(struct rtattr **tb)
 {
-	int err;
 	const char *ctr_name;
 	const char *cipher_name;
 	char full_name[CRYPTO_MAX_ALG_NAME];
 
 	ctr_name = crypto_attr_alg_name(tb[1]);
-	err = PTR_ERR(ctr_name);
 	if (IS_ERR(ctr_name))
-		return ERR_PTR(err);
+		return ERR_CAST(ctr_name);
 
 	cipher_name = crypto_attr_alg_name(tb[2]);
-	err = PTR_ERR(cipher_name);
 	if (IS_ERR(cipher_name))
-		return ERR_PTR(err);
+		return ERR_CAST(cipher_name);
 
 	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
 		     ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
@@ -760,17 +753,15 @@ static struct crypto_instance *crypto_rfc4309_alloc(struct rtattr **tb)
 	int err;
 
 	algt = crypto_get_attr_type(tb);
-	err = PTR_ERR(algt);
 	if (IS_ERR(algt))
-		return ERR_PTR(err);
+		return ERR_CAST(algt);
 
 	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
 		return ERR_PTR(-EINVAL);
 
 	ccm_name = crypto_attr_alg_name(tb[1]);
-	err = PTR_ERR(ccm_name);
 	if (IS_ERR(ccm_name))
-		return ERR_PTR(err);
+		return ERR_CAST(ccm_name);
 
 	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 	if (!inst)
@@ -291,9 +291,8 @@ static struct crypto_instance *chainiv_alloc(struct rtattr **tb)
 	int err;
 
 	algt = crypto_get_attr_type(tb);
-	err = PTR_ERR(algt);
 	if (IS_ERR(algt))
-		return ERR_PTR(err);
+		return ERR_CAST(algt);
 
 	err = crypto_get_default_rng();
 	if (err)
crypto/crc32.c (new file)

@@ -0,0 +1,158 @@
+/* GPL HEADER START
+ *
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 only,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License version 2 for more details (a copy is included
+ * in the LICENSE file that accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License
+ * version 2 along with this program; If not, see http://www.gnu.org/licenses
+ *
+ * Please visit http://www.xyratex.com/contact if you need additional
+ * information or have any questions.
+ *
+ * GPL HEADER END
+ */
+
+/*
+ * Copyright 2012 Xyratex Technology Limited
+ */
+
+/*
+ * This is crypto api shash wrappers to crc32_le.
+ */
+
+#include <linux/crc32.h>
+#include <crypto/internal/hash.h>
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/string.h>
+#include <linux/kernel.h>
+
+#define CHKSUM_BLOCK_SIZE	1
+#define CHKSUM_DIGEST_SIZE	4
+
+static u32 __crc32_le(u32 crc, unsigned char const *p, size_t len)
+{
+	return crc32_le(crc, p, len);
+}
+
+/** No default init with ~0 */
+static int crc32_cra_init(struct crypto_tfm *tfm)
+{
+	u32 *key = crypto_tfm_ctx(tfm);
+
+	*key = 0;
+
+	return 0;
+}
+
+
+/*
+ * Setting the seed allows arbitrary accumulators and flexible XOR policy
+ * If your algorithm starts with ~0, then XOR with ~0 before you set
+ * the seed.
+ */
+static int crc32_setkey(struct crypto_shash *hash, const u8 *key,
+			unsigned int keylen)
+{
+	u32 *mctx = crypto_shash_ctx(hash);
+
+	if (keylen != sizeof(u32)) {
+		crypto_shash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		return -EINVAL;
+	}
+	*mctx = le32_to_cpup((__le32 *)key);
+	return 0;
+}
+
+static int crc32_init(struct shash_desc *desc)
+{
+	u32 *mctx = crypto_shash_ctx(desc->tfm);
+	u32 *crcp = shash_desc_ctx(desc);
+
+	*crcp = *mctx;
+
+	return 0;
+}
+
+static int crc32_update(struct shash_desc *desc, const u8 *data,
+			unsigned int len)
+{
+	u32 *crcp = shash_desc_ctx(desc);
+
+	*crcp = __crc32_le(*crcp, data, len);
+	return 0;
+}
+
+/* No final XOR 0xFFFFFFFF, like crc32_le */
+static int __crc32_finup(u32 *crcp, const u8 *data, unsigned int len,
+			 u8 *out)
+{
+	*(__le32 *)out = cpu_to_le32(__crc32_le(*crcp, data, len));
+	return 0;
+}
+
+static int crc32_finup(struct shash_desc *desc, const u8 *data,
+		       unsigned int len, u8 *out)
+{
+	return __crc32_finup(shash_desc_ctx(desc), data, len, out);
+}
+
+static int crc32_final(struct shash_desc *desc, u8 *out)
+{
+	u32 *crcp = shash_desc_ctx(desc);
+
+	*(__le32 *)out = cpu_to_le32p(crcp);
+	return 0;
+}
+
+static int crc32_digest(struct shash_desc *desc, const u8 *data,
+			unsigned int len, u8 *out)
+{
+	return __crc32_finup(crypto_shash_ctx(desc->tfm), data, len,
+			     out);
+}
+static struct shash_alg alg = {
+	.setkey		= crc32_setkey,
+	.init		= crc32_init,
+	.update		= crc32_update,
+	.final		= crc32_final,
+	.finup		= crc32_finup,
+	.digest		= crc32_digest,
+	.descsize	= sizeof(u32),
+	.digestsize	= CHKSUM_DIGEST_SIZE,
+	.base		= {
+		.cra_name		= "crc32",
+		.cra_driver_name	= "crc32-table",
+		.cra_priority		= 100,
+		.cra_blocksize		= CHKSUM_BLOCK_SIZE,
+		.cra_ctxsize		= sizeof(u32),
+		.cra_module		= THIS_MODULE,
+		.cra_init		= crc32_cra_init,
+	}
+};
+
+static int __init crc32_mod_init(void)
+{
+	return crypto_register_shash(&alg);
+}
+
+static void __exit crc32_mod_fini(void)
+{
+	crypto_unregister_shash(&alg);
+}
+
+module_init(crc32_mod_init);
+module_exit(crc32_mod_fini);
+
+MODULE_AUTHOR("Alexander Boyko <alexander_boyko@xyratex.com>");
+MODULE_DESCRIPTION("CRC32 calculations wrapper for lib/crc32");
+MODULE_LICENSE("GPL");
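As the comments in this file note, crc32_cra_init() seeds with 0 rather than ~0 and crc32_final() performs no final inversion, so a caller that wants the conventional CRC-32 value supplies both itself. A hedged sketch of that convention, reusing a tfm/desc pair set up as in the earlier example (error handling omitted; not part of this merge):

#include <crypto/hash.h>

/* Sketch only: conventional CRC-32 (preset ~0, final XOR ~0) on top of the
 * "crc32" shash, which deliberately does neither step itself.
 */
static u32 crc32_conventional(struct crypto_shash *tfm, struct shash_desc *desc,
			      const u8 *data, unsigned int len)
{
	__le32 seed = cpu_to_le32(~0U);	/* preset, passed as a little-endian key */
	__le32 raw;

	crypto_shash_setkey(tfm, (const u8 *)&seed, sizeof(seed));
	crypto_shash_digest(desc, data, len, (u8 *)&raw);

	/* caller applies the final inversion: equivalent to
	 * crc32_le(~0, data, len) ^ ~0
	 */
	return le32_to_cpu(raw) ^ ~0U;
}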
@@ -30,6 +30,8 @@
 
 #include "internal.h"
 
+#define null_terminated(x)	(strnlen(x, sizeof(x)) < sizeof(x))
+
 static DEFINE_MUTEX(crypto_cfg_mutex);
 
 /* The crypto netlink socket */
@@ -75,7 +77,7 @@ static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg)
 {
 	struct crypto_report_cipher rcipher;
 
-	snprintf(rcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "cipher");
+	strncpy(rcipher.type, "cipher", sizeof(rcipher.type));
 
 	rcipher.blocksize = alg->cra_blocksize;
 	rcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
@@ -94,8 +96,7 @@ static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg)
 {
 	struct crypto_report_comp rcomp;
 
-	snprintf(rcomp.type, CRYPTO_MAX_ALG_NAME, "%s", "compression");
-
+	strncpy(rcomp.type, "compression", sizeof(rcomp.type));
 	if (nla_put(skb, CRYPTOCFGA_REPORT_COMPRESS,
 		    sizeof(struct crypto_report_comp), &rcomp))
 		goto nla_put_failure;
@@ -108,12 +109,14 @@ nla_put_failure:
 static int crypto_report_one(struct crypto_alg *alg,
 			     struct crypto_user_alg *ualg, struct sk_buff *skb)
 {
-	memcpy(&ualg->cru_name, &alg->cra_name, sizeof(ualg->cru_name));
-	memcpy(&ualg->cru_driver_name, &alg->cra_driver_name,
-	       sizeof(ualg->cru_driver_name));
-	memcpy(&ualg->cru_module_name, module_name(alg->cra_module),
-	       CRYPTO_MAX_ALG_NAME);
+	strncpy(ualg->cru_name, alg->cra_name, sizeof(ualg->cru_name));
+	strncpy(ualg->cru_driver_name, alg->cra_driver_name,
+		sizeof(ualg->cru_driver_name));
+	strncpy(ualg->cru_module_name, module_name(alg->cra_module),
+		sizeof(ualg->cru_module_name));
 
+	ualg->cru_type = 0;
+	ualg->cru_mask = 0;
 	ualg->cru_flags = alg->cra_flags;
 	ualg->cru_refcnt = atomic_read(&alg->cra_refcnt);
 
@@ -122,8 +125,7 @@ static int crypto_report_one(struct crypto_alg *alg,
 	if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
 		struct crypto_report_larval rl;
 
-		snprintf(rl.type, CRYPTO_MAX_ALG_NAME, "%s", "larval");
-
+		strncpy(rl.type, "larval", sizeof(rl.type));
 		if (nla_put(skb, CRYPTOCFGA_REPORT_LARVAL,
 			    sizeof(struct crypto_report_larval), &rl))
 			goto nla_put_failure;
@@ -196,7 +198,10 @@ static int crypto_report(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
 	struct crypto_dump_info info;
 	int err;
 
-	if (!p->cru_driver_name)
+	if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
+		return -EINVAL;
+
+	if (!p->cru_driver_name[0])
 		return -EINVAL;
 
 	alg = crypto_alg_match(p, 1);
@@ -260,6 +265,9 @@ static int crypto_update_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
 	struct nlattr *priority = attrs[CRYPTOCFGA_PRIORITY_VAL];
 	LIST_HEAD(list);
 
+	if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
+		return -EINVAL;
+
 	if (priority && !strlen(p->cru_driver_name))
 		return -EINVAL;
 
@@ -287,6 +295,9 @@ static int crypto_del_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
 	struct crypto_alg *alg;
 	struct crypto_user_alg *p = nlmsg_data(nlh);
 
+	if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
+		return -EINVAL;
+
 	alg = crypto_alg_match(p, 1);
 	if (!alg)
 		return -ENOENT;
@@ -368,6 +379,9 @@ static int crypto_add_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
 	struct crypto_user_alg *p = nlmsg_data(nlh);
 	struct nlattr *priority = attrs[CRYPTOCFGA_PRIORITY_VAL];
 
+	if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
+		return -EINVAL;
+
 	if (strlen(p->cru_driver_name))
 		exact = 1;
 
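The snprintf()-to-strncpy() switches above are the heart of the info-leak fix: each report structure sits on the stack and is copied to userspace in full by nla_put(), and unlike snprintf(), strncpy() zero-fills the destination after the terminating NUL. A small standalone userspace demonstration of that padding difference (illustrative only, not kernel code):

#include <stdio.h>
#include <string.h>

int main(void)
{
	char a[16], b[16];
	size_t i;

	/* Pretend the buffers hold leftover (secret) stack data. */
	memset(a, 0xAA, sizeof(a));
	memset(b, 0xAA, sizeof(b));

	snprintf(a, sizeof(a), "%s", "cipher");	/* bytes after the NUL keep 0xAA   */
	strncpy(b, "cipher", sizeof(b));	/* remaining bytes are zero-filled */

	for (i = 0; i < sizeof(a); i++)
		printf("%02x ", (unsigned char)a[i]);
	printf("\n");
	for (i = 0; i < sizeof(b); i++)
		printf("%02x ", (unsigned char)b[i]);
	printf("\n");
	return 0;
}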
@@ -343,17 +343,15 @@ static struct crypto_instance *crypto_rfc3686_alloc(struct rtattr **tb)
 	int err;
 
 	algt = crypto_get_attr_type(tb);
-	err = PTR_ERR(algt);
 	if (IS_ERR(algt))
-		return ERR_PTR(err);
+		return ERR_CAST(algt);
 
 	if ((algt->type ^ CRYPTO_ALG_TYPE_BLKCIPHER) & algt->mask)
 		return ERR_PTR(-EINVAL);
 
 	cipher_name = crypto_attr_alg_name(tb[1]);
-	err = PTR_ERR(cipher_name);
 	if (IS_ERR(cipher_name))
-		return ERR_PTR(err);
+		return ERR_CAST(cipher_name);
 
 	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 	if (!inst)
@@ -282,9 +282,8 @@ static struct crypto_instance *crypto_cts_alloc(struct rtattr **tb)
 
 	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER,
 				CRYPTO_ALG_TYPE_MASK);
-	err = PTR_ERR(alg);
 	if (IS_ERR(alg))
-		return ERR_PTR(err);
+		return ERR_CAST(alg);
 
 	inst = ERR_PTR(-EINVAL);
 	if (!is_power_of_2(alg->cra_blocksize))
crypto/gcm.c

@@ -701,9 +701,8 @@ static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
 	int err;
 
 	algt = crypto_get_attr_type(tb);
-	err = PTR_ERR(algt);
 	if (IS_ERR(algt))
-		return ERR_PTR(err);
+		return ERR_CAST(algt);
 
 	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
 		return ERR_PTR(-EINVAL);
@@ -711,9 +710,8 @@ static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
 	ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type,
 				    CRYPTO_ALG_TYPE_HASH,
 				    CRYPTO_ALG_TYPE_AHASH_MASK);
-	err = PTR_ERR(ghash_alg);
 	if (IS_ERR(ghash_alg))
-		return ERR_PTR(err);
+		return ERR_CAST(ghash_alg);
 
 	err = -ENOMEM;
 	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
@@ -787,15 +785,13 @@ out_put_ghash:
 
 static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
 {
-	int err;
 	const char *cipher_name;
 	char ctr_name[CRYPTO_MAX_ALG_NAME];
 	char full_name[CRYPTO_MAX_ALG_NAME];
 
 	cipher_name = crypto_attr_alg_name(tb[1]);
-	err = PTR_ERR(cipher_name);
 	if (IS_ERR(cipher_name))
-		return ERR_PTR(err);
+		return ERR_CAST(cipher_name);
 
 	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
 	    CRYPTO_MAX_ALG_NAME)
@@ -826,20 +822,17 @@ static struct crypto_template crypto_gcm_tmpl = {
 
 static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
 {
-	int err;
 	const char *ctr_name;
 	const char *ghash_name;
 	char full_name[CRYPTO_MAX_ALG_NAME];
 
 	ctr_name = crypto_attr_alg_name(tb[1]);
-	err = PTR_ERR(ctr_name);
 	if (IS_ERR(ctr_name))
-		return ERR_PTR(err);
+		return ERR_CAST(ctr_name);
 
 	ghash_name = crypto_attr_alg_name(tb[2]);
-	err = PTR_ERR(ghash_name);
 	if (IS_ERR(ghash_name))
-		return ERR_PTR(err);
+		return ERR_CAST(ghash_name);
 
 	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s,%s)",
 		     ctr_name, ghash_name) >= CRYPTO_MAX_ALG_NAME)
@@ -971,17 +964,15 @@ static struct crypto_instance *crypto_rfc4106_alloc(struct rtattr **tb)
 	int err;
 
 	algt = crypto_get_attr_type(tb);
-	err = PTR_ERR(algt);
 	if (IS_ERR(algt))
-		return ERR_PTR(err);
+		return ERR_CAST(algt);
 
 	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
 		return ERR_PTR(-EINVAL);
 
 	ccm_name = crypto_attr_alg_name(tb[1]);
-	err = PTR_ERR(ccm_name);
 	if (IS_ERR(ccm_name))
-		return ERR_PTR(err);
+		return ERR_CAST(ccm_name);
 
 	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 	if (!inst)
@@ -1222,17 +1213,15 @@ static struct crypto_instance *crypto_rfc4543_alloc(struct rtattr **tb)
 	int err;
 
 	algt = crypto_get_attr_type(tb);
-	err = PTR_ERR(algt);
 	if (IS_ERR(algt))
-		return ERR_PTR(err);
+		return ERR_CAST(algt);
 
 	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
 		return ERR_PTR(-EINVAL);
 
 	ccm_name = crypto_attr_alg_name(tb[1]);
-	err = PTR_ERR(ccm_name);
 	if (IS_ERR(ccm_name))
-		return ERR_PTR(err);
+		return ERR_CAST(ccm_name);
 
 	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 	if (!inst)
@@ -53,8 +53,7 @@ static int crypto_pcomp_report(struct sk_buff *skb, struct crypto_alg *alg)
 {
 	struct crypto_report_comp rpcomp;
 
-	snprintf(rpcomp.type, CRYPTO_MAX_ALG_NAME, "%s", "pcomp");
-
+	strncpy(rpcomp.type, "pcomp", sizeof(rpcomp.type));
 	if (nla_put(skb, CRYPTOCFGA_REPORT_COMPRESS,
 		    sizeof(struct crypto_report_comp), &rpcomp))
 		goto nla_put_failure;
@@ -65,7 +65,7 @@ static int crypto_rng_report(struct sk_buff *skb, struct crypto_alg *alg)
 {
 	struct crypto_report_rng rrng;
 
-	snprintf(rrng.type, CRYPTO_MAX_ALG_NAME, "%s", "rng");
+	strncpy(rrng.type, "rng", sizeof(rrng.type));
 
 	rrng.seedsize = alg->cra_rng.seedsize;
 
@@ -305,9 +305,8 @@ static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
 	int err;
 
 	algt = crypto_get_attr_type(tb);
-	err = PTR_ERR(algt);
 	if (IS_ERR(algt))
-		return ERR_PTR(err);
+		return ERR_CAST(algt);
 
 	err = crypto_get_default_rng();
 	if (err)
@@ -530,7 +530,8 @@ static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
 	struct crypto_report_hash rhash;
 	struct shash_alg *salg = __crypto_shash_alg(alg);
 
-	snprintf(rhash.type, CRYPTO_MAX_ALG_NAME, "%s", "shash");
+	strncpy(rhash.type, "shash", sizeof(rhash.type));
+
 	rhash.blocksize = alg->cra_blocksize;
 	rhash.digestsize = salg->digestsize;
 
@@ -2268,6 +2268,21 @@ static const struct alg_test_desc alg_test_descs[] = {
 				}
 			}
 		}
 	}, {
+		.alg = "ecb(fcrypt)",
+		.test = alg_test_skcipher,
+		.suite = {
+			.cipher = {
+				.enc = {
+					.vecs = fcrypt_pcbc_enc_tv_template,
+					.count = 1
+				},
+				.dec = {
+					.vecs = fcrypt_pcbc_dec_tv_template,
+					.count = 1
+				}
+			}
+		}
+	}, {
 		.alg = "ecb(khazad)",
 		.test = alg_test_skcipher,