Merge branch 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
 "Here is the crypto update for 5.3:

  API:
   - Test shash interface directly in testmgr
   - cra_driver_name is now mandatory

  Algorithms:
   - Replace arc4 crypto_cipher with library helper
   - Implement 5 way interleave for ECB, CBC and CTR on arm64
   - Add xxhash
   - Add continuous self-test on noise source to drbg
   - Update jitter RNG

  Drivers:
   - Add support for SHA204A random number generator
   - Add support for 7211 in iproc-rng200
   - Fix fuzz test failures in inside-secure
   - Fix fuzz test failures in talitos
   - Fix fuzz test failures in qat"

* 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (143 commits)
  crypto: stm32/hash - remove interruptible condition for dma
  crypto: stm32/hash - Fix hmac issue more than 256 bytes
  crypto: stm32/crc32 - rename driver file
  crypto: amcc - remove memset after dma_alloc_coherent
  crypto: ccp - Switch to SPDX license identifiers
  crypto: ccp - Validate the the error value used to index error messages
  crypto: doc - Fix formatting of new crypto engine content
  crypto: doc - Add parameter documentation
  crypto: arm64/aes-ce - implement 5 way interleave for ECB, CBC and CTR
  crypto: arm64/aes-ce - add 5 way interleave routines
  crypto: talitos - drop icv_ool
  crypto: talitos - fix hash on SEC1.
  crypto: talitos - move struct talitos_edesc into talitos.h
  lib/scatterlist: Fix mapping iterator when sg->offset is greater than PAGE_SIZE
  crypto/NX: Set receive window credits to max number of CRBs in RxFIFO
  crypto: asymmetric_keys - select CRYPTO_HASH where needed
  crypto: serpent - mark __serpent_setkey_sbox noinline
  crypto: testmgr - dynamically allocate crypto_shash
  crypto: testmgr - dynamically allocate testvec_config
  crypto: talitos - eliminate unneeded 'done' functions at build time
  ...
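One of the algorithm items above, "Replace arc4 crypto_cipher with library helper", moves in-kernel arc4 users from an "ecb(arc4)" crypto_cipher to the lib/crypto helper. The following is only a minimal sketch of that library interface, assuming the arc4_setkey()/arc4_crypt() declarations from <crypto/arc4.h>; the wrapper name arc4_crypt_buf is made up for illustration.

#include <crypto/arc4.h>
#include <linux/string.h>

/* Encrypt (or decrypt -- arc4 is symmetric) a buffer in place with the
 * library helper, instead of allocating an "ecb(arc4)" crypto_cipher. */
static void arc4_crypt_buf(const u8 *key, unsigned int keylen,
			   u8 *buf, unsigned int len)
{
	struct arc4_ctx ctx;

	arc4_setkey(&ctx, key, keylen);
	arc4_crypt(&ctx, buf, buf, len);
	memzero_explicit(&ctx, sizeof(ctx));	/* don't leave key state around */
}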
@@ -67,12 +67,16 @@ static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
 }
 
 static inline int crypto4xx_crypt(struct skcipher_request *req,
-				  const unsigned int ivlen, bool decrypt)
+				  const unsigned int ivlen, bool decrypt,
+				  bool check_blocksize)
 {
 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
 	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
 	__le32 iv[AES_IV_SIZE];
 
+	if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
+		return -EINVAL;
+
 	if (ivlen)
 		crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);
 
@@ -81,24 +85,34 @@ static inline int crypto4xx_crypt(struct skcipher_request *req,
 			    ctx->sa_len, 0, NULL);
 }
 
-int crypto4xx_encrypt_noiv(struct skcipher_request *req)
+int crypto4xx_encrypt_noiv_block(struct skcipher_request *req)
 {
-	return crypto4xx_crypt(req, 0, false);
+	return crypto4xx_crypt(req, 0, false, true);
 }
 
-int crypto4xx_encrypt_iv(struct skcipher_request *req)
+int crypto4xx_encrypt_iv_stream(struct skcipher_request *req)
 {
-	return crypto4xx_crypt(req, AES_IV_SIZE, false);
+	return crypto4xx_crypt(req, AES_IV_SIZE, false, false);
 }
 
-int crypto4xx_decrypt_noiv(struct skcipher_request *req)
+int crypto4xx_decrypt_noiv_block(struct skcipher_request *req)
 {
-	return crypto4xx_crypt(req, 0, true);
+	return crypto4xx_crypt(req, 0, true, true);
 }
 
-int crypto4xx_decrypt_iv(struct skcipher_request *req)
+int crypto4xx_decrypt_iv_stream(struct skcipher_request *req)
 {
-	return crypto4xx_crypt(req, AES_IV_SIZE, true);
+	return crypto4xx_crypt(req, AES_IV_SIZE, true, false);
 }
 
+int crypto4xx_encrypt_iv_block(struct skcipher_request *req)
+{
+	return crypto4xx_crypt(req, AES_IV_SIZE, false, true);
+}
+
+int crypto4xx_decrypt_iv_block(struct skcipher_request *req)
+{
+	return crypto4xx_crypt(req, AES_IV_SIZE, true, true);
+}
+
 /**
@@ -269,8 +283,8 @@ crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
 		return ret;
 	}
 
-	return encrypt ? crypto4xx_encrypt_iv(req)
-		       : crypto4xx_decrypt_iv(req);
+	return encrypt ? crypto4xx_encrypt_iv_stream(req)
+		       : crypto4xx_decrypt_iv_stream(req);
 }
 
 static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,

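The new check_blocksize path changes what a caller sees for the block modes: a cbc(aes) or ecb(aes) request whose cryptlen is not a multiple of AES_BLOCK_SIZE is now rejected up front, while the stream-like entry points (ctr, cfb, ofb) keep accepting arbitrary lengths. Below is a caller-side sketch of that behaviour, not part of this patch; the helper name try_partial_block is hypothetical, and it uses the standard synchronous-wait pattern of the kernel crypto API.

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Hypothetical helper: submit a 20-byte request to cbc(aes). On crypto4xx
 * (and other strict block-cipher implementations) this is expected to fail
 * with -EINVAL, since 20 is not a multiple of AES_BLOCK_SIZE. */
static int try_partial_block(void)
{
	u8 key[16] = { 0 }, iv[16] = { 0 };
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	u8 *buf;
	int err;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	buf = kzalloc(20, GFP_KERNEL);
	if (!buf) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out_free_buf;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_buf;
	}

	sg_init_one(&sg, buf, 20);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, 20, iv);

	/* Expected: -EINVAL from the driver's block-size check. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_buf:
	kfree(buf);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}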
@@ -182,7 +182,6 @@ static u32 crypto4xx_build_pdr(struct crypto4xx_device *dev)
 				  dev->pdr_pa);
 		return -ENOMEM;
 	}
-	memset(dev->pdr, 0, sizeof(struct ce_pd) * PPC4XX_NUM_PD);
 	dev->shadow_sa_pool = dma_alloc_coherent(dev->core_dev->device,
 				   sizeof(union shadow_sa_buf) * PPC4XX_NUM_PD,
 				   &dev->shadow_sa_pool_pa,
@@ -1210,8 +1209,8 @@ static struct crypto4xx_alg_common crypto4xx_alg[] = {
 		.max_keysize = AES_MAX_KEY_SIZE,
 		.ivsize	= AES_IV_SIZE,
 		.setkey = crypto4xx_setkey_aes_cbc,
-		.encrypt = crypto4xx_encrypt_iv,
-		.decrypt = crypto4xx_decrypt_iv,
+		.encrypt = crypto4xx_encrypt_iv_block,
+		.decrypt = crypto4xx_decrypt_iv_block,
 		.init = crypto4xx_sk_init,
 		.exit = crypto4xx_sk_exit,
 	} },
@@ -1222,7 +1221,7 @@ static struct crypto4xx_alg_common crypto4xx_alg[] = {
 			.cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
 			.cra_flags = CRYPTO_ALG_ASYNC |
 				CRYPTO_ALG_KERN_DRIVER_ONLY,
-			.cra_blocksize = AES_BLOCK_SIZE,
+			.cra_blocksize = 1,
 			.cra_ctxsize = sizeof(struct crypto4xx_ctx),
 			.cra_module = THIS_MODULE,
 		},
@@ -1230,8 +1229,8 @@ static struct crypto4xx_alg_common crypto4xx_alg[] = {
 		.max_keysize = AES_MAX_KEY_SIZE,
 		.ivsize	= AES_IV_SIZE,
 		.setkey = crypto4xx_setkey_aes_cfb,
-		.encrypt = crypto4xx_encrypt_iv,
-		.decrypt = crypto4xx_decrypt_iv,
+		.encrypt = crypto4xx_encrypt_iv_stream,
+		.decrypt = crypto4xx_decrypt_iv_stream,
 		.init = crypto4xx_sk_init,
 		.exit = crypto4xx_sk_exit,
 	} },
@@ -1243,7 +1242,7 @@ static struct crypto4xx_alg_common crypto4xx_alg[] = {
 			.cra_flags = CRYPTO_ALG_NEED_FALLBACK |
 				CRYPTO_ALG_ASYNC |
 				CRYPTO_ALG_KERN_DRIVER_ONLY,
-			.cra_blocksize = AES_BLOCK_SIZE,
+			.cra_blocksize = 1,
 			.cra_ctxsize = sizeof(struct crypto4xx_ctx),
 			.cra_module = THIS_MODULE,
 		},
@@ -1263,7 +1262,7 @@ static struct crypto4xx_alg_common crypto4xx_alg[] = {
 			.cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
 			.cra_flags = CRYPTO_ALG_ASYNC |
 				CRYPTO_ALG_KERN_DRIVER_ONLY,
-			.cra_blocksize = AES_BLOCK_SIZE,
+			.cra_blocksize = 1,
 			.cra_ctxsize = sizeof(struct crypto4xx_ctx),
 			.cra_module = THIS_MODULE,
 		},
@@ -1290,8 +1289,8 @@ static struct crypto4xx_alg_common crypto4xx_alg[] = {
 		.min_keysize = AES_MIN_KEY_SIZE,
 		.max_keysize = AES_MAX_KEY_SIZE,
 		.setkey = crypto4xx_setkey_aes_ecb,
-		.encrypt = crypto4xx_encrypt_noiv,
-		.decrypt = crypto4xx_decrypt_noiv,
+		.encrypt = crypto4xx_encrypt_noiv_block,
+		.decrypt = crypto4xx_decrypt_noiv_block,
 		.init = crypto4xx_sk_init,
 		.exit = crypto4xx_sk_exit,
 	} },
@@ -1302,7 +1301,7 @@ static struct crypto4xx_alg_common crypto4xx_alg[] = {
 			.cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
 			.cra_flags = CRYPTO_ALG_ASYNC |
 				CRYPTO_ALG_KERN_DRIVER_ONLY,
-			.cra_blocksize = AES_BLOCK_SIZE,
+			.cra_blocksize = 1,
 			.cra_ctxsize = sizeof(struct crypto4xx_ctx),
 			.cra_module = THIS_MODULE,
 		},
@@ -1310,8 +1309,8 @@ static struct crypto4xx_alg_common crypto4xx_alg[] = {
 		.max_keysize = AES_MAX_KEY_SIZE,
 		.ivsize	= AES_IV_SIZE,
 		.setkey = crypto4xx_setkey_aes_ofb,
-		.encrypt = crypto4xx_encrypt_iv,
-		.decrypt = crypto4xx_decrypt_iv,
+		.encrypt = crypto4xx_encrypt_iv_stream,
+		.decrypt = crypto4xx_decrypt_iv_stream,
 		.init = crypto4xx_sk_init,
 		.exit = crypto4xx_sk_exit,
 	} },

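Setting cra_blocksize to 1 for the CFB, OFB and CTR entries advertises them as stream-like to the rest of the crypto API; only the CBC and ECB entries keep AES_BLOCK_SIZE. A small probe sketch of what the kernel then reports for each mode, not part of this patch (the module and function names are made up for illustration), could look like this:

#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>

/* Hypothetical probe: allocate each mode and print the advertised
 * blocksize/ivsize together with the driver that was selected. */
static int __init blocksize_probe_init(void)
{
	static const char * const algs[] = {
		"cbc(aes)", "ecb(aes)", "ctr(aes)", "cfb(aes)", "ofb(aes)",
	};
	int i;

	for (i = 0; i < ARRAY_SIZE(algs); i++) {
		struct crypto_skcipher *tfm = crypto_alloc_skcipher(algs[i], 0, 0);

		if (IS_ERR(tfm)) {
			pr_info("%s: unavailable (%ld)\n", algs[i], PTR_ERR(tfm));
			continue;
		}
		pr_info("%s: driver %s, blocksize %u, ivsize %u\n", algs[i],
			crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)),
			crypto_skcipher_blocksize(tfm),
			crypto_skcipher_ivsize(tfm));
		crypto_free_skcipher(tfm);
	}
	return 0;
}

static void __exit blocksize_probe_exit(void)
{
}

module_init(blocksize_probe_init);
module_exit(blocksize_probe_exit);
MODULE_LICENSE("GPL");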
@@ -173,10 +173,12 @@ int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
 			     const u8 *key, unsigned int keylen);
 int crypto4xx_encrypt_ctr(struct skcipher_request *req);
 int crypto4xx_decrypt_ctr(struct skcipher_request *req);
-int crypto4xx_encrypt_iv(struct skcipher_request *req);
-int crypto4xx_decrypt_iv(struct skcipher_request *req);
-int crypto4xx_encrypt_noiv(struct skcipher_request *req);
-int crypto4xx_decrypt_noiv(struct skcipher_request *req);
+int crypto4xx_encrypt_iv_stream(struct skcipher_request *req);
+int crypto4xx_decrypt_iv_stream(struct skcipher_request *req);
+int crypto4xx_encrypt_iv_block(struct skcipher_request *req);
+int crypto4xx_decrypt_iv_block(struct skcipher_request *req);
+int crypto4xx_encrypt_noiv_block(struct skcipher_request *req);
+int crypto4xx_decrypt_noiv_block(struct skcipher_request *req);
 int crypto4xx_rfc3686_encrypt(struct skcipher_request *req);
 int crypto4xx_rfc3686_decrypt(struct skcipher_request *req);
 int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm);