Merge branch 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
 "API:
   - Remove VLA usage
   - Add cryptostat user-space interface
   - Add notifier for new crypto algorithms

  Algorithms:
   - Add OFB mode
   - Remove speck

  Drivers:
   - Remove x86/sha*-mb as they are buggy
   - Remove pcbc(aes) from x86/aesni
   - Improve performance of arm/ghash-ce by up to 85%
   - Implement CTS-CBC in arm64/aes-blk, faster by up to 50%
   - Remove PMULL based arm64/crc32 driver
   - Use PMULL in arm64/crct10dif
   - Add aes-ctr support in s5p-sss
   - Add caam/qi2 driver

  Others:
   - Pick better transform if one becomes available in crc-t10dif"

* 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (124 commits)
  crypto: chelsio - Update ntx queue received from cxgb4
  crypto: ccree - avoid implicit enum conversion
  crypto: caam - add SPDX license identifier to all files
  crypto: caam/qi - simplify CGR allocation, freeing
  crypto: mxs-dcp - make symbols 'sha1_null_hash' and 'sha256_null_hash' static
  crypto: arm64/aes-blk - ensure XTS mask is always loaded
  crypto: testmgr - fix sizeof() on COMP_BUF_SIZE
  crypto: chtls - remove set but not used variable 'csk'
  crypto: axis - fix platform_no_drv_owner.cocci warnings
  crypto: x86/aes-ni - fix build error following fpu template removal
  crypto: arm64/aes - fix handling sub-block CTS-CBC inputs
  crypto: caam/qi2 - avoid double export
  crypto: mxs-dcp - Fix AES issues
  crypto: mxs-dcp - Fix SHA null hashes and output length
  crypto: mxs-dcp - Implement sha import/export
  crypto: aegis/generic - fix for big endian systems
  crypto: morus/generic - fix for big endian systems
  crypto: lrw - fix rebase error after out of bounds fix
  crypto: cavium/nitrox - use pci_alloc_irq_vectors() while enabling MSI-X.
  crypto: cavium/nitrox - NITROX command queue changes.
  ...
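The s390 AES diff below is one instance of the "Remove VLA usage" item above: an on-stack request built with SKCIPHER_REQUEST_ON_STACK() only has a bounded size if the transform is guaranteed to be synchronous, so the fallback tfm is converted to a crypto_sync_skcipher, allocated with crypto_alloc_sync_skcipher() (which only ever returns a synchronous transform, so the explicit CRYPTO_ALG_ASYNC mask bit is dropped) and used via SYNC_SKCIPHER_REQUEST_ON_STACK(). The following minimal kernel-style sketch of the resulting pattern is not taken from the patch: struct my_ctx and the my_fallback_* helpers are invented for illustration, and crypto_skcipher_encrypt()/skcipher_request_zero() are standard skcipher helpers that do not appear in this particular diff.

/*
 * Hypothetical sketch of the sync-skcipher fallback pattern shown in
 * the diff below. Only the crypto API calls are real; the context
 * struct and helper names are made up.
 */
#include <crypto/skcipher.h>
#include <linux/err.h>

struct my_ctx {
        struct crypto_sync_skcipher *fallback;
};

static int my_fallback_init(struct my_ctx *ctx, const char *name)
{
        /* A sync skcipher is always synchronous, so requests built on
         * the stack below have a fixed, compile-time-bounded size. */
        ctx->fallback = crypto_alloc_sync_skcipher(name, 0,
                                                   CRYPTO_ALG_NEED_FALLBACK);
        return PTR_ERR_OR_ZERO(ctx->fallback);
}

static int my_fallback_encrypt(struct my_ctx *ctx, const u8 *key,
                               unsigned int keylen,
                               struct scatterlist *src,
                               struct scatterlist *dst,
                               unsigned int nbytes, u8 *iv)
{
        /* Bounded on-stack request (no VLA), tied to the sync tfm. */
        SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
        int ret;

        ret = crypto_sync_skcipher_setkey(ctx->fallback, key, keylen);
        if (ret)
                return ret;

        skcipher_request_set_sync_tfm(req, ctx->fallback);
        skcipher_request_set_callback(req, 0, NULL, NULL);
        skcipher_request_set_crypt(req, src, dst, nbytes, iv);

        ret = crypto_skcipher_encrypt(req);
        skcipher_request_zero(req);  /* wipe key material from the stack */
        return ret;
}

static void my_fallback_exit(struct my_ctx *ctx)
{
        crypto_free_sync_skcipher(ctx->fallback);
}

The same allocate / on-stack request / free lifecycle appears twice in the diff below: once for the s390_aes_ctx fallback (sctx->fallback.blk) and once for the XTS fallback (xts_ctx->fallback).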
@@ -44,7 +44,7 @@ struct s390_aes_ctx {
         int key_len;
         unsigned long fc;
         union {
-                struct crypto_skcipher *blk;
+                struct crypto_sync_skcipher *blk;
                 struct crypto_cipher *cip;
         } fallback;
 };
@@ -54,7 +54,7 @@ struct s390_xts_ctx {
         u8 pcc_key[32];
         int key_len;
         unsigned long fc;
-        struct crypto_skcipher *fallback;
+        struct crypto_sync_skcipher *fallback;
 };
 
 struct gcm_sg_walk {
@@ -184,14 +184,15 @@ static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
         struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
         unsigned int ret;
 
-        crypto_skcipher_clear_flags(sctx->fallback.blk, CRYPTO_TFM_REQ_MASK);
-        crypto_skcipher_set_flags(sctx->fallback.blk, tfm->crt_flags &
+        crypto_sync_skcipher_clear_flags(sctx->fallback.blk,
+                                         CRYPTO_TFM_REQ_MASK);
+        crypto_sync_skcipher_set_flags(sctx->fallback.blk, tfm->crt_flags &
                         CRYPTO_TFM_REQ_MASK);
 
-        ret = crypto_skcipher_setkey(sctx->fallback.blk, key, len);
+        ret = crypto_sync_skcipher_setkey(sctx->fallback.blk, key, len);
 
         tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
-        tfm->crt_flags |= crypto_skcipher_get_flags(sctx->fallback.blk) &
+        tfm->crt_flags |= crypto_sync_skcipher_get_flags(sctx->fallback.blk) &
                           CRYPTO_TFM_RES_MASK;
 
         return ret;
@@ -204,9 +205,9 @@ static int fallback_blk_dec(struct blkcipher_desc *desc,
         unsigned int ret;
         struct crypto_blkcipher *tfm = desc->tfm;
         struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
-        SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);
+        SYNC_SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);
 
-        skcipher_request_set_tfm(req, sctx->fallback.blk);
+        skcipher_request_set_sync_tfm(req, sctx->fallback.blk);
         skcipher_request_set_callback(req, desc->flags, NULL, NULL);
         skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
 
@@ -223,9 +224,9 @@ static int fallback_blk_enc(struct blkcipher_desc *desc,
         unsigned int ret;
         struct crypto_blkcipher *tfm = desc->tfm;
         struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
-        SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);
+        SYNC_SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);
 
-        skcipher_request_set_tfm(req, sctx->fallback.blk);
+        skcipher_request_set_sync_tfm(req, sctx->fallback.blk);
         skcipher_request_set_callback(req, desc->flags, NULL, NULL);
         skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
 
@@ -306,8 +307,7 @@ static int fallback_init_blk(struct crypto_tfm *tfm)
         const char *name = tfm->__crt_alg->cra_name;
         struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
-        sctx->fallback.blk = crypto_alloc_skcipher(name, 0,
-                                                   CRYPTO_ALG_ASYNC |
+        sctx->fallback.blk = crypto_alloc_sync_skcipher(name, 0,
                                                    CRYPTO_ALG_NEED_FALLBACK);
 
         if (IS_ERR(sctx->fallback.blk)) {
@@ -323,7 +323,7 @@ static void fallback_exit_blk(struct crypto_tfm *tfm)
 {
         struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
-        crypto_free_skcipher(sctx->fallback.blk);
+        crypto_free_sync_skcipher(sctx->fallback.blk);
 }
 
 static struct crypto_alg ecb_aes_alg = {
@@ -453,14 +453,15 @@ static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
         struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
         unsigned int ret;
 
-        crypto_skcipher_clear_flags(xts_ctx->fallback, CRYPTO_TFM_REQ_MASK);
-        crypto_skcipher_set_flags(xts_ctx->fallback, tfm->crt_flags &
+        crypto_sync_skcipher_clear_flags(xts_ctx->fallback,
+                                         CRYPTO_TFM_REQ_MASK);
+        crypto_sync_skcipher_set_flags(xts_ctx->fallback, tfm->crt_flags &
                         CRYPTO_TFM_REQ_MASK);
 
-        ret = crypto_skcipher_setkey(xts_ctx->fallback, key, len);
+        ret = crypto_sync_skcipher_setkey(xts_ctx->fallback, key, len);
 
         tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
-        tfm->crt_flags |= crypto_skcipher_get_flags(xts_ctx->fallback) &
+        tfm->crt_flags |= crypto_sync_skcipher_get_flags(xts_ctx->fallback) &
                           CRYPTO_TFM_RES_MASK;
 
         return ret;
@@ -472,10 +473,10 @@ static int xts_fallback_decrypt(struct blkcipher_desc *desc,
 {
         struct crypto_blkcipher *tfm = desc->tfm;
         struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
-        SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
+        SYNC_SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
         unsigned int ret;
 
-        skcipher_request_set_tfm(req, xts_ctx->fallback);
+        skcipher_request_set_sync_tfm(req, xts_ctx->fallback);
         skcipher_request_set_callback(req, desc->flags, NULL, NULL);
         skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
 
@@ -491,10 +492,10 @@ static int xts_fallback_encrypt(struct blkcipher_desc *desc,
 {
         struct crypto_blkcipher *tfm = desc->tfm;
         struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
-        SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
+        SYNC_SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
         unsigned int ret;
 
-        skcipher_request_set_tfm(req, xts_ctx->fallback);
+        skcipher_request_set_sync_tfm(req, xts_ctx->fallback);
         skcipher_request_set_callback(req, desc->flags, NULL, NULL);
         skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
 
@@ -611,8 +612,7 @@ static int xts_fallback_init(struct crypto_tfm *tfm)
         const char *name = tfm->__crt_alg->cra_name;
         struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
 
-        xts_ctx->fallback = crypto_alloc_skcipher(name, 0,
-                                                  CRYPTO_ALG_ASYNC |
+        xts_ctx->fallback = crypto_alloc_sync_skcipher(name, 0,
                                                   CRYPTO_ALG_NEED_FALLBACK);
 
         if (IS_ERR(xts_ctx->fallback)) {
@@ -627,7 +627,7 @@ static void xts_fallback_exit(struct crypto_tfm *tfm)
 {
         struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
 
-        crypto_free_skcipher(xts_ctx->fallback);
+        crypto_free_sync_skcipher(xts_ctx->fallback);
 }
 
 static struct crypto_alg xts_aes_alg = {