crypto: null - Remove VLA usage of skcipher
In the quest to remove all stack VLA usage from the kernel[1], this
replaces struct crypto_skcipher and SKCIPHER_REQUEST_ON_STACK() usage
with struct crypto_sync_skcipher and SYNC_SKCIPHER_REQUEST_ON_STACK(),
which uses a fixed stack size.

[1] https://lkml.kernel.org/r/CA+55aFzCG-zNmZwX4A2FQpadafLfEzK6CC=qPXydAacU1RqZWA@mail.gmail.com

Signed-off-by: Kees Cook <keescook@chromium.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
commit 8d60539842
parent 5c525640ef
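For context, here is a minimal sketch of the conversion pattern the commit applies. It is not taken verbatim from the patch: the function name copy_through_null() and its parameters are illustrative. The on-stack request is declared with the fixed-size SYNC_SKCIPHER_REQUEST_ON_STACK() macro and bound to the tfm with skcipher_request_set_sync_tfm() instead of skcipher_request_set_tfm().

/*
 * Illustrative sketch only (not part of the patch): converting a helper
 * from the VLA-based on-stack request to the fixed-size sync variant.
 * copy_through_null() and its parameters are hypothetical.
 */
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

static int copy_through_null(struct crypto_sync_skcipher *null_tfm,
			     struct scatterlist *src,
			     struct scatterlist *dst, unsigned int len)
{
	/*
	 * Old: SKCIPHER_REQUEST_ON_STACK(req, tfm) sized its buffer with
	 * crypto_skcipher_reqsize(tfm) at runtime, i.e. a stack VLA.
	 * New: SYNC_SKCIPHER_REQUEST_ON_STACK() uses a fixed maximum size,
	 * which is safe because sync tfms have a bounded request size.
	 */
	SYNC_SKCIPHER_REQUEST_ON_STACK(req, null_tfm);

	skcipher_request_set_sync_tfm(req, null_tfm);	/* was ..._set_tfm() */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, len, NULL);

	return crypto_skcipher_encrypt(req);
}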
--- a/crypto/algif_aead.c
+++ b/crypto/algif_aead.c
@@ -42,7 +42,7 @@
 
 struct aead_tfm {
 	struct crypto_aead *aead;
-	struct crypto_skcipher *null_tfm;
+	struct crypto_sync_skcipher *null_tfm;
 };
 
 static inline bool aead_sufficient_data(struct sock *sk)
@@ -75,13 +75,13 @@ static int aead_sendmsg(struct socket *sock, struct msghdr *msg, size_t size)
 	return af_alg_sendmsg(sock, msg, size, ivsize);
 }
 
-static int crypto_aead_copy_sgl(struct crypto_skcipher *null_tfm,
+static int crypto_aead_copy_sgl(struct crypto_sync_skcipher *null_tfm,
 				struct scatterlist *src,
 				struct scatterlist *dst, unsigned int len)
 {
-	SKCIPHER_REQUEST_ON_STACK(skreq, null_tfm);
+	SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, null_tfm);
 
-	skcipher_request_set_tfm(skreq, null_tfm);
+	skcipher_request_set_sync_tfm(skreq, null_tfm);
 	skcipher_request_set_callback(skreq, CRYPTO_TFM_REQ_MAY_BACKLOG,
 				      NULL, NULL);
 	skcipher_request_set_crypt(skreq, src, dst, len, NULL);
@@ -99,7 +99,7 @@ static int _aead_recvmsg(struct socket *sock, struct msghdr *msg,
 	struct af_alg_ctx *ctx = ask->private;
 	struct aead_tfm *aeadc = pask->private;
 	struct crypto_aead *tfm = aeadc->aead;
-	struct crypto_skcipher *null_tfm = aeadc->null_tfm;
+	struct crypto_sync_skcipher *null_tfm = aeadc->null_tfm;
 	unsigned int i, as = crypto_aead_authsize(tfm);
 	struct af_alg_async_req *areq;
 	struct af_alg_tsgl *tsgl, *tmp;
@@ -478,7 +478,7 @@ static void *aead_bind(const char *name, u32 type, u32 mask)
 {
 	struct aead_tfm *tfm;
 	struct crypto_aead *aead;
-	struct crypto_skcipher *null_tfm;
+	struct crypto_sync_skcipher *null_tfm;
 
 	tfm = kzalloc(sizeof(*tfm), GFP_KERNEL);
 	if (!tfm)
--- a/crypto/authenc.c
+++ b/crypto/authenc.c
@@ -33,7 +33,7 @@ struct authenc_instance_ctx {
 struct crypto_authenc_ctx {
 	struct crypto_ahash *auth;
 	struct crypto_skcipher *enc;
-	struct crypto_skcipher *null;
+	struct crypto_sync_skcipher *null;
 };
 
 struct authenc_request_ctx {
@@ -185,9 +185,9 @@ static int crypto_authenc_copy_assoc(struct aead_request *req)
 {
 	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
 	struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
-	SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);
+	SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);
 
-	skcipher_request_set_tfm(skreq, ctx->null);
+	skcipher_request_set_sync_tfm(skreq, ctx->null);
 	skcipher_request_set_callback(skreq, aead_request_flags(req),
 				      NULL, NULL);
 	skcipher_request_set_crypt(skreq, req->src, req->dst, req->assoclen,
@@ -318,7 +318,7 @@ static int crypto_authenc_init_tfm(struct crypto_aead *tfm)
 	struct crypto_authenc_ctx *ctx = crypto_aead_ctx(tfm);
 	struct crypto_ahash *auth;
 	struct crypto_skcipher *enc;
-	struct crypto_skcipher *null;
+	struct crypto_sync_skcipher *null;
 	int err;
 
 	auth = crypto_spawn_ahash(&ictx->auth);
--- a/crypto/authencesn.c
+++ b/crypto/authencesn.c
@@ -36,7 +36,7 @@ struct crypto_authenc_esn_ctx {
 	unsigned int reqoff;
 	struct crypto_ahash *auth;
 	struct crypto_skcipher *enc;
-	struct crypto_skcipher *null;
+	struct crypto_sync_skcipher *null;
 };
 
 struct authenc_esn_request_ctx {
@@ -183,9 +183,9 @@ static int crypto_authenc_esn_copy(struct aead_request *req, unsigned int len)
 {
 	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
 	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
-	SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);
+	SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);
 
-	skcipher_request_set_tfm(skreq, ctx->null);
+	skcipher_request_set_sync_tfm(skreq, ctx->null);
 	skcipher_request_set_callback(skreq, aead_request_flags(req),
 				      NULL, NULL);
 	skcipher_request_set_crypt(skreq, req->src, req->dst, len, NULL);
@@ -341,7 +341,7 @@ static int crypto_authenc_esn_init_tfm(struct crypto_aead *tfm)
 	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);
 	struct crypto_ahash *auth;
 	struct crypto_skcipher *enc;
-	struct crypto_skcipher *null;
+	struct crypto_sync_skcipher *null;
 	int err;
 
 	auth = crypto_spawn_ahash(&ictx->auth);
--- a/crypto/crypto_null.c
+++ b/crypto/crypto_null.c
@@ -26,7 +26,7 @@
 #include <linux/string.h>
 
 static DEFINE_MUTEX(crypto_default_null_skcipher_lock);
-static struct crypto_skcipher *crypto_default_null_skcipher;
+static struct crypto_sync_skcipher *crypto_default_null_skcipher;
 static int crypto_default_null_skcipher_refcnt;
 
 static int null_compress(struct crypto_tfm *tfm, const u8 *src,
@@ -152,16 +152,15 @@ MODULE_ALIAS_CRYPTO("compress_null");
 MODULE_ALIAS_CRYPTO("digest_null");
 MODULE_ALIAS_CRYPTO("cipher_null");
 
-struct crypto_skcipher *crypto_get_default_null_skcipher(void)
+struct crypto_sync_skcipher *crypto_get_default_null_skcipher(void)
 {
-	struct crypto_skcipher *tfm;
+	struct crypto_sync_skcipher *tfm;
 
 	mutex_lock(&crypto_default_null_skcipher_lock);
 	tfm = crypto_default_null_skcipher;
 
 	if (!tfm) {
-		tfm = crypto_alloc_skcipher("ecb(cipher_null)",
-					    0, CRYPTO_ALG_ASYNC);
+		tfm = crypto_alloc_sync_skcipher("ecb(cipher_null)", 0, 0);
 		if (IS_ERR(tfm))
 			goto unlock;
 
@@ -181,7 +180,7 @@ void crypto_put_default_null_skcipher(void)
 {
 	mutex_lock(&crypto_default_null_skcipher_lock);
 	if (!--crypto_default_null_skcipher_refcnt) {
-		crypto_free_skcipher(crypto_default_null_skcipher);
+		crypto_free_sync_skcipher(crypto_default_null_skcipher);
 		crypto_default_null_skcipher = NULL;
 	}
 	mutex_unlock(&crypto_default_null_skcipher_lock);
--- a/crypto/echainiv.c
+++ b/crypto/echainiv.c
@@ -47,9 +47,9 @@ static int echainiv_encrypt(struct aead_request *req)
 	info = req->iv;
 
 	if (req->src != req->dst) {
-		SKCIPHER_REQUEST_ON_STACK(nreq, ctx->sknull);
+		SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->sknull);
 
-		skcipher_request_set_tfm(nreq, ctx->sknull);
+		skcipher_request_set_sync_tfm(nreq, ctx->sknull);
 		skcipher_request_set_callback(nreq, req->base.flags,
 					      NULL, NULL);
 		skcipher_request_set_crypt(nreq, req->src, req->dst,
--- a/crypto/gcm.c
+++ b/crypto/gcm.c
@@ -50,7 +50,7 @@ struct crypto_rfc4543_instance_ctx {
 
 struct crypto_rfc4543_ctx {
 	struct crypto_aead *child;
-	struct crypto_skcipher *null;
+	struct crypto_sync_skcipher *null;
 	u8 nonce[4];
 };
 
@@ -1067,9 +1067,9 @@ static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc)
 	unsigned int authsize = crypto_aead_authsize(aead);
 	unsigned int nbytes = req->assoclen + req->cryptlen -
 			      (enc ? 0 : authsize);
-	SKCIPHER_REQUEST_ON_STACK(nreq, ctx->null);
+	SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->null);
 
-	skcipher_request_set_tfm(nreq, ctx->null);
+	skcipher_request_set_sync_tfm(nreq, ctx->null);
 	skcipher_request_set_callback(nreq, req->base.flags, NULL, NULL);
 	skcipher_request_set_crypt(nreq, req->src, req->dst, nbytes, NULL);
 
@@ -1093,7 +1093,7 @@ static int crypto_rfc4543_init_tfm(struct crypto_aead *tfm)
 	struct crypto_aead_spawn *spawn = &ictx->aead;
 	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);
 	struct crypto_aead *aead;
-	struct crypto_skcipher *null;
+	struct crypto_sync_skcipher *null;
 	unsigned long align;
 	int err = 0;
 
--- a/crypto/seqiv.c
+++ b/crypto/seqiv.c
@@ -73,9 +73,9 @@ static int seqiv_aead_encrypt(struct aead_request *req)
 	info = req->iv;
 
 	if (req->src != req->dst) {
-		SKCIPHER_REQUEST_ON_STACK(nreq, ctx->sknull);
+		SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->sknull);
 
-		skcipher_request_set_tfm(nreq, ctx->sknull);
+		skcipher_request_set_sync_tfm(nreq, ctx->sknull);
 		skcipher_request_set_callback(nreq, req->base.flags,
 					      NULL, NULL);
 		skcipher_request_set_crypt(nreq, req->src, req->dst,
--- a/include/crypto/internal/geniv.h
+++ b/include/crypto/internal/geniv.h
@@ -20,7 +20,7 @@
 struct aead_geniv_ctx {
 	spinlock_t lock;
 	struct crypto_aead *child;
-	struct crypto_skcipher *sknull;
+	struct crypto_sync_skcipher *sknull;
 	u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
 };
 
--- a/include/crypto/null.h
+++ b/include/crypto/null.h
@@ -9,7 +9,7 @@
 #define NULL_DIGEST_SIZE 0
 #define NULL_IV_SIZE 0
 
-struct crypto_skcipher *crypto_get_default_null_skcipher(void);
+struct crypto_sync_skcipher *crypto_get_default_null_skcipher(void);
 void crypto_put_default_null_skcipher(void);
 
 #endif
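As a usage note, here is a hedged sketch of how a caller might use the converted default null skcipher after this change. copy_via_default_null() is a hypothetical wrapper, not something added by the patch; it simply exercises the interface declared in the null.h hunk above.

/*
 * Hypothetical caller (not from the patch): take a reference on the shared
 * null skcipher, copy src to dst through it, then drop the reference.
 */
#include <crypto/null.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int copy_via_default_null(struct scatterlist *src,
				 struct scatterlist *dst, unsigned int len)
{
	struct crypto_sync_skcipher *ntfm;
	int err;

	ntfm = crypto_get_default_null_skcipher();	/* now a sync tfm */
	if (IS_ERR(ntfm))
		return PTR_ERR(ntfm);

	{
		/* Fixed-size on-stack request; no VLA involved. */
		SYNC_SKCIPHER_REQUEST_ON_STACK(req, ntfm);

		skcipher_request_set_sync_tfm(req, ntfm);
		skcipher_request_set_callback(req, 0, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, len, NULL);
		err = crypto_skcipher_encrypt(req);
	}

	crypto_put_default_null_skcipher();
	return err;
}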