mirror of
https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
synced 2024-12-29 17:23:36 +00:00
f235bc11cc
In aes-neonbs, instead of going through the crypto API for the parts that the bit-sliced AES code doesn't handle, namely AES-CBC encryption and single-block AES, just call the ARM scalar AES cipher directly. This basically goes back to the original approach that was used before commit b56f5cbc7e
("crypto: arm/aes-neonbs - resolve fallback cipher at runtime"). Calling the ARM scalar AES cipher directly is faster, simpler, and avoids any chance of bugs specific to the use of fallback ciphers such as module loading deadlocks which have happened twice. The deadlocks turned out to be fixable in other ways, but there's no need to rely on anything so fragile in the first place. The rationale for the above-mentioned commit was to allow people to choose to use a time-invariant AES implementation for the fallback cipher. There are a couple problems with that rationale, though: - In practice the ARM scalar AES cipher (aes-arm) was used anyway, since it has a higher priority than aes-fixed-time. Users *could* go out of their way to disable or blacklist aes-arm, or to lower its priority using NETLINK_CRYPTO, but very few users customize the crypto API to this extent. Systems with the ARMv8 Crypto Extensions used aes-ce, but the bit-sliced algorithms are irrelevant on such systems anyway. - Since commit 913a3aa07d
("crypto: arm/aes - add some hardening against cache-timing attacks"), the ARM scalar AES cipher is partially hardened against cache-timing attacks. It actually works like aes-fixed-time, in that it disables interrupts and prefetches its lookup table. It does use a larger table than aes-fixed-time, but even so, it is not clear that aes-fixed-time is meaningfully more time-invariant than aes-arm. And of course, the real solution for time-invariant AES is to use a CPU that supports AES instructions. Signed-off-by: Eric Biggers <ebiggers@google.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
70 lines
1.7 KiB
C
70 lines
1.7 KiB
C
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Scalar AES core transform
 *
 * Copyright (C) 2017 Linaro Ltd.
 * Author: Ard Biesheuvel <ard.biesheuvel@linaro.org>
 */
|
#include <crypto/aes.h>
|
|
#include <crypto/algapi.h>
|
|
#include <linux/module.h>
|
|
#include "aes-cipher.h"
|
|
|
|
EXPORT_SYMBOL_GPL(__aes_arm_encrypt);
|
|
EXPORT_SYMBOL_GPL(__aes_arm_decrypt);
|
|
|
|
static void aes_arm_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
|
|
{
|
|
struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
int rounds = 6 + ctx->key_length / 4;
|
|
|
|
__aes_arm_encrypt(ctx->key_enc, rounds, in, out);
|
|
}
|
|
|
|
static void aes_arm_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
|
|
{
|
|
struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
int rounds = 6 + ctx->key_length / 4;
|
|
|
|
__aes_arm_decrypt(ctx->key_dec, rounds, in, out);
|
|
}
|
|
|
|
static struct crypto_alg aes_alg = {
|
|
.cra_name = "aes",
|
|
.cra_driver_name = "aes-arm",
|
|
.cra_priority = 200,
|
|
.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
|
|
.cra_blocksize = AES_BLOCK_SIZE,
|
|
.cra_ctxsize = sizeof(struct crypto_aes_ctx),
|
|
.cra_module = THIS_MODULE,
|
|
|
|
.cra_cipher.cia_min_keysize = AES_MIN_KEY_SIZE,
|
|
.cra_cipher.cia_max_keysize = AES_MAX_KEY_SIZE,
|
|
.cra_cipher.cia_setkey = crypto_aes_set_key,
|
|
.cra_cipher.cia_encrypt = aes_arm_encrypt,
|
|
.cra_cipher.cia_decrypt = aes_arm_decrypt,
|
|
|
|
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
|
|
.cra_alignmask = 3,
|
|
#endif
|
|
};
|
|
|
|
static int __init aes_init(void)
|
|
{
|
|
return crypto_register_alg(&aes_alg);
|
|
}
|
|
|
|
static void __exit aes_fini(void)
|
|
{
|
|
crypto_unregister_alg(&aes_alg);
|
|
}
|
|
|
|
module_init(aes_init);
|
|
module_exit(aes_fini);
|
|
|
|
MODULE_DESCRIPTION("Scalar AES cipher for ARM");
|
|
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
|
|
MODULE_LICENSE("GPL v2");
|
|
MODULE_ALIAS_CRYPTO("aes");
|