arm64/crc-t10dif: expose CRC-T10DIF function through lib

Move the arm64 CRC-T10DIF assembly code into the lib directory and wire
it up to the library interface.  This allows it to be used without going
through the crypto API.  It remains usable via the crypto API as well,
through the shash algorithms that use the library interface.  Thus all the
arch-specific "shash" code becomes unnecessary and is removed.

Note: to see the diff from arch/arm64/crypto/crct10dif-ce-glue.c to
arch/arm64/lib/crc-t10dif-glue.c, view this commit with 'git show -M10'.

Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Reviewed-by: Martin K. Petersen <martin.petersen@oracle.com>
Link: https://lore.kernel.org/r/20241202012056.209768-7-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@google.com>
Author: Eric Biggers
Date: 2024-12-01 17:20:50 -08:00
parent 1684e82936
commit 2051da8585
9 changed files with 86 additions and 148 deletions
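
As an illustration of what the library interface buys (this example is not part of the diff below): a kernel caller can now compute this CRC directly through <linux/crc-t10dif.h>, with no crypto API transform allocation. The sketch uses the existing crc_t10dif() and crc_t10dif_update() library helpers; the module and function names in it are hypothetical, and on arm64 with CONFIG_CRC_T10DIF_ARCH enabled these calls are expected to reach the PMULL/NEON code added here.

// SPDX-License-Identifier: GPL-2.0-only
/* Hypothetical example module: CRC-T10DIF via the library interface. */
#include <linux/crc-t10dif.h>
#include <linux/module.h>
#include <linux/printk.h>

static int __init crc_t10dif_demo_init(void)
{
	static const u8 buf[] = "123456789";
	u16 crc;

	/* One-shot helper; starts from the standard initial CRC of 0. */
	crc = crc_t10dif(buf, sizeof(buf) - 1);
	pr_info("crc_t10dif(\"123456789\") = 0x%04x\n", crc);

	/* Incremental form, for data that arrives in pieces. */
	crc = crc_t10dif_update(0, buf, 4);
	crc = crc_t10dif_update(crc, buf + 4, sizeof(buf) - 1 - 4);
	pr_info("incremental CRC = 0x%04x\n", crc);

	return 0;
}
module_init(crc_t10dif_demo_init);

MODULE_DESCRIPTION("CRC-T10DIF library interface usage sketch");
MODULE_LICENSE("GPL");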

arch/arm64/Kconfig

@@ -22,6 +22,7 @@ config ARM64
select ARCH_HAS_CACHE_LINE_SIZE
select ARCH_HAS_CC_PLATFORM
select ARCH_HAS_CRC32
select ARCH_HAS_CRC_T10DIF if KERNEL_MODE_NEON
select ARCH_HAS_CURRENT_STACK_POINTER
select ARCH_HAS_DEBUG_VIRTUAL
select ARCH_HAS_DEBUG_VM_PGTABLE

arch/arm64/configs/defconfig

@@ -1698,7 +1698,6 @@ CONFIG_CRYPTO_SM3_ARM64_CE=m
CONFIG_CRYPTO_AES_ARM64_CE_BLK=y
CONFIG_CRYPTO_AES_ARM64_BS=m
CONFIG_CRYPTO_AES_ARM64_CE_CCM=y
CONFIG_CRYPTO_CRCT10DIF_ARM64_CE=m
CONFIG_CRYPTO_DEV_SUN8I_CE=m
CONFIG_CRYPTO_DEV_FSL_CAAM=m
CONFIG_CRYPTO_DEV_FSL_DPAA2_CAAM=m

arch/arm64/crypto/Kconfig

@@ -312,15 +312,5 @@ config CRYPTO_SM4_ARM64_CE_GCM
- PMULL (Polynomial Multiply Long) instructions
- NEON (Advanced SIMD) extensions
config CRYPTO_CRCT10DIF_ARM64_CE
tristate "CRCT10DIF (PMULL)"
depends on KERNEL_MODE_NEON && CRC_T10DIF
select CRYPTO_HASH
help
CRC16 CRC algorithm used for the T10 (SCSI) Data Integrity Field (DIF)
Architecture: arm64 using
- PMULL (Polynomial Multiply Long) instructions
endmenu

arch/arm64/crypto/Makefile

@@ -44,9 +44,6 @@ ghash-ce-y := ghash-ce-glue.o ghash-ce-core.o
obj-$(CONFIG_CRYPTO_POLYVAL_ARM64_CE) += polyval-ce.o
polyval-ce-y := polyval-ce-glue.o polyval-ce-core.o
obj-$(CONFIG_CRYPTO_CRCT10DIF_ARM64_CE) += crct10dif-ce.o
crct10dif-ce-y := crct10dif-ce-core.o crct10dif-ce-glue.o
obj-$(CONFIG_CRYPTO_AES_ARM64_CE) += aes-ce-cipher.o
aes-ce-cipher-y := aes-ce-core.o aes-ce-glue.o

arch/arm64/crypto/crct10dif-ce-glue.c (deleted)

@@ -1,132 +0,0 @@
// SPDX-License-Identifier: GPL-2.0-only
/*
* Accelerated CRC-T10DIF using arm64 NEON and Crypto Extensions instructions
*
* Copyright (C) 2016 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
*/
#include <linux/cpufeature.h>
#include <linux/crc-t10dif.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <asm/neon.h>
#include <asm/simd.h>
#define CRC_T10DIF_PMULL_CHUNK_SIZE 16U
asmlinkage void crc_t10dif_pmull_p8(u16 init_crc, const u8 *buf, size_t len,
u8 out[16]);
asmlinkage u16 crc_t10dif_pmull_p64(u16 init_crc, const u8 *buf, size_t len);
static int crct10dif_init(struct shash_desc *desc)
{
u16 *crc = shash_desc_ctx(desc);
*crc = 0;
return 0;
}
static int crct10dif_update_pmull_p8(struct shash_desc *desc, const u8 *data,
unsigned int length)
{
u16 *crcp = shash_desc_ctx(desc);
u16 crc = *crcp;
u8 buf[16];
if (length > CRC_T10DIF_PMULL_CHUNK_SIZE && crypto_simd_usable()) {
kernel_neon_begin();
crc_t10dif_pmull_p8(crc, data, length, buf);
kernel_neon_end();
crc = 0;
data = buf;
length = sizeof(buf);
}
*crcp = crc_t10dif_generic(crc, data, length);
return 0;
}
static int crct10dif_update_pmull_p64(struct shash_desc *desc, const u8 *data,
unsigned int length)
{
u16 *crc = shash_desc_ctx(desc);
if (length >= CRC_T10DIF_PMULL_CHUNK_SIZE && crypto_simd_usable()) {
kernel_neon_begin();
*crc = crc_t10dif_pmull_p64(*crc, data, length);
kernel_neon_end();
} else {
*crc = crc_t10dif_generic(*crc, data, length);
}
return 0;
}
static int crct10dif_final(struct shash_desc *desc, u8 *out)
{
u16 *crc = shash_desc_ctx(desc);
*(u16 *)out = *crc;
return 0;
}
static struct shash_alg crc_t10dif_alg[] = {{
.digestsize = CRC_T10DIF_DIGEST_SIZE,
.init = crct10dif_init,
.update = crct10dif_update_pmull_p8,
.final = crct10dif_final,
.descsize = CRC_T10DIF_DIGEST_SIZE,
.base.cra_name = "crct10dif",
.base.cra_driver_name = "crct10dif-arm64-neon",
.base.cra_priority = 150,
.base.cra_blocksize = CRC_T10DIF_BLOCK_SIZE,
.base.cra_module = THIS_MODULE,
}, {
.digestsize = CRC_T10DIF_DIGEST_SIZE,
.init = crct10dif_init,
.update = crct10dif_update_pmull_p64,
.final = crct10dif_final,
.descsize = CRC_T10DIF_DIGEST_SIZE,
.base.cra_name = "crct10dif",
.base.cra_driver_name = "crct10dif-arm64-ce",
.base.cra_priority = 200,
.base.cra_blocksize = CRC_T10DIF_BLOCK_SIZE,
.base.cra_module = THIS_MODULE,
}};
static int __init crc_t10dif_mod_init(void)
{
if (cpu_have_named_feature(PMULL))
return crypto_register_shashes(crc_t10dif_alg,
ARRAY_SIZE(crc_t10dif_alg));
else
/* only register the first array element */
return crypto_register_shash(crc_t10dif_alg);
}
static void __exit crc_t10dif_mod_exit(void)
{
if (cpu_have_named_feature(PMULL))
crypto_unregister_shashes(crc_t10dif_alg,
ARRAY_SIZE(crc_t10dif_alg));
else
crypto_unregister_shash(crc_t10dif_alg);
}
module_cpu_feature_match(ASIMD, crc_t10dif_mod_init);
module_exit(crc_t10dif_mod_exit);
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_DESCRIPTION("CRC-T10DIF using arm64 NEON and Crypto Extensions");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("crct10dif");
MODULE_ALIAS_CRYPTO("crct10dif-arm64-ce");

arch/arm64/lib/Makefile

@@ -16,6 +16,9 @@ lib-$(CONFIG_ARCH_HAS_UACCESS_FLUSHCACHE) += uaccess_flushcache.o
obj-$(CONFIG_CRC32_ARCH) += crc32-arm64.o
crc32-arm64-y := crc32.o crc32-glue.o
obj-$(CONFIG_CRC_T10DIF_ARCH) += crc-t10dif-arm64.o
crc-t10dif-arm64-y := crc-t10dif-glue.o crc-t10dif-core.o
obj-$(CONFIG_FUNCTION_ERROR_INJECTION) += error-inject.o
obj-$(CONFIG_ARM64_MTE) += mte.o

arch/arm64/lib/crc-t10dif-glue.c (new)

@@ -0,0 +1,81 @@
// SPDX-License-Identifier: GPL-2.0-only
/*
* Accelerated CRC-T10DIF using arm64 NEON and Crypto Extensions instructions
*
* Copyright (C) 2016 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
*/
#include <linux/cpufeature.h>
#include <linux/crc-t10dif.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>
#include <crypto/internal/simd.h>
#include <asm/neon.h>
#include <asm/simd.h>
static DEFINE_STATIC_KEY_FALSE(have_asimd);
static DEFINE_STATIC_KEY_FALSE(have_pmull);
#define CRC_T10DIF_PMULL_CHUNK_SIZE 16U
asmlinkage void crc_t10dif_pmull_p8(u16 init_crc, const u8 *buf, size_t len,
u8 out[16]);
asmlinkage u16 crc_t10dif_pmull_p64(u16 init_crc, const u8 *buf, size_t len);
u16 crc_t10dif_arch(u16 crc, const u8 *data, size_t length)
{
if (length >= CRC_T10DIF_PMULL_CHUNK_SIZE) {
if (static_branch_likely(&have_pmull)) {
if (crypto_simd_usable()) {
kernel_neon_begin();
crc = crc_t10dif_pmull_p64(crc, data, length);
kernel_neon_end();
return crc;
}
} else if (length > CRC_T10DIF_PMULL_CHUNK_SIZE &&
static_branch_likely(&have_asimd) &&
crypto_simd_usable()) {
u8 buf[16];
kernel_neon_begin();
crc_t10dif_pmull_p8(crc, data, length, buf);
kernel_neon_end();
crc = 0;
data = buf;
length = sizeof(buf);
}
}
return crc_t10dif_generic(crc, data, length);
}
EXPORT_SYMBOL(crc_t10dif_arch);
static int __init crc_t10dif_arm64_init(void)
{
if (cpu_have_named_feature(ASIMD)) {
static_branch_enable(&have_asimd);
if (cpu_have_named_feature(PMULL))
static_branch_enable(&have_pmull);
}
return 0;
}
arch_initcall(crc_t10dif_arm64_init);
static void __exit crc_t10dif_arm64_exit(void)
{
}
module_exit(crc_t10dif_arm64_exit);
bool crc_t10dif_is_optimized(void)
{
return static_key_enabled(&have_asimd);
}
EXPORT_SYMBOL(crc_t10dif_is_optimized);
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_DESCRIPTION("CRC-T10DIF using arm64 NEON and Crypto Extensions");
MODULE_LICENSE("GPL v2");
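
For context on how the crc_t10dif_arch() exported above gets called (the header is not part of this commit's diff): with CONFIG_CRC_T10DIF_ARCH enabled, the generic library wrapper in <linux/crc-t10dif.h> is expected to dispatch to the arch override and otherwise fall back to the table-based crc_t10dif_generic(), roughly along these lines:

#include <linux/kconfig.h>
#include <linux/types.h>

u16 crc_t10dif_generic(u16 crc, const u8 *p, size_t len);
u16 crc_t10dif_arch(u16 crc, const u8 *p, size_t len);

/* Library-side dispatch: arch override when built in, generic fallback otherwise. */
static inline u16 crc_t10dif_update(u16 crc, const u8 *p, size_t len)
{
	if (IS_ENABLED(CONFIG_CRC_T10DIF_ARCH))
		return crc_t10dif_arch(crc, p, len);	/* e.g. the arm64 code above */
	return crc_t10dif_generic(crc, p, len);		/* portable table-based code */
}

static inline u16 crc_t10dif(const u8 *p, size_t len)
{
	return crc_t10dif_update(0, p, len);		/* CRC-T10DIF starts from 0 */
}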

tools/testing/selftests/arm64/fp/kernel-test.c

@@ -46,8 +46,7 @@ static void handle_kick_signal(int sig, siginfo_t *info, void *context)
}
static char *drivers[] = {
"crct10dif-arm64-ce",
/* "crct10dif-arm64-neon", - Same priority as generic */
"crct10dif-arm64",
"sha1-ce",
"sha224-arm64",
"sha224-arm64-neon",