mirror of https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
53cc9baeb9
clang-16 warns about casting between incompatible function types:

arch/arm/crypto/sha256_glue.c:37:5: error: cast from 'void (*)(u32 *, const void *, unsigned int)' (aka 'void (*)(unsigned int *, const void *, unsigned int)') to 'sha256_block_fn *' (aka 'void (*)(struct sha256_state *, const unsigned char *, int)') converts to incompatible function type [-Werror,-Wcast-function-type-strict]
   37 |                            (sha256_block_fn *)sha256_block_data_order);
      |                            ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
arch/arm/crypto/sha512-glue.c:34:3: error: cast from 'void (*)(u64 *, const u8 *, int)' (aka 'void (*)(unsigned long long *, const unsigned char *, int)') to 'sha512_block_fn *' (aka 'void (*)(struct sha512_state *, const unsigned char *, int)') converts to incompatible function type [-Werror,-Wcast-function-type-strict]
   34 |                 (sha512_block_fn *)sha512_block_data_order);
      |                 ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Fix the prototypes for the assembler functions to match the typedef. The code already relies on the digest being the first part of the state structure, so there is no change in behavior.

Fixes: c80ae7ca37 ("crypto: arm/sha512 - accelerated SHA-512 using ARM generic ASM and NEON")
Fixes: b59e2ae369 ("crypto: arm/sha256 - move SHA-224/256 ASM/NEON implementation to base layer")
Signed-off-by: Arnd Bergmann <arnd@arndb.de>
Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
118 lines
3.1 KiB
C
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue code for the SHA256 Secure Hash Algorithm assembly implementation
 * using optimized ARM assembler and NEON instructions.
 *
 * Copyright © 2015 Google Inc.
 *
 * This file is based on sha256_ssse3_glue.c:
 *   Copyright (C) 2013 Intel Corporation
 *   Author: Tim Chen <tim.c.chen@linux.intel.com>
 */

#include <crypto/internal/hash.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <linux/string.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <asm/simd.h>
#include <asm/neon.h>

#include "sha256_glue.h"

asmlinkage void sha256_block_data_order(struct sha256_state *state,
					const u8 *data, int num_blks);

int crypto_sha256_arm_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	/* make sure casting to sha256_block_fn() is safe */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

	return sha256_base_do_update(desc, data, len, sha256_block_data_order);
}
EXPORT_SYMBOL(crypto_sha256_arm_update);

static int crypto_sha256_arm_final(struct shash_desc *desc, u8 *out)
{
	sha256_base_do_finalize(desc, sha256_block_data_order);
	return sha256_base_finish(desc, out);
}

int crypto_sha256_arm_finup(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	sha256_base_do_update(desc, data, len, sha256_block_data_order);
	return crypto_sha256_arm_final(desc, out);
}
EXPORT_SYMBOL(crypto_sha256_arm_finup);

static struct shash_alg algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= crypto_sha256_arm_update,
	.final		= crypto_sha256_arm_final,
	.finup		= crypto_sha256_arm_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-asm",
		.cra_priority	 = 150,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= crypto_sha256_arm_update,
	.final		= crypto_sha256_arm_final,
	.finup		= crypto_sha256_arm_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-asm",
		.cra_priority	 = 150,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static int __init sha256_mod_init(void)
{
	int res = crypto_register_shashes(algs, ARRAY_SIZE(algs));

	if (res < 0)
		return res;

	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon()) {
		res = crypto_register_shashes(sha256_neon_algs,
					      ARRAY_SIZE(sha256_neon_algs));

		if (res < 0)
			crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
	}

	return res;
}

static void __exit sha256_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));

	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon())
		crypto_unregister_shashes(sha256_neon_algs,
					  ARRAY_SIZE(sha256_neon_algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm (ARM), including NEON");

MODULE_ALIAS_CRYPTO("sha256");