lib/crypto: chacha: Consolidate into single module

Consolidate the ChaCha code into a single module (excluding
chacha-block-generic.c which remains always built-in for random.c),
similar to various other algorithms:

- Each arch now provides a header file lib/crypto/$(SRCARCH)/chacha.h,
  replacing lib/crypto/$(SRCARCH)/chacha*.c.  The header defines
  chacha_crypt_arch() and hchacha_block_arch().  It is included by
  lib/crypto/chacha.c, and thus the code gets built into the single
  libchacha module, with improved inlining in some cases.  (A condensed
  sketch of the resulting dispatch follows this list.)

- Whether arch-optimized ChaCha is buildable is now controlled centrally
  by lib/crypto/Kconfig instead of by lib/crypto/$(SRCARCH)/Kconfig.
  The conditions for enabling it remain the same as before, and it
  remains enabled by default.

- Any additional arch-specific translation units for the optimized
  ChaCha code, such as assembly files, are now compiled by
  lib/crypto/Makefile instead of lib/crypto/$(SRCARCH)/Makefile.
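
For orientation, the dispatch that falls out of this is small; the
following is condensed from the new lib/crypto/chacha.c shown in the
diff below (nothing here is new API), and hchacha_block() is wrapped
the same way:

    /* In lib/crypto/chacha.c: pull in the arch header, or alias the generic code. */
    #ifdef CONFIG_CRYPTO_LIB_CHACHA_ARCH
    #include "chacha.h"          /* resolves to lib/crypto/$(SRCARCH)/chacha.h */
    #else
    #define chacha_crypt_arch  chacha_crypt_generic
    #define hchacha_block_arch hchacha_block_generic
    #endif

    /* The only exported entry point; the arch code is static and can be inlined here. */
    void chacha_crypt(struct chacha_state *state, u8 *dst, const u8 *src,
                      unsigned int bytes, int nrounds)
    {
            chacha_crypt_arch(state, dst, src, bytes, nrounds);
    }
    EXPORT_SYMBOL_GPL(chacha_crypt);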

This removes the last use of the Makefile and Kconfig files in the
arm64, mips, powerpc, riscv, and s390 subdirectories of lib/crypto/.  So
also remove those files and the references to them.
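
For reference, the per-arch surface that remains is just a header of
static functions plus an opt-in init hook.  Below is a heavily condensed
sketch based on the arm/arm64 hunks later in this diff; the real headers
dispatch to SIMD code behind static keys instead of falling back:

    /* Shape of lib/crypto/$(SRCARCH)/chacha.h; bodies here just fall back to generic. */
    static void hchacha_block_arch(const struct chacha_state *state,
                                   u32 out[HCHACHA_OUT_WORDS], int nrounds)
    {
            hchacha_block_generic(state, out, nrounds);
    }

    static void chacha_crypt_arch(struct chacha_state *state, u8 *dst,
                                  const u8 *src, unsigned int bytes, int nrounds)
    {
            chacha_crypt_generic(state, dst, src, bytes, nrounds);
    }

    #define chacha_mod_init_arch chacha_mod_init_arch
    static void chacha_mod_init_arch(void)
    {
            /* probe CPU features and flip static keys here */
    }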

Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20250827151131.27733-7-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
Eric Biggers <ebiggers@kernel.org>  2025-08-27 08:11:25 -07:00
commit 13cecc526d (parent 1ae46b6eb5)
25 changed files with 127 additions and 290 deletions

--- a/include/crypto/chacha.h
+++ b/include/crypto/chacha.h
@@ -45,19 +45,11 @@ static inline void chacha20_block(struct chacha_state *state,
 	chacha_block_generic(state, out, 20);
 }
 
-void hchacha_block_arch(const struct chacha_state *state,
-			u32 out[HCHACHA_OUT_WORDS], int nrounds);
 void hchacha_block_generic(const struct chacha_state *state,
 			   u32 out[HCHACHA_OUT_WORDS], int nrounds);
 
-static inline void hchacha_block(const struct chacha_state *state,
-				 u32 out[HCHACHA_OUT_WORDS], int nrounds)
-{
-	if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
-		hchacha_block_arch(state, out, nrounds);
-	else
-		hchacha_block_generic(state, out, nrounds);
-}
+void hchacha_block(const struct chacha_state *state,
+		   u32 out[HCHACHA_OUT_WORDS], int nrounds);
 
 enum chacha_constants { /* expand 32-byte k */
 	CHACHA_CONSTANT_EXPA = 0x61707865U,
@@ -93,20 +85,8 @@ static inline void chacha_init(struct chacha_state *state,
 	state->x[15] = get_unaligned_le32(iv + 12);
 }
 
-void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
-		       unsigned int bytes, int nrounds);
-void chacha_crypt_generic(struct chacha_state *state, u8 *dst, const u8 *src,
-			  unsigned int bytes, int nrounds);
-
-static inline void chacha_crypt(struct chacha_state *state,
-				u8 *dst, const u8 *src,
-				unsigned int bytes, int nrounds)
-{
-	if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
-		chacha_crypt_arch(state, dst, src, bytes, nrounds);
-	else
-		chacha_crypt_generic(state, dst, src, bytes, nrounds);
-}
+void chacha_crypt(struct chacha_state *state, u8 *dst, const u8 *src,
+		  unsigned int bytes, int nrounds);
 
 static inline void chacha20_crypt(struct chacha_state *state,
 				  u8 *dst, const u8 *src, unsigned int bytes)

--- a/lib/crypto/Kconfig
+++ b/lib/crypto/Kconfig
@@ -44,29 +44,23 @@ config CRYPTO_LIB_BLAKE2S_GENERIC
 	  implementation is enabled, this implementation serves the users
 	  of CRYPTO_LIB_BLAKE2S.
 
-config CRYPTO_ARCH_HAVE_LIB_CHACHA
-	bool
-	help
-	  Declares whether the architecture provides an arch-specific
-	  accelerated implementation of the ChaCha library interface,
-	  either builtin or as a module.
-
-config CRYPTO_LIB_CHACHA_GENERIC
-	tristate
-	default CRYPTO_LIB_CHACHA if !CRYPTO_ARCH_HAVE_LIB_CHACHA
-	select CRYPTO_LIB_UTILS
-	help
-	  This symbol can be selected by arch implementations of the ChaCha
-	  library interface that require the generic code as a fallback, e.g.,
-	  for SIMD implementations. If no arch specific implementation is
-	  enabled, this implementation serves the users of CRYPTO_LIB_CHACHA.
-
 config CRYPTO_LIB_CHACHA
 	tristate
+	select CRYPTO_LIB_UTILS
 	help
-	  Enable the ChaCha library interface. This interface may be fulfilled
-	  by either the generic implementation or an arch-specific one, if one
-	  is available and enabled.
+	  Enable the ChaCha library interface. Select this if your module uses
+	  chacha_crypt() or hchacha_block().
+
+config CRYPTO_LIB_CHACHA_ARCH
+	bool
+	depends on CRYPTO_LIB_CHACHA && !UML && !KMSAN
+	default y if ARM
+	default y if ARM64 && KERNEL_MODE_NEON
+	default y if MIPS && CPU_MIPS32_R2
+	default y if PPC64 && CPU_LITTLE_ENDIAN && VSX
+	default y if RISCV && 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
+	default y if S390
+	default y if X86_64
 
 config CRYPTO_ARCH_HAVE_LIB_CURVE25519
 	bool
@@ -218,21 +212,6 @@ if !KMSAN # avoid false positives from assembly
 if ARM
 source "lib/crypto/arm/Kconfig"
 endif
-if ARM64
-source "lib/crypto/arm64/Kconfig"
-endif
-if MIPS
-source "lib/crypto/mips/Kconfig"
-endif
-if PPC
-source "lib/crypto/powerpc/Kconfig"
-endif
-if RISCV
-source "lib/crypto/riscv/Kconfig"
-endif
-if S390
-source "lib/crypto/s390/Kconfig"
-endif
 if X86
 source "lib/crypto/x86/Kconfig"
 endif

--- a/lib/crypto/Makefile
+++ b/lib/crypto/Makefile
@@ -15,11 +15,6 @@ obj-$(CONFIG_CRYPTO_HASH_INFO) += hash_info.o
 obj-$(CONFIG_CRYPTO_LIB_UTILS) += libcryptoutils.o
 libcryptoutils-y := memneq.o utils.o
 
-# chacha20_block() is used by the /dev/random driver which is always builtin
-obj-y += chacha-block-generic.o
-obj-$(CONFIG_CRYPTO_LIB_CHACHA_GENERIC) += libchacha.o
-libchacha-y := chacha.o
-
 obj-$(CONFIG_CRYPTO_LIB_AES) += libaes.o
 libaes-y := aes.o
@@ -40,6 +35,39 @@ libblake2s-y := blake2s.o
 libblake2s-$(CONFIG_CRYPTO_LIB_BLAKE2S_GENERIC) += blake2s-generic.o
 libblake2s-$(CONFIG_CRYPTO_SELFTESTS) += blake2s-selftest.o
 
+################################################################################
+
+# chacha20_block() is used by the /dev/random driver which is always builtin
+obj-y += chacha-block-generic.o
+
+obj-$(CONFIG_CRYPTO_LIB_CHACHA) += libchacha.o
+libchacha-y := chacha.o
+
+ifeq ($(CONFIG_CRYPTO_LIB_CHACHA_ARCH),y)
+CFLAGS_chacha.o += -I$(src)/$(SRCARCH)
+
+ifeq ($(CONFIG_ARM),y)
+libchacha-y += arm/chacha-scalar-core.o
+libchacha-$(CONFIG_KERNEL_MODE_NEON) += arm/chacha-neon-core.o
+endif
+
+libchacha-$(CONFIG_ARM64) += arm64/chacha-neon-core.o
+
+ifeq ($(CONFIG_MIPS),y)
+libchacha-y += mips/chacha-core.o
+AFLAGS_mips/chacha-core.o += -O2 # needed to fill branch delay slots
+endif
+
+libchacha-$(CONFIG_PPC) += powerpc/chacha-p10le-8x.o
+libchacha-$(CONFIG_RISCV) += riscv/chacha-riscv64-zvkb.o
+libchacha-$(CONFIG_S390) += s390/chacha-s390.o
+libchacha-$(CONFIG_X86) += x86/chacha-ssse3-x86_64.o \
+			   x86/chacha-avx2-x86_64.o \
+			   x86/chacha-avx512vl-x86_64.o
+endif # CONFIG_CRYPTO_LIB_CHACHA_ARCH
+
+################################################################################
+
 obj-$(CONFIG_CRYPTO_LIB_CHACHA20POLY1305) += libchacha20poly1305.o
 libchacha20poly1305-y += chacha20poly1305.o
 libchacha20poly1305-$(CONFIG_CRYPTO_SELFTESTS) += chacha20poly1305-selftest.o
@@ -231,11 +259,6 @@ obj-$(CONFIG_CRYPTO_LIB_SM3) += libsm3.o
 libsm3-y := sm3.o
 
 obj-$(CONFIG_ARM) += arm/
-obj-$(CONFIG_ARM64) += arm64/
-obj-$(CONFIG_MIPS) += mips/
-obj-$(CONFIG_PPC) += powerpc/
-obj-$(CONFIG_RISCV) += riscv/
-obj-$(CONFIG_S390) += s390/
 obj-$(CONFIG_X86) += x86/
 
 # clean-files must be defined unconditionally

--- a/lib/crypto/arm/Kconfig
+++ b/lib/crypto/arm/Kconfig
@@ -12,8 +12,3 @@ config CRYPTO_BLAKE2S_ARM
 	  BLAKE2b, but slower than the NEON implementation of BLAKE2b.
 	  There is no NEON implementation of BLAKE2s, since NEON doesn't
 	  really help with it.
-
-config CRYPTO_CHACHA20_NEON
-	tristate
-	default CRYPTO_LIB_CHACHA
-	select CRYPTO_ARCH_HAVE_LIB_CHACHA

--- a/lib/crypto/arm/Makefile
+++ b/lib/crypto/arm/Makefile
@@ -2,7 +2,3 @@
 
 obj-$(CONFIG_CRYPTO_BLAKE2S_ARM) += libblake2s-arm.o
 libblake2s-arm-y := blake2s-core.o blake2s-glue.o
-
-obj-$(CONFIG_CRYPTO_CHACHA20_NEON) += chacha-neon.o
-chacha-neon-y := chacha-scalar-core.o chacha-glue.o
-chacha-neon-$(CONFIG_KERNEL_MODE_NEON) += chacha-neon-core.o

--- a/lib/crypto/arm/chacha-glue.c
+++ b/lib/crypto/arm/chacha.h
@@ -1,4 +1,4 @@
-// SPDX-License-Identifier: GPL-2.0
+/* SPDX-License-Identifier: GPL-2.0 */
 /*
  * ChaCha and HChaCha functions (ARM optimized)
  *
@@ -6,11 +6,9 @@
  * Copyright (C) 2015 Martin Willi
  */
 
-#include <crypto/chacha.h>
 #include <crypto/internal/simd.h>
 #include <linux/jump_label.h>
 #include <linux/kernel.h>
-#include <linux/module.h>
 
 #include <asm/cputype.h>
 #include <asm/hwcap.h>
@@ -64,8 +62,8 @@ static void chacha_doneon(struct chacha_state *state, u8 *dst, const u8 *src,
 	}
 }
 
-void hchacha_block_arch(const struct chacha_state *state,
-			u32 out[HCHACHA_OUT_WORDS], int nrounds)
+static void hchacha_block_arch(const struct chacha_state *state,
+			       u32 out[HCHACHA_OUT_WORDS], int nrounds)
 {
 	if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon_usable()) {
 		hchacha_block_arm(state, out, nrounds);
@@ -75,10 +73,9 @@ void hchacha_block_arch(const struct chacha_state *state,
 		kernel_neon_end();
 	}
 }
-EXPORT_SYMBOL(hchacha_block_arch);
 
-void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
-		       unsigned int bytes, int nrounds)
+static void chacha_crypt_arch(struct chacha_state *state, u8 *dst,
+			      const u8 *src, unsigned int bytes, int nrounds)
 {
 	if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon_usable() ||
 	    bytes <= CHACHA_BLOCK_SIZE) {
@@ -99,9 +96,9 @@ void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
 		dst += todo;
 	} while (bytes);
 }
-EXPORT_SYMBOL(chacha_crypt_arch);
 
-static int __init chacha_arm_mod_init(void)
+#define chacha_mod_init_arch chacha_mod_init_arch
+static void chacha_mod_init_arch(void)
 {
 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON)) {
 		switch (read_cpuid_part()) {
@@ -117,15 +114,4 @@ static int __init chacha_arm_mod_init(void)
 			static_branch_enable(&use_neon);
 		}
 	}
-	return 0;
 }
-subsys_initcall(chacha_arm_mod_init);
-
-static void __exit chacha_arm_mod_exit(void)
-{
-}
-module_exit(chacha_arm_mod_exit);
-
-MODULE_DESCRIPTION("ChaCha and HChaCha functions (ARM optimized)");
-MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
-MODULE_LICENSE("GPL v2");

--- a/lib/crypto/arm64/Kconfig
+++ /dev/null
@@ -1,8 +0,0 @@
-# SPDX-License-Identifier: GPL-2.0-only
-
-config CRYPTO_CHACHA20_NEON
-	tristate
-	depends on KERNEL_MODE_NEON
-	default CRYPTO_LIB_CHACHA
-	select CRYPTO_LIB_CHACHA_GENERIC
-	select CRYPTO_ARCH_HAVE_LIB_CHACHA

--- a/lib/crypto/arm64/Makefile
+++ /dev/null
@@ -1,4 +0,0 @@
-# SPDX-License-Identifier: GPL-2.0-only
-
-obj-$(CONFIG_CRYPTO_CHACHA20_NEON) += chacha-neon.o
-chacha-neon-y := chacha-neon-core.o chacha-neon-glue.o

--- a/lib/crypto/arm64/chacha-neon-glue.c
+++ b/lib/crypto/arm64/chacha.h
@@ -18,11 +18,9 @@
  * (at your option) any later version.
  */
 
-#include <crypto/chacha.h>
 #include <crypto/internal/simd.h>
 #include <linux/jump_label.h>
 #include <linux/kernel.h>
-#include <linux/module.h>
 
 #include <asm/hwcap.h>
 #include <asm/neon.h>
@@ -61,8 +59,8 @@ static void chacha_doneon(struct chacha_state *state, u8 *dst, const u8 *src,
 	}
 }
 
-void hchacha_block_arch(const struct chacha_state *state,
-			u32 out[HCHACHA_OUT_WORDS], int nrounds)
+static void hchacha_block_arch(const struct chacha_state *state,
+			       u32 out[HCHACHA_OUT_WORDS], int nrounds)
 {
 	if (!static_branch_likely(&have_neon) || !crypto_simd_usable()) {
 		hchacha_block_generic(state, out, nrounds);
@@ -72,10 +70,9 @@ void hchacha_block_arch(const struct chacha_state *state,
 		kernel_neon_end();
 	}
 }
-EXPORT_SYMBOL(hchacha_block_arch);
 
-void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
-		       unsigned int bytes, int nrounds)
+static void chacha_crypt_arch(struct chacha_state *state, u8 *dst,
+			      const u8 *src, unsigned int bytes, int nrounds)
 {
 	if (!static_branch_likely(&have_neon) || bytes <= CHACHA_BLOCK_SIZE ||
 	    !crypto_simd_usable())
@@ -93,21 +90,10 @@ void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
 		dst += todo;
 	} while (bytes);
 }
-EXPORT_SYMBOL(chacha_crypt_arch);
 
-static int __init chacha_simd_mod_init(void)
+#define chacha_mod_init_arch chacha_mod_init_arch
+static void chacha_mod_init_arch(void)
 {
 	if (cpu_have_named_feature(ASIMD))
 		static_branch_enable(&have_neon);
-	return 0;
 }
-subsys_initcall(chacha_simd_mod_init);
-
-static void __exit chacha_simd_mod_exit(void)
-{
-}
-module_exit(chacha_simd_mod_exit);
-
-MODULE_DESCRIPTION("ChaCha and HChaCha functions (ARM64 optimized)");
-MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
-MODULE_LICENSE("GPL v2");

--- a/lib/crypto/chacha.c
+++ b/lib/crypto/chacha.c
@@ -11,8 +11,9 @@
 #include <linux/kernel.h>
 #include <linux/module.h>
 
-void chacha_crypt_generic(struct chacha_state *state, u8 *dst, const u8 *src,
-			  unsigned int bytes, int nrounds)
+static void __maybe_unused
+chacha_crypt_generic(struct chacha_state *state, u8 *dst, const u8 *src,
+		     unsigned int bytes, int nrounds)
 {
 	/* aligned to potentially speed up crypto_xor() */
 	u8 stream[CHACHA_BLOCK_SIZE] __aligned(sizeof(long));
@@ -29,7 +30,41 @@ void chacha_crypt_generic(struct chacha_state *state, u8 *dst, const u8 *src,
 			crypto_xor_cpy(dst, src, stream, bytes);
 	}
 }
-EXPORT_SYMBOL(chacha_crypt_generic);
+
+#ifdef CONFIG_CRYPTO_LIB_CHACHA_ARCH
+#include "chacha.h" /* $(SRCARCH)/chacha.h */
+#else
+#define chacha_crypt_arch chacha_crypt_generic
+#define hchacha_block_arch hchacha_block_generic
+#endif
+
+void chacha_crypt(struct chacha_state *state, u8 *dst, const u8 *src,
+		  unsigned int bytes, int nrounds)
+{
+	chacha_crypt_arch(state, dst, src, bytes, nrounds);
+}
+EXPORT_SYMBOL_GPL(chacha_crypt);
+
+void hchacha_block(const struct chacha_state *state,
+		   u32 out[HCHACHA_OUT_WORDS], int nrounds)
+{
+	hchacha_block_arch(state, out, nrounds);
+}
+EXPORT_SYMBOL_GPL(hchacha_block);
+
+#ifdef chacha_mod_init_arch
+static int __init chacha_mod_init(void)
+{
+	chacha_mod_init_arch();
+	return 0;
+}
+subsys_initcall(chacha_mod_init);
+
+static void __exit chacha_mod_exit(void)
+{
+}
+module_exit(chacha_mod_exit);
+#endif
 
 MODULE_DESCRIPTION("ChaCha stream cipher (RFC7539)");
 MODULE_LICENSE("GPL");

--- a/lib/crypto/mips/Kconfig
+++ /dev/null
@@ -1,7 +0,0 @@
-# SPDX-License-Identifier: GPL-2.0-only
-
-config CRYPTO_CHACHA_MIPS
-	tristate
-	depends on CPU_MIPS32_R2
-	default CRYPTO_LIB_CHACHA
-	select CRYPTO_ARCH_HAVE_LIB_CHACHA

--- a/lib/crypto/mips/Makefile
+++ /dev/null
@@ -1,5 +0,0 @@
-# SPDX-License-Identifier: GPL-2.0-only
-
-obj-$(CONFIG_CRYPTO_CHACHA_MIPS) += chacha-mips.o
-chacha-mips-y := chacha-core.o chacha-glue.o
-AFLAGS_chacha-core.o += -O2 # needed to fill branch delay slots

--- a/lib/crypto/mips/chacha-glue.c
+++ b/lib/crypto/mips/chacha.h
@@ -1,23 +1,14 @@
-// SPDX-License-Identifier: GPL-2.0
+/* SPDX-License-Identifier: GPL-2.0 */
 /*
  * ChaCha and HChaCha functions (MIPS optimized)
  *
  * Copyright (C) 2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
  */
 
-#include <crypto/chacha.h>
 #include <linux/kernel.h>
-#include <linux/module.h>
 
 asmlinkage void chacha_crypt_arch(struct chacha_state *state,
 				  u8 *dst, const u8 *src,
 				  unsigned int bytes, int nrounds);
-EXPORT_SYMBOL(chacha_crypt_arch);
 
 asmlinkage void hchacha_block_arch(const struct chacha_state *state,
 				   u32 out[HCHACHA_OUT_WORDS], int nrounds);
-EXPORT_SYMBOL(hchacha_block_arch);
-
-MODULE_DESCRIPTION("ChaCha and HChaCha functions (MIPS optimized)");
-MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
-MODULE_LICENSE("GPL v2");

--- a/lib/crypto/powerpc/Kconfig
+++ /dev/null
@@ -1,8 +0,0 @@
-# SPDX-License-Identifier: GPL-2.0-only
-
-config CRYPTO_CHACHA20_P10
-	tristate
-	depends on PPC64 && CPU_LITTLE_ENDIAN && VSX
-	default CRYPTO_LIB_CHACHA
-	select CRYPTO_LIB_CHACHA_GENERIC
-	select CRYPTO_ARCH_HAVE_LIB_CHACHA

--- a/lib/crypto/powerpc/Makefile
+++ /dev/null
@@ -1,4 +0,0 @@
-# SPDX-License-Identifier: GPL-2.0-only
-
-obj-$(CONFIG_CRYPTO_CHACHA20_P10) += chacha-p10-crypto.o
-chacha-p10-crypto-y := chacha-p10-glue.o chacha-p10le-8x.o

--- a/lib/crypto/powerpc/chacha-p10-glue.c
+++ b/lib/crypto/powerpc/chacha.h
@@ -1,14 +1,12 @@
-// SPDX-License-Identifier: GPL-2.0-or-later
+/* SPDX-License-Identifier: GPL-2.0-or-later */
 /*
  * ChaCha stream cipher (P10 accelerated)
  *
  * Copyright 2023- IBM Corp. All rights reserved.
  */
 
-#include <crypto/chacha.h>
 #include <crypto/internal/simd.h>
 #include <linux/kernel.h>
-#include <linux/module.h>
 #include <linux/cpufeature.h>
 #include <linux/sizes.h>
 #include <asm/simd.h>
@@ -48,15 +46,10 @@ static void chacha_p10_do_8x(struct chacha_state *state, u8 *dst, const u8 *src,
 		chacha_crypt_generic(state, dst, src, bytes, nrounds);
 }
 
-void hchacha_block_arch(const struct chacha_state *state,
-			u32 out[HCHACHA_OUT_WORDS], int nrounds)
-{
-	hchacha_block_generic(state, out, nrounds);
-}
-EXPORT_SYMBOL(hchacha_block_arch);
+#define hchacha_block_arch hchacha_block_generic /* not implemented yet */
 
-void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
-		       unsigned int bytes, int nrounds)
+static void chacha_crypt_arch(struct chacha_state *state, u8 *dst,
+			      const u8 *src, unsigned int bytes, int nrounds)
 {
 	if (!static_branch_likely(&have_p10) || bytes <= CHACHA_BLOCK_SIZE ||
 	    !crypto_simd_usable())
@@ -74,21 +67,10 @@ void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
 		dst += todo;
 	} while (bytes);
 }
-EXPORT_SYMBOL(chacha_crypt_arch);
 
-static int __init chacha_p10_init(void)
+#define chacha_mod_init_arch chacha_mod_init_arch
+static void chacha_mod_init_arch(void)
 {
 	if (cpu_has_feature(CPU_FTR_ARCH_31))
 		static_branch_enable(&have_p10);
-	return 0;
 }
-subsys_initcall(chacha_p10_init);
-
-static void __exit chacha_p10_exit(void)
-{
-}
-module_exit(chacha_p10_exit);
-
-MODULE_DESCRIPTION("ChaCha stream cipher (P10 accelerated)");
-MODULE_AUTHOR("Danny Tsen <dtsen@linux.ibm.com>");
-MODULE_LICENSE("GPL v2");

--- a/lib/crypto/riscv/Kconfig
+++ /dev/null
@@ -1,8 +0,0 @@
-# SPDX-License-Identifier: GPL-2.0-only
-
-config CRYPTO_CHACHA_RISCV64
-	tristate
-	depends on 64BIT && RISCV_ISA_V && TOOLCHAIN_HAS_VECTOR_CRYPTO
-	default CRYPTO_LIB_CHACHA
-	select CRYPTO_ARCH_HAVE_LIB_CHACHA
-	select CRYPTO_LIB_CHACHA_GENERIC

--- a/lib/crypto/riscv/Makefile
+++ /dev/null
@@ -1,4 +0,0 @@
-# SPDX-License-Identifier: GPL-2.0-only
-
-obj-$(CONFIG_CRYPTO_CHACHA_RISCV64) += chacha-riscv64.o
-chacha-riscv64-y := chacha-riscv64-glue.o chacha-riscv64-zvkb.o

--- a/lib/crypto/riscv/chacha-riscv64-glue.c
+++ b/lib/crypto/riscv/chacha.h
@@ -1,4 +1,4 @@
-// SPDX-License-Identifier: GPL-2.0-only
+/* SPDX-License-Identifier: GPL-2.0-only */
 /*
  * ChaCha stream cipher (RISC-V optimized)
  *
@@ -8,25 +8,18 @@
 #include <asm/simd.h>
 #include <asm/vector.h>
-#include <crypto/chacha.h>
 #include <crypto/internal/simd.h>
 #include <linux/linkage.h>
-#include <linux/module.h>
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(use_zvkb);
 
 asmlinkage void chacha_zvkb(struct chacha_state *state, const u8 *in, u8 *out,
 			    size_t nblocks, int nrounds);
 
-void hchacha_block_arch(const struct chacha_state *state,
-			u32 out[HCHACHA_OUT_WORDS], int nrounds)
-{
-	hchacha_block_generic(state, out, nrounds);
-}
-EXPORT_SYMBOL(hchacha_block_arch);
+#define hchacha_block_arch hchacha_block_generic /* not implemented yet */
 
-void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
-		       unsigned int bytes, int nrounds)
+static void chacha_crypt_arch(struct chacha_state *state, u8 *dst,
+			      const u8 *src, unsigned int bytes, int nrounds)
 {
 	u8 block_buffer[CHACHA_BLOCK_SIZE];
 	unsigned int full_blocks = bytes / CHACHA_BLOCK_SIZE;
@@ -48,22 +41,11 @@ void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
 	}
 	kernel_vector_end();
 }
-EXPORT_SYMBOL(chacha_crypt_arch);
 
-static int __init riscv64_chacha_mod_init(void)
+#define chacha_mod_init_arch chacha_mod_init_arch
+static void chacha_mod_init_arch(void)
 {
 	if (riscv_isa_extension_available(NULL, ZVKB) &&
 	    riscv_vector_vlen() >= 128)
 		static_branch_enable(&use_zvkb);
-	return 0;
 }
-subsys_initcall(riscv64_chacha_mod_init);
-
-static void __exit riscv64_chacha_mod_exit(void)
-{
-}
-module_exit(riscv64_chacha_mod_exit);
-
-MODULE_DESCRIPTION("ChaCha stream cipher (RISC-V optimized)");
-MODULE_AUTHOR("Jerry Shih <jerry.shih@sifive.com>");
-MODULE_LICENSE("GPL");

--- a/lib/crypto/s390/Kconfig
+++ /dev/null
@@ -1,7 +0,0 @@
-# SPDX-License-Identifier: GPL-2.0-only
-
-config CRYPTO_CHACHA_S390
-	tristate
-	default CRYPTO_LIB_CHACHA
-	select CRYPTO_LIB_CHACHA_GENERIC
-	select CRYPTO_ARCH_HAVE_LIB_CHACHA

--- a/lib/crypto/s390/Makefile
+++ /dev/null
@@ -1,4 +0,0 @@
-# SPDX-License-Identifier: GPL-2.0-only
-
-obj-$(CONFIG_CRYPTO_CHACHA_S390) += chacha_s390.o
-chacha_s390-y := chacha-glue.o chacha-s390.o

--- a/lib/crypto/s390/chacha-glue.c
+++ b/lib/crypto/s390/chacha.h
@@ -1,32 +1,21 @@
-// SPDX-License-Identifier: GPL-2.0
+/* SPDX-License-Identifier: GPL-2.0 */
 /*
  * ChaCha stream cipher (s390 optimized)
  *
  * Copyright IBM Corp. 2021
  */
 
-#define KMSG_COMPONENT "chacha_s390"
-#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt
-
-#include <crypto/chacha.h>
 #include <linux/cpufeature.h>
 #include <linux/export.h>
 #include <linux/kernel.h>
-#include <linux/module.h>
 #include <linux/sizes.h>
 #include <asm/fpu.h>
 #include "chacha-s390.h"
 
-void hchacha_block_arch(const struct chacha_state *state,
-			u32 out[HCHACHA_OUT_WORDS], int nrounds)
-{
-	/* TODO: implement hchacha_block_arch() in assembly */
-	hchacha_block_generic(state, out, nrounds);
-}
-EXPORT_SYMBOL(hchacha_block_arch);
+#define hchacha_block_arch hchacha_block_generic /* not implemented yet */
 
-void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
-		       unsigned int bytes, int nrounds)
+static void chacha_crypt_arch(struct chacha_state *state, u8 *dst,
+			      const u8 *src, unsigned int bytes, int nrounds)
 {
 	/* s390 chacha20 implementation has 20 rounds hard-coded,
 	 * it cannot handle a block of data or less, but otherwise
@@ -45,7 +34,3 @@ void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
 						    CHACHA_BLOCK_SIZE;
 	}
 }
-EXPORT_SYMBOL(chacha_crypt_arch);
-
-MODULE_DESCRIPTION("ChaCha stream cipher (s390 optimized)");
-MODULE_LICENSE("GPL v2");

--- a/lib/crypto/x86/Kconfig
+++ b/lib/crypto/x86/Kconfig
@@ -11,10 +11,3 @@ config CRYPTO_BLAKE2S_X86
 	  Architecture: x86_64 using:
 	  - SSSE3 (Supplemental SSE3)
 	  - AVX-512 (Advanced Vector Extensions-512)
-
-config CRYPTO_CHACHA20_X86_64
-	tristate
-	depends on 64BIT
-	default CRYPTO_LIB_CHACHA
-	select CRYPTO_LIB_CHACHA_GENERIC
-	select CRYPTO_ARCH_HAVE_LIB_CHACHA

--- a/lib/crypto/x86/Makefile
+++ b/lib/crypto/x86/Makefile
@@ -2,6 +2,3 @@
 
 obj-$(CONFIG_CRYPTO_BLAKE2S_X86) += libblake2s-x86_64.o
 libblake2s-x86_64-y := blake2s-core.o blake2s-glue.o
-
-obj-$(CONFIG_CRYPTO_CHACHA20_X86_64) += chacha-x86_64.o
-chacha-x86_64-y := chacha-avx2-x86_64.o chacha-ssse3-x86_64.o chacha-avx512vl-x86_64.o chacha_glue.o

--- a/lib/crypto/x86/chacha_glue.c
+++ b/lib/crypto/x86/chacha.h
@@ -1,4 +1,4 @@
-// SPDX-License-Identifier: GPL-2.0-or-later
+/* SPDX-License-Identifier: GPL-2.0-or-later */
 /*
  * ChaCha and HChaCha functions (x86_64 optimized)
  *
@@ -6,10 +6,8 @@
  */
 
 #include <asm/simd.h>
-#include <crypto/chacha.h>
 #include <linux/jump_label.h>
 #include <linux/kernel.h>
-#include <linux/module.h>
 #include <linux/sizes.h>
 
 asmlinkage void chacha_block_xor_ssse3(const struct chacha_state *state,
@@ -126,8 +124,8 @@ static void chacha_dosimd(struct chacha_state *state, u8 *dst, const u8 *src,
 	}
 }
 
-void hchacha_block_arch(const struct chacha_state *state,
-			u32 out[HCHACHA_OUT_WORDS], int nrounds)
+static void hchacha_block_arch(const struct chacha_state *state,
+			       u32 out[HCHACHA_OUT_WORDS], int nrounds)
 {
 	if (!static_branch_likely(&chacha_use_simd)) {
 		hchacha_block_generic(state, out, nrounds);
@@ -137,10 +135,9 @@ void hchacha_block_arch(const struct chacha_state *state,
 		kernel_fpu_end();
 	}
 }
-EXPORT_SYMBOL(hchacha_block_arch);
 
-void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
-		       unsigned int bytes, int nrounds)
+static void chacha_crypt_arch(struct chacha_state *state, u8 *dst,
+			      const u8 *src, unsigned int bytes, int nrounds)
 {
 	if (!static_branch_likely(&chacha_use_simd) ||
 	    bytes <= CHACHA_BLOCK_SIZE)
@@ -158,12 +155,12 @@ void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
 		dst += todo;
 	} while (bytes);
 }
-EXPORT_SYMBOL(chacha_crypt_arch);
 
-static int __init chacha_simd_mod_init(void)
+#define chacha_mod_init_arch chacha_mod_init_arch
+static void chacha_mod_init_arch(void)
 {
 	if (!boot_cpu_has(X86_FEATURE_SSSE3))
-		return 0;
+		return;
 
 	static_branch_enable(&chacha_use_simd);
@@ -176,15 +173,4 @@ static int __init chacha_simd_mod_init(void)
 		    boot_cpu_has(X86_FEATURE_AVX512BW)) /* kmovq */
 			static_branch_enable(&chacha_use_avx512vl);
 	}
-	return 0;
 }
-subsys_initcall(chacha_simd_mod_init);
-
-static void __exit chacha_simd_mod_exit(void)
-{
-}
-module_exit(chacha_simd_mod_exit);
-
-MODULE_LICENSE("GPL");
-MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
-MODULE_DESCRIPTION("ChaCha and HChaCha functions (x86_64 optimized)");