crypto: x86/chacha - Remove SIMD fallback path

Get rid of the fallback path as SIMD is now always usable in softirq
context.
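
For illustration, each converted helper goes from a runtime dispatch to an
unconditional kernel_fpu_begin()/kernel_fpu_end() section, roughly as follows
(a simplified sketch of the pattern visible in the diff below, not the
verbatim source):

        /* Before: fall back to the generic code when SIMD cannot be used. */
        if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable()) {
                hchacha_block_generic(state, stream, nrounds);
        } else {
                kernel_fpu_begin();
                hchacha_block_ssse3(state, stream, nrounds);
                kernel_fpu_end();
        }

        /* After: SIMD is always usable here, so enter the FPU section directly. */
        kernel_fpu_begin();
        hchacha_block_ssse3(state, stream, nrounds);
        kernel_fpu_end();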

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Herbert Xu 2025-04-03 12:14:50 +08:00
parent f98ed0dd58
commit 9b4400215e
1 changed file with 11 additions and 35 deletions

@@ -6,9 +6,7 @@
  * Copyright (C) 2015 Martin Willi
  */
 
-#include <crypto/algapi.h>
 #include <crypto/internal/chacha.h>
-#include <crypto/internal/simd.h>
 #include <crypto/internal/skcipher.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
@@ -35,7 +33,6 @@ asmlinkage void chacha_4block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
 asmlinkage void chacha_8block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
                                            unsigned int len, int nrounds);
 
-static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_simd);
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_avx2);
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_avx512vl);
 
@@ -123,23 +120,15 @@ static void chacha_dosimd(u32 *state, u8 *dst, const u8 *src,
 
 void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
 {
-        if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable()) {
-                hchacha_block_generic(state, stream, nrounds);
-        } else {
-                kernel_fpu_begin();
-                hchacha_block_ssse3(state, stream, nrounds);
-                kernel_fpu_end();
-        }
+        kernel_fpu_begin();
+        hchacha_block_ssse3(state, stream, nrounds);
+        kernel_fpu_end();
 }
 EXPORT_SYMBOL(hchacha_block_arch);
 
 void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
                        int nrounds)
 {
-        if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable() ||
-            bytes <= CHACHA_BLOCK_SIZE)
-                return chacha_crypt_generic(state, dst, src, bytes, nrounds);
-
         do {
                 unsigned int todo = min_t(unsigned int, bytes, SZ_4K);
 
@@ -171,18 +160,11 @@ static int chacha_simd_stream_xor(struct skcipher_request *req,
                 if (nbytes < walk.total)
                         nbytes = round_down(nbytes, walk.stride);
 
-                if (!static_branch_likely(&chacha_use_simd) ||
-                    !crypto_simd_usable()) {
-                        chacha_crypt_generic(state, walk.dst.virt.addr,
-                                             walk.src.virt.addr, nbytes,
-                                             ctx->nrounds);
-                } else {
-                        kernel_fpu_begin();
-                        chacha_dosimd(state, walk.dst.virt.addr,
-                                      walk.src.virt.addr, nbytes,
-                                      ctx->nrounds);
-                        kernel_fpu_end();
-                }
+                kernel_fpu_begin();
+                chacha_dosimd(state, walk.dst.virt.addr,
+                              walk.src.virt.addr, nbytes,
+                              ctx->nrounds);
+                kernel_fpu_end();
 
                 err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
         }
@@ -207,13 +189,9 @@ static int xchacha_simd(struct skcipher_request *req)
 
         chacha_init(state, ctx->key, req->iv);
 
-        if (req->cryptlen > CHACHA_BLOCK_SIZE && crypto_simd_usable()) {
-                kernel_fpu_begin();
-                hchacha_block_ssse3(state, subctx.key, ctx->nrounds);
-                kernel_fpu_end();
-        } else {
-                hchacha_block_generic(state, subctx.key, ctx->nrounds);
-        }
+        kernel_fpu_begin();
+        hchacha_block_ssse3(state, subctx.key, ctx->nrounds);
+        kernel_fpu_end();
         subctx.nrounds = ctx->nrounds;
 
         memcpy(&real_iv[0], req->iv + 24, 8);
@@ -275,8 +253,6 @@ static int __init chacha_simd_mod_init(void)
         if (!boot_cpu_has(X86_FEATURE_SSSE3))
                 return 0;
 
-        static_branch_enable(&chacha_use_simd);
-
         if (boot_cpu_has(X86_FEATURE_AVX) &&
             boot_cpu_has(X86_FEATURE_AVX2) &&
             cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {