crypto: x86/poly1305 - remove redundant shash algorithm

Since crypto/poly1305.c now registers a poly1305-$(ARCH) shash algorithm
that uses the architecture's Poly1305 library functions, individual
architectures no longer need to do the same.  Therefore, remove the
redundant shash algorithm from the arch-specific code and leave just the
library functions there.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Author: Eric Biggers, 2025-04-12 21:54:19 -07:00
Committer: Herbert Xu
Commit: 21969da642 (parent: 632dcef5bb)
2 changed files with 7 additions and 97 deletions
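
For context, the shash that replaces this per-arch glue is a thin wrapper over the library entry points the x86 code keeps exporting (poly1305_init_arch, poly1305_update_arch, poly1305_final_arch). The sketch below illustrates that wrapper pattern only; it is not the exact code in crypto/poly1305.c, and the struct poly1305_shash_ctx / poly1305_shash_* names are invented here. Because the "poly1305" shash has no setkey, the 32-byte one-time key arrives as the first bytes of the message stream and has to be buffered before the library init call:

#include <crypto/internal/hash.h>
#include <crypto/internal/poly1305.h>
#include <linux/errno.h>
#include <linux/module.h>

/* Hypothetical wrapper state: the library context plus the in-band key. */
struct poly1305_shash_ctx {
        struct poly1305_desc_ctx base;
        u8 key[POLY1305_KEY_SIZE];
        unsigned int keylen;            /* key bytes consumed from the stream */
};

static int poly1305_shash_init(struct shash_desc *desc)
{
        struct poly1305_shash_ctx *ctx = shash_desc_ctx(desc);

        *ctx = (struct poly1305_shash_ctx){};
        return 0;
}

static int poly1305_shash_update(struct shash_desc *desc,
                                 const u8 *src, unsigned int len)
{
        struct poly1305_shash_ctx *ctx = shash_desc_ctx(desc);

        /* Consume the one-time key from the front of the message stream. */
        while (ctx->keylen < POLY1305_KEY_SIZE && len) {
                ctx->key[ctx->keylen++] = *src++;
                len--;
                if (ctx->keylen == POLY1305_KEY_SIZE)
                        poly1305_init_arch(&ctx->base, ctx->key);
        }
        if (len)
                poly1305_update_arch(&ctx->base, src, len);
        return 0;
}

static int poly1305_shash_final(struct shash_desc *desc, u8 *out)
{
        struct poly1305_shash_ctx *ctx = shash_desc_ctx(desc);

        if (ctx->keylen < POLY1305_KEY_SIZE)
                return -ENOKEY;         /* key was never fully supplied */
        poly1305_final_arch(&ctx->base, out);
        return 0;
}

static struct shash_alg poly1305_arch_alg = {
        .digestsize             = POLY1305_DIGEST_SIZE,
        .init                   = poly1305_shash_init,
        .update                 = poly1305_shash_update,
        .final                  = poly1305_shash_final,
        .descsize               = sizeof(struct poly1305_shash_ctx),
        .base.cra_name          = "poly1305",
        .base.cra_driver_name   = "poly1305-x86_64",    /* the "poly1305-$(ARCH)" name */
        .base.cra_priority      = 300,
        .base.cra_blocksize     = POLY1305_BLOCK_SIZE,
        .base.cra_module        = THIS_MODULE,
};

Registration then amounts to a single crypto_register_shash(&poly1305_arch_alg) call from generic code, which is why the arch module's own shash (and its select CRYPTO_HASH) can be dropped below.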

arch/x86/crypto/Kconfig

@@ -399,16 +399,9 @@ config CRYPTO_POLYVAL_CLMUL_NI
config CRYPTO_POLY1305_X86_64
tristate
depends on X86 && 64BIT
select CRYPTO_HASH
select CRYPTO_LIB_POLY1305_GENERIC
select CRYPTO_ARCH_HAVE_LIB_POLY1305
default CRYPTO_LIB_POLY1305_INTERNAL
help
Poly1305 authenticator algorithm (RFC7539)
Architecture: x86_64 using:
- SSE2 (Streaming SIMD Extensions 2)
- AVX2 (Advanced Vector Extensions 2)
config CRYPTO_SHA1_SSSE3
tristate "Hash functions: SHA-1 (SSSE3/AVX/AVX2/SHA-NI)"

arch/x86/crypto/poly1305_glue.c

@@ -3,15 +3,13 @@
* Copyright (C) 2015-2019 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
*/
#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/poly1305.h>
#include <crypto/internal/simd.h>
#include <linux/crypto.h>
#include <crypto/poly1305.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sizes.h>
#include <linux/unaligned.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>
@@ -137,38 +135,13 @@ void poly1305_init_arch(struct poly1305_desc_ctx *dctx, const u8 key[POLY1305_KEY_SIZE])
dctx->s[2] = get_unaligned_le32(&key[24]);
dctx->s[3] = get_unaligned_le32(&key[28]);
dctx->buflen = 0;
dctx->sset = true;
}
EXPORT_SYMBOL(poly1305_init_arch);
static unsigned int crypto_poly1305_setdctxkey(struct poly1305_desc_ctx *dctx,
const u8 *inp, unsigned int len)
{
unsigned int acc = 0;
if (unlikely(!dctx->sset)) {
if (!dctx->rset && len >= POLY1305_BLOCK_SIZE) {
poly1305_simd_init(&dctx->h, inp);
inp += POLY1305_BLOCK_SIZE;
len -= POLY1305_BLOCK_SIZE;
acc += POLY1305_BLOCK_SIZE;
dctx->rset = 1;
}
if (len >= POLY1305_BLOCK_SIZE) {
dctx->s[0] = get_unaligned_le32(&inp[0]);
dctx->s[1] = get_unaligned_le32(&inp[4]);
dctx->s[2] = get_unaligned_le32(&inp[8]);
dctx->s[3] = get_unaligned_le32(&inp[12]);
acc += POLY1305_BLOCK_SIZE;
dctx->sset = true;
}
}
return acc;
}
void poly1305_update_arch(struct poly1305_desc_ctx *dctx, const u8 *src,
unsigned int srclen)
{
unsigned int bytes, used;
unsigned int bytes;
if (unlikely(dctx->buflen)) {
bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen);
@@ -178,19 +151,16 @@ void poly1305_update_arch(struct poly1305_desc_ctx *dctx, const u8 *src,
dctx->buflen += bytes;
if (dctx->buflen == POLY1305_BLOCK_SIZE) {
if (likely(!crypto_poly1305_setdctxkey(dctx, dctx->buf, POLY1305_BLOCK_SIZE)))
poly1305_simd_blocks(&dctx->h, dctx->buf, POLY1305_BLOCK_SIZE, 1);
poly1305_simd_blocks(&dctx->h, dctx->buf, POLY1305_BLOCK_SIZE, 1);
dctx->buflen = 0;
}
}
if (likely(srclen >= POLY1305_BLOCK_SIZE)) {
bytes = round_down(srclen, POLY1305_BLOCK_SIZE);
srclen -= bytes;
used = crypto_poly1305_setdctxkey(dctx, src, bytes);
if (likely(bytes - used))
poly1305_simd_blocks(&dctx->h, src + used, bytes - used, 1);
poly1305_simd_blocks(&dctx->h, src, bytes, 1);
src += bytes;
srclen -= bytes;
}
if (unlikely(srclen)) {
@@ -214,49 +184,6 @@ void poly1305_final_arch(struct poly1305_desc_ctx *dctx, u8 *dst)
}
EXPORT_SYMBOL(poly1305_final_arch);
static int crypto_poly1305_init(struct shash_desc *desc)
{
struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
*dctx = (struct poly1305_desc_ctx){};
return 0;
}
static int crypto_poly1305_update(struct shash_desc *desc,
const u8 *src, unsigned int srclen)
{
struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
poly1305_update_arch(dctx, src, srclen);
return 0;
}
static int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
{
struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
if (unlikely(!dctx->sset))
return -ENOKEY;
poly1305_final_arch(dctx, dst);
return 0;
}
static struct shash_alg alg = {
.digestsize = POLY1305_DIGEST_SIZE,
.init = crypto_poly1305_init,
.update = crypto_poly1305_update,
.final = crypto_poly1305_final,
.descsize = sizeof(struct poly1305_desc_ctx),
.base = {
.cra_name = "poly1305",
.cra_driver_name = "poly1305-simd",
.cra_priority = 300,
.cra_blocksize = POLY1305_BLOCK_SIZE,
.cra_module = THIS_MODULE,
},
};
bool poly1305_is_arch_optimized(void)
{
return static_key_enabled(&poly1305_use_avx);
@@ -277,20 +204,10 @@ static int __init poly1305_simd_mod_init(void)
/* Skylake downclocks unacceptably much when using zmm, but later generations are fast. */
boot_cpu_data.x86_vfm != INTEL_SKYLAKE_X)
static_branch_enable(&poly1305_use_avx512);
return IS_REACHABLE(CONFIG_CRYPTO_HASH) ? crypto_register_shash(&alg) : 0;
return 0;
}
static void __exit poly1305_simd_mod_exit(void)
{
if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
crypto_unregister_shash(&alg);
}
arch_initcall(poly1305_simd_mod_init);
module_exit(poly1305_simd_mod_exit);
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Jason A. Donenfeld <Jason@zx2c4.com>");
MODULE_DESCRIPTION("Poly1305 authenticator");
MODULE_ALIAS_CRYPTO("poly1305");
MODULE_ALIAS_CRYPTO("poly1305-simd");
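
With the shash registration gone, what remains in this file is only the library backend. Kernel users reach it through the inline wrappers in <crypto/poly1305.h>, which call the *_arch functions kept above whenever CONFIG_CRYPTO_ARCH_HAVE_LIB_POLY1305 is enabled. A minimal usage sketch follows; compute_poly1305_tag() is a made-up helper for illustration, not kernel API:

#include <crypto/poly1305.h>
#include <linux/string.h>

/* Illustrative only: one-shot Poly1305 tag via the library interface. */
static void compute_poly1305_tag(const u8 key[POLY1305_KEY_SIZE],
                                 const u8 *msg, unsigned int len,
                                 u8 tag[POLY1305_DIGEST_SIZE])
{
        struct poly1305_desc_ctx desc;

        poly1305_init(&desc, key);      /* 32-byte one-time key */
        poly1305_update(&desc, msg, len);
        poly1305_final(&desc, tag);     /* 16-byte authenticator */
        memzero_explicit(&desc, sizeof(desc));
}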