author     Eric Biggers    2020-12-23 00:09:53 -0800
committer  Herbert Xu      2021-01-03 08:41:38 +1100
commit     057edc9c8bb2d5ff5b058b521792c392428a0714 (patch)
tree       c9eef3ec1b8c2291af5b0ed0dec286a6efac251c /lib/crypto
parent     df412e7efda1e2c5b5fcb06701bba77434cbd1e8 (diff)
crypto: blake2s - move update and final logic to internal/blake2s.h
Move most of blake2s_update() and blake2s_final() into new inline
functions __blake2s_update() and __blake2s_final() in
include/crypto/internal/blake2s.h so that this logic can be shared by
the shash helper functions. This will avoid duplicating this logic
between the library and shash implementations.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Acked-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
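The new inline helpers live in include/crypto/internal/blake2s.h and are not
part of this diff. As a rough sketch, derived by parameterizing the code
removed below over the compress function (the blake2s_compress_t typedef name
is an assumption, not taken from this patch), they would look roughly like:

	typedef void (*blake2s_compress_t)(struct blake2s_state *state,
					   const u8 *block, size_t nblocks,
					   u32 inc);

	static inline void __blake2s_update(struct blake2s_state *state,
					    const u8 *in, size_t inlen,
					    blake2s_compress_t compress)
	{
		const size_t fill = BLAKE2S_BLOCK_SIZE - state->buflen;

		if (unlikely(!inlen))
			return;
		if (inlen > fill) {
			/* Top up the partial block in state->buf and compress it. */
			memcpy(state->buf + state->buflen, in, fill);
			(*compress)(state, state->buf, 1, BLAKE2S_BLOCK_SIZE);
			state->buflen = 0;
			in += fill;
			inlen -= fill;
		}
		if (inlen > BLAKE2S_BLOCK_SIZE) {
			const size_t nblocks = DIV_ROUND_UP(inlen, BLAKE2S_BLOCK_SIZE);
			/* Hash one less (full) block than strictly possible. */
			(*compress)(state, in, nblocks - 1, BLAKE2S_BLOCK_SIZE);
			in += BLAKE2S_BLOCK_SIZE * (nblocks - 1);
			inlen -= BLAKE2S_BLOCK_SIZE * (nblocks - 1);
		}
		/* Buffer whatever is left for the next update or for final. */
		memcpy(state->buf + state->buflen, in, inlen);
		state->buflen += inlen;
	}

	static inline void __blake2s_final(struct blake2s_state *state, u8 *out,
					   blake2s_compress_t compress)
	{
		blake2s_set_lastblock(state);
		memset(state->buf + state->buflen, 0,
		       BLAKE2S_BLOCK_SIZE - state->buflen); /* Padding */
		(*compress)(state, state->buf, 1, state->buflen);
		cpu_to_le32_array(state->h, ARRAY_SIZE(state->h));
		memcpy(out, state->h, state->outlen);
	}

Because the compress argument is a compile-time constant in each caller, the
compiler can inline the helper and resolve the call directly, so the function
pointer costs nothing at run time.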
Diffstat (limited to 'lib/crypto')
-rw-r--r--   lib/crypto/blake2s.c   48
1 file changed, 8 insertions(+), 40 deletions(-)
diff --git a/lib/crypto/blake2s.c b/lib/crypto/blake2s.c
index 6a4b6b78d630..c64ac8bfb6a9 100644
--- a/lib/crypto/blake2s.c
+++ b/lib/crypto/blake2s.c
@@ -15,55 +15,23 @@
#include <linux/module.h>
#include <linux/init.h>
#include <linux/bug.h>
-#include <asm/unaligned.h>
+
+#if IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_BLAKE2S)
+# define blake2s_compress blake2s_compress_arch
+#else
+# define blake2s_compress blake2s_compress_generic
+#endif
void blake2s_update(struct blake2s_state *state, const u8 *in, size_t inlen)
{
- const size_t fill = BLAKE2S_BLOCK_SIZE - state->buflen;
-
- if (unlikely(!inlen))
- return;
- if (inlen > fill) {
- memcpy(state->buf + state->buflen, in, fill);
- if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_BLAKE2S))
- blake2s_compress_arch(state, state->buf, 1,
- BLAKE2S_BLOCK_SIZE);
- else
- blake2s_compress_generic(state, state->buf, 1,
- BLAKE2S_BLOCK_SIZE);
- state->buflen = 0;
- in += fill;
- inlen -= fill;
- }
- if (inlen > BLAKE2S_BLOCK_SIZE) {
- const size_t nblocks = DIV_ROUND_UP(inlen, BLAKE2S_BLOCK_SIZE);
- /* Hash one less (full) block than strictly possible */
- if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_BLAKE2S))
- blake2s_compress_arch(state, in, nblocks - 1,
- BLAKE2S_BLOCK_SIZE);
- else
- blake2s_compress_generic(state, in, nblocks - 1,
- BLAKE2S_BLOCK_SIZE);
- in += BLAKE2S_BLOCK_SIZE * (nblocks - 1);
- inlen -= BLAKE2S_BLOCK_SIZE * (nblocks - 1);
- }
- memcpy(state->buf + state->buflen, in, inlen);
- state->buflen += inlen;
+ __blake2s_update(state, in, inlen, blake2s_compress);
}
EXPORT_SYMBOL(blake2s_update);
void blake2s_final(struct blake2s_state *state, u8 *out)
{
WARN_ON(IS_ENABLED(DEBUG) && !out);
- blake2s_set_lastblock(state);
- memset(state->buf + state->buflen, 0,
- BLAKE2S_BLOCK_SIZE - state->buflen); /* Padding */
- if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_BLAKE2S))
- blake2s_compress_arch(state, state->buf, 1, state->buflen);
- else
- blake2s_compress_generic(state, state->buf, 1, state->buflen);
- cpu_to_le32_array(state->h, ARRAY_SIZE(state->h));
- memcpy(out, state->h, state->outlen);
+ __blake2s_final(state, out, blake2s_compress);
memzero_explicit(state, sizeof(*state));
}
EXPORT_SYMBOL(blake2s_final);
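For reference, the shash side mentioned in the commit message can then reuse
the same helper. A hypothetical ->update() hook (the function name and glue
are illustrative, not part of this patch) might look like:

	static int crypto_blake2s_update_generic(struct shash_desc *desc,
						 const u8 *in, unsigned int inlen)
	{
		struct blake2s_state *state = shash_desc_ctx(desc);

		/* Same buffering logic as the library, via the shared helper. */
		__blake2s_update(state, in, inlen, blake2s_compress_generic);
		return 0;
	}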