linux-yocto/include/crypto/sha256_base.h
Breno Leitao 5d646a7632 crypto: sha256 - fix crash at kexec
Loading a large (~2.1G) file with kexec crashes the host when
running:

  # kexec --load kernel --initrd initrd_with_2G_or_more

  UBSAN: signed-integer-overflow in ./include/crypto/sha256_base.h:64:19
  34152083 * 64 cannot be represented in type 'int'
  ...
  BUG: unable to handle page fault for address: ff9fffff83b624c0
  sha256_update (lib/crypto/sha256.c:137)
  crypto_sha256_update (crypto/sha256_generic.c:40)
  kexec_calculate_store_digests (kernel/kexec_file.c:769)
  __se_sys_kexec_file_load (kernel/kexec_file.c:397 kernel/kexec_file.c:332)
  ...

(Line numbers based on commit da274362a7 ("Linux 6.12.49"))

This started happening after commit f4da7afe07
("kexec_file: increase maximum file size to 4G") that landed in v6.0,
which increased the file size for kexec.

This is not happening upstream (v6.16+), given that the `blocks` type
was upgraded from "int" to "size_t" in commit 74a43a2cf5 ("crypto:
lib/sha256 - Move partial block handling out")

Upgrade the block type similar to the commit above, avoiding hitting the
overflow.

This patch is only suitable for the stable tree, and before 6.16, which
got commit 74a43a2cf5 ("crypto: lib/sha256 - Move partial block
handling out"). This is not required before f4da7afe07 ("kexec_file:
increase maximum file size to 4G"). In other words, this fix is required
between versions v6.0 and v6.16.

Signed-off-by: Breno Leitao <leitao@debian.org>
Fixes: f4da7afe07 ("kexec_file: increase maximum file size to 4G") # Before v6.16
Reported-by: Michael van der Westhuizen <rmikey@meta.com>
Reported-by: Tobias Fleig <tfleig@meta.com>
Reviewed-by: Eric Biggers <ebiggers@kernel.org>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
2025-10-06 11:16:59 +02:00

136 lines
3.1 KiB
C

/* SPDX-License-Identifier: GPL-2.0-only */
/*
* sha256_base.h - core logic for SHA-256 implementations
*
* Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
*/
#ifndef _CRYPTO_SHA256_BASE_H
#define _CRYPTO_SHA256_BASE_H
#include <asm/byteorder.h>
#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <crypto/sha2.h>
#include <linux/string.h>
#include <linux/types.h>
/*
 * Architecture-specific SHA-256 compression routine: folds 'blocks'
 * consecutive SHA256_BLOCK_SIZE-byte blocks from 'src' into the hash
 * state in 'sst'.
 */
typedef void (sha256_block_fn)(struct sha256_state *sst, u8 const *src,
int blocks);
/*
 * Initialise the shash descriptor context for a SHA-224 computation.
 * Always returns 0.
 */
static inline int sha224_base_init(struct shash_desc *desc)
{
	sha224_init(shash_desc_ctx(desc));
	return 0;
}
/*
 * Initialise the shash descriptor context for a SHA-256 computation.
 * Always returns 0.
 */
static inline int sha256_base_init(struct shash_desc *desc)
{
	sha256_init(shash_desc_ctx(desc));
	return 0;
}
/*
 * Buffered update step shared by all SHA-256 implementations.
 *
 * Appends @len bytes from @data to the running hash in @sctx: complete
 * SHA256_BLOCK_SIZE-byte blocks are fed to @block_fn, and any trailing
 * remainder is stashed in sctx->buf for the next update/finalize call.
 * Always returns 0.
 */
static inline int lib_sha256_base_do_update(struct sha256_state *sctx,
const u8 *data,
unsigned int len,
sha256_block_fn *block_fn)
{
/* Bytes already buffered from a previous partial block. */
unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
/* Running byte count; finalize derives the padded bit length from it. */
sctx->count += len;
if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {
/*
 * Unsigned so 'blocks * SHA256_BLOCK_SIZE' below cannot hit
 * signed-integer overflow on multi-gigabyte inputs (the kexec
 * crash described in the commit message).
 */
unsigned int blocks;
if (partial) {
/* Top up the buffered partial block and consume it first. */
int p = SHA256_BLOCK_SIZE - partial;
memcpy(sctx->buf + partial, data, p);
data += p;
len -= p;
block_fn(sctx, sctx->buf, 1);
}
blocks = len / SHA256_BLOCK_SIZE;
len %= SHA256_BLOCK_SIZE;
if (blocks) {
/* Hash all whole blocks straight from the caller's buffer. */
block_fn(sctx, data, blocks);
data += blocks * SHA256_BLOCK_SIZE;
}
partial = 0;
}
if (len)
/* Stash the remaining tail bytes for the next call. */
memcpy(sctx->buf + partial, data, len);
return 0;
}
/*
 * shash glue for the update step: fetch the sha256_state from the
 * descriptor context and delegate to the shared library helper.
 */
static inline int sha256_base_do_update(struct shash_desc *desc,
					const u8 *data,
					unsigned int len,
					sha256_block_fn *block_fn)
{
	return lib_sha256_base_do_update(shash_desc_ctx(desc), data, len,
					 block_fn);
}
/*
 * Shared finalization step: applies the SHA-256 padding (a 0x80 byte,
 * zero fill, then the 64-bit big-endian message bit count in the last
 * 8 bytes of the final block) and runs @block_fn over the resulting
 * block(s).  Always returns 0; the digest remains in sctx->state for
 * lib_sha256_base_finish() to emit.
 */
static inline int lib_sha256_base_do_finalize(struct sha256_state *sctx,
sha256_block_fn *block_fn)
{
/* Offset where the 64-bit length field starts in the final block. */
const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64);
__be64 *bits = (__be64 *)(sctx->buf + bit_offset);
unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
/* Mandatory 0x80 terminator byte directly after the message data. */
sctx->buf[partial++] = 0x80;
if (partial > bit_offset) {
/* No room left for the length field: pad out and emit an extra block. */
memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial);
partial = 0;
block_fn(sctx, sctx->buf, 1);
}
memset(sctx->buf + partial, 0x0, bit_offset - partial);
/* Message length in bits (count is bytes, hence << 3), big-endian. */
*bits = cpu_to_be64(sctx->count << 3);
block_fn(sctx, sctx->buf, 1);
return 0;
}
/*
 * shash glue for the finalize step: delegate to the shared library
 * helper operating on the descriptor's sha256_state.
 */
static inline int sha256_base_do_finalize(struct shash_desc *desc,
					  sha256_block_fn *block_fn)
{
	return lib_sha256_base_do_finalize(shash_desc_ctx(desc), block_fn);
}
/*
 * Copy the first @digest_size bytes of the hash state to @out as
 * big-endian words (32 for SHA-256, 28 for SHA-224), then scrub the
 * state.  Always returns 0.
 */
static inline int lib_sha256_base_finish(struct sha256_state *sctx, u8 *out,
					 unsigned int digest_size)
{
	__be32 *dst = (__be32 *)out;
	unsigned int word = 0;

	while (digest_size > 0) {
		put_unaligned_be32(sctx->state[word++], dst++);
		digest_size -= sizeof(__be32);
	}
	/* Wipe the state so no message-derived material lingers in memory. */
	memzero_explicit(sctx, sizeof(*sctx));
	return 0;
}
/*
 * shash glue for emitting the digest: the output length comes from the
 * tfm's advertised digest size (SHA-224 vs SHA-256).
 */
static inline int sha256_base_finish(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	return lib_sha256_base_finish(sctx, out,
				      crypto_shash_digestsize(desc->tfm));
}
#endif /* _CRYPTO_SHA256_BASE_H */