Fixes: out of array access
Fixes: 29345/clusterfuzz-testcase-minimized-ffmpeg_AV_CODEC_ID_HAP_fuzzer-5401813482340352
Fixes: 30745/clusterfuzz-testcase-minimized-ffmpeg_AV_CODEC_ID_HAP_fuzzer-5762798221131776
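
For illustration only (not part of the patch): a minimal standalone sketch of
the unsigned wraparound the widened comparison below guards against, relevant
where the 32-bit offset is added to a 32-bit size (e.g. size_t on 32-bit
targets). All names and values here are hypothetical; only the widening cast
mirrors what the patch does.

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t offset = 0xffffff00u; /* hypothetical running offset built from untrusted chunk sizes */
        uint32_t size   = 0x200u;      /* hypothetical untrusted chunk size */
        uint32_t left   = 0x100u;      /* bytes left in the input buffer */

        /* The 32-bit sum 0xffffff00 + 0x200 wraps to 0x100, so the bounds
         * check wrongly passes and a later read runs past the buffer: */
        if (offset + size > left)
            puts("rejected");
        else
            puts("accepted despite wraparound -> out of array access");

        /* Widening one operand before the add, as the patch does, keeps the
         * comparison sound: 0x100000100 > 0x100 is correctly rejected. */
        if (offset + (uint64_t)size > left)
            puts("rejected with 64-bit arithmetic");

        return 0;
    }
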
Suggested-by: Anton
Found-by: continuous fuzzing process https://github.com/google/oss-fuzz/tree/master/projects/ffmpeg
Signed-off-by: Michael Niedermayer <mich...@niedermayer.cc>
---
 libavcodec/hap.h    | 2 +-
 libavcodec/hapdec.c | 4 +++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/libavcodec/hap.h b/libavcodec/hap.h
index bbeed11e32..00c3dbb32d 100644
--- a/libavcodec/hap.h
+++ b/libavcodec/hap.h
@@ -52,7 +52,7 @@ enum HapSectionType {
 
 typedef struct HapChunk {
     enum HapCompressor compressor;
-    int compressed_offset;
+    uint32_t compressed_offset;
     size_t compressed_size;
     int uncompressed_offset;
     size_t uncompressed_size;
diff --git a/libavcodec/hapdec.c b/libavcodec/hapdec.c
index ab364aa790..692bb6e750 100644
--- a/libavcodec/hapdec.c
+++ b/libavcodec/hapdec.c
@@ -105,6 +105,8 @@ static int hap_parse_decode_instructions(HapContext *ctx, int size)
         size_t running_size = 0;
         for (i = 0; i < ctx->chunk_count; i++) {
             ctx->chunks[i].compressed_offset = running_size;
+            if (ctx->chunks[i].compressed_size > UINT32_MAX - running_size)
+                return AVERROR_INVALIDDATA;
             running_size += ctx->chunks[i].compressed_size;
         }
     }
@@ -186,7 +188,7 @@ static int hap_parse_frame_header(AVCodecContext *avctx)
         HapChunk *chunk = &ctx->chunks[i];
 
         /* Check the compressed buffer is valid */
-        if (chunk->compressed_offset + chunk->compressed_size > bytestream2_get_bytes_left(gbc))
+        if (chunk->compressed_offset + (uint64_t)chunk->compressed_size > bytestream2_get_bytes_left(gbc))
             return AVERROR_INVALIDDATA;
 
         /* Chunks are unpacked sequentially, ctx->tex_size is the uncompressed
-- 
2.17.1