| author | Remi Collet <remi@remirepo.net> | 2023-09-28 15:19:44 +0200 |
|---|---|---|
| committer | Remi Collet <remi@php.net> | 2023-09-28 15:19:44 +0200 |
| commit | 96889ef71d873655037141792485f64724372c45 (patch) | |
| tree | be19ea7bdbe99a3198157468d58899ea556b77e9 | |
| parent | addce67fb4d8a2ab9b373e6a1566b83cb0569b99 (diff) | |
add upstream patches with fix for CVE-2023-4863
| -rw-r--r-- | libwebp-upstream.patch | 765 |
| -rw-r--r-- | libwebp.spec | 12 |
2 files changed, 775 insertions, 2 deletions
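For context before the diff itself: patch 9 below is the actual fix for CVE-2023-4863. BuildHuffmanTable could be driven, by valid but unbalanced code lengths, to write second-level lookup tables past the fixed-size buffer pre-allocated for them, so the decoder now measures the required size first and, when the pre-allocated segment is too small, chains an additional segment instead of overflowing. The following is a minimal, hypothetical sketch of that grow-on-demand pattern; the names (`Entry`, `Segment`, `Reserve`) are invented for illustration and do not match libwebp's `HuffmanTables` API.

```c
#include <stdio.h>
#include <stdlib.h>

/* Simplified sketch (invented names, not libwebp code) of the storage scheme
 * introduced by patch 9: lookup tables live in chained segments, and a new
 * segment is allocated when the next table would not fit, instead of writing
 * past the original fixed-size buffer (the CVE-2023-4863 overflow). */

typedef struct { unsigned char bits; unsigned short value; } Entry;

typedef struct Segment {
  Entry* start;          /* backing storage for this segment      */
  Entry* curr;           /* next free slot                        */
  int size;              /* capacity in entries                   */
  struct Segment* next;  /* later segments chained after the root */
} Segment;

static Segment* NewSegment(int size) {
  Segment* const s = (Segment*)calloc(1, sizeof(*s));
  if (s == NULL) return NULL;
  s->start = (Entry*)calloc((size_t)size, sizeof(*s->start));
  if (s->start == NULL) { free(s); return NULL; }
  s->curr = s->start;
  s->size = size;
  return s;
}

/* Reserve 'needed' entries, chaining a fresh segment when the current one is
 * too small; returns the segment that now has room, or NULL on OOM. */
static Segment* Reserve(Segment* seg, int needed) {
  if (seg->curr + needed <= seg->start + seg->size) return seg;
  seg->next = NewSegment(needed > seg->size ? needed : seg->size);
  return seg->next;
}

int main(void) {
  Segment* const root = NewSegment(64);
  Segment* cur = root;
  if (root == NULL) return 1;
  cur = Reserve(cur, 200);  /* too big for the root: grows into a new segment */
  printf("grew into a %d-entry segment\n", cur ? cur->size : -1);
  /* cleanup omitted for brevity */
  return 0;
}
```

In the real patch, VP8LBuildHuffmanTable() first calls BuildHuffmanTable() with a NULL output table to obtain total_size, and VP8LHuffmanTablesAllocate()/VP8LHuffmanTablesDeallocate() manage the root segment and the chain.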
diff --git a/libwebp-upstream.patch b/libwebp-upstream.patch new file mode 100644 index 0000000..8f99130 --- /dev/null +++ b/libwebp-upstream.patch @@ -0,0 +1,765 @@ +From 8bac4561bf724ae7ad04f0709c205774a804ccda Mon Sep 17 00:00:00 2001 +From: Skal <skal@google.com> +Date: Fri, 30 Aug 2019 14:13:28 +0200 +Subject: [PATCH 1/9] bugfix: last alpha rows were incorrectly decoded + +sometimes, the last rows of the alpha plane contain more than NUM_ARGB_CACHE_ROWS +rows to process. But ExtractAlphaRows() was repeatedly calling ApplyInverseTransforms() +without updating the dec->last_row_ field, which is the starting row used as starting +point. + +Fix would consist of either updating correctly dec->last_row_ before calling +ApplyInverseTransforms(). Or pass the starting row explicitly, which is simpler. + +BUG=webp:439 + +Change-Id: Id99f2c28662d02b2b866cb79e666050be9d59e04 +(cherry picked from commit 0e48d889eb90363c6ebf8ea276bae4d892f9d3c0) +--- + src/dec/vp8l_dec.c | 10 +++++----- + 1 file changed, 5 insertions(+), 5 deletions(-) + +diff --git a/src/dec/vp8l_dec.c b/src/dec/vp8l_dec.c +index d3e27119..93615d4e 100644 +--- a/src/dec/vp8l_dec.c ++++ b/src/dec/vp8l_dec.c +@@ -754,11 +754,11 @@ static WEBP_INLINE HTreeGroup* GetHtreeGroupForPos(VP8LMetadata* const hdr, + + typedef void (*ProcessRowsFunc)(VP8LDecoder* const dec, int row); + +-static void ApplyInverseTransforms(VP8LDecoder* const dec, int num_rows, ++static void ApplyInverseTransforms(VP8LDecoder* const dec, ++ int start_row, int num_rows, + const uint32_t* const rows) { + int n = dec->next_transform_; + const int cache_pixs = dec->width_ * num_rows; +- const int start_row = dec->last_row_; + const int end_row = start_row + num_rows; + const uint32_t* rows_in = rows; + uint32_t* const rows_out = dec->argb_cache_; +@@ -789,8 +789,7 @@ static void ProcessRows(VP8LDecoder* const dec, int row) { + VP8Io* const io = dec->io_; + uint8_t* rows_data = (uint8_t*)dec->argb_cache_; + const int in_stride = io->width * sizeof(uint32_t); // in unit of RGBA +- +- ApplyInverseTransforms(dec, num_rows, rows); ++ ApplyInverseTransforms(dec, dec->last_row_, num_rows, rows); + if (!SetCropWindow(io, dec->last_row_, row, &rows_data, in_stride)) { + // Nothing to output (this time). + } else { +@@ -1193,6 +1192,7 @@ static int DecodeImageData(VP8LDecoder* const dec, uint32_t* const data, + VP8LFillBitWindow(br); + dist_code = GetCopyDistance(dist_symbol, br); + dist = PlaneCodeToDistance(width, dist_code); ++ + if (VP8LIsEndOfStream(br)) break; + if (src - data < (ptrdiff_t)dist || src_end - src < (ptrdiff_t)length) { + goto Error; +@@ -1553,7 +1553,7 @@ static void ExtractAlphaRows(VP8LDecoder* const dec, int last_row) { + const int cache_pixs = width * num_rows_to_process; + uint8_t* const dst = output + width * cur_row; + const uint32_t* const src = dec->argb_cache_; +- ApplyInverseTransforms(dec, num_rows_to_process, in); ++ ApplyInverseTransforms(dec, cur_row, num_rows_to_process, in); + WebPExtractGreen(src, dst, cache_pixs); + AlphaApplyFilter(alph_dec, + cur_row, cur_row + num_rows_to_process, dst, width); +-- +2.41.0 + +From 7cc4fd61151dd4795a1e1de3cad8d41df1ffacea Mon Sep 17 00:00:00 2001 +From: Vincent Rabaud <vrabaud@google.com> +Date: Tue, 1 Mar 2022 13:38:29 +0100 +Subject: [PATCH 2/9] Fix lossless encoding for MIPS. 
+ +Bug: webp:558 +Change-Id: I3d3ddb64ed26a8d8ff5664664c5f20f6eadfeb4f +(cherry picked from commit e4cbcdd2b5ff33a64f97fe49d67fb56f915657e8) +--- + src/dsp/lossless_enc_mips32.c | 8 ++++---- + 1 file changed, 4 insertions(+), 4 deletions(-) + +diff --git a/src/dsp/lossless_enc_mips32.c b/src/dsp/lossless_enc_mips32.c +index 0412a093..99630517 100644 +--- a/src/dsp/lossless_enc_mips32.c ++++ b/src/dsp/lossless_enc_mips32.c +@@ -347,24 +347,24 @@ static void GetCombinedEntropyUnrefined_MIPS32(const uint32_t X[], + static void AddVector_MIPS32(const uint32_t* pa, const uint32_t* pb, + uint32_t* pout, int size) { + uint32_t temp0, temp1, temp2, temp3, temp4, temp5, temp6, temp7; +- const uint32_t end = ((size) / 4) * 4; ++ const int end = ((size) / 4) * 4; + const uint32_t* const LoopEnd = pa + end; + int i; + ASM_START + ADD_TO_OUT(0, 4, 8, 12, 1, pa, pb, pout) + ASM_END_0 +- for (i = end; i < size; ++i) pout[i] = pa[i] + pb[i]; ++ for (i = 0; i < size - end; ++i) pout[i] = pa[i] + pb[i]; + } + + static void AddVectorEq_MIPS32(const uint32_t* pa, uint32_t* pout, int size) { + uint32_t temp0, temp1, temp2, temp3, temp4, temp5, temp6, temp7; +- const uint32_t end = ((size) / 4) * 4; ++ const int end = ((size) / 4) * 4; + const uint32_t* const LoopEnd = pa + end; + int i; + ASM_START + ADD_TO_OUT(0, 4, 8, 12, 0, pa, pout, pout) + ASM_END_1 +- for (i = end; i < size; ++i) pout[i] += pa[i]; ++ for (i = 0; i < size - end; ++i) pout[i] += pa[i]; + } + + #undef ASM_END_1 +-- +2.41.0 + +From 34e5da5d29b20d09149b1f166fe846ebfef5ed06 Mon Sep 17 00:00:00 2001 +From: James Zern <jzern@google.com> +Date: Mon, 28 Feb 2022 19:46:52 +0000 +Subject: [PATCH 3/9] alpha_processing_neon.c: fix Dispatch/ExtractAlpha_NEON + +the trailing width % 8 bytes would clear the upper bytes of +alpha_mask as they're done one at a time + +since: +49d0280d NEON: implement several alpha-processing functions + +Change-Id: Iff76c0af3094597285a6aa6ed032b345f9856aae +(cherry picked from commit 924e7ca6540d1ac0a2b1b92ee094a64391de0c09) +--- + src/dsp/alpha_processing_neon.c | 6 ++++-- + 1 file changed, 4 insertions(+), 2 deletions(-) + +diff --git a/src/dsp/alpha_processing_neon.c b/src/dsp/alpha_processing_neon.c +index 9d554217..50f08989 100644 +--- a/src/dsp/alpha_processing_neon.c ++++ b/src/dsp/alpha_processing_neon.c +@@ -83,7 +83,7 @@ static void ApplyAlphaMultiply_NEON(uint8_t* rgba, int alpha_first, + static int DispatchAlpha_NEON(const uint8_t* alpha, int alpha_stride, + int width, int height, + uint8_t* dst, int dst_stride) { +- uint32_t alpha_mask = 0xffffffffu; ++ uint32_t alpha_mask = 0xffu; + uint8x8_t mask8 = vdup_n_u8(0xff); + uint32_t tmp[2]; + int i, j; +@@ -107,6 +107,7 @@ static int DispatchAlpha_NEON(const uint8_t* alpha, int alpha_stride, + dst += dst_stride; + } + vst1_u8((uint8_t*)tmp, mask8); ++ alpha_mask *= 0x01010101; + alpha_mask &= tmp[0]; + alpha_mask &= tmp[1]; + return (alpha_mask != 0xffffffffu); +@@ -134,7 +135,7 @@ static void DispatchAlphaToGreen_NEON(const uint8_t* alpha, int alpha_stride, + static int ExtractAlpha_NEON(const uint8_t* argb, int argb_stride, + int width, int height, + uint8_t* alpha, int alpha_stride) { +- uint32_t alpha_mask = 0xffffffffu; ++ uint32_t alpha_mask = 0xffu; + uint8x8_t mask8 = vdup_n_u8(0xff); + uint32_t tmp[2]; + int i, j; +@@ -156,6 +157,7 @@ static int ExtractAlpha_NEON(const uint8_t* argb, int argb_stride, + alpha += alpha_stride; + } + vst1_u8((uint8_t*)tmp, mask8); ++ alpha_mask *= 0x0101010101; + alpha_mask &= tmp[0]; + alpha_mask &= tmp[1]; + return 
(alpha_mask == 0xffffffffu); +-- +2.41.0 + +From db94456bd1c20527b6011f42965bc27f2ec641be Mon Sep 17 00:00:00 2001 +From: James Zern <jzern@google.com> +Date: Fri, 4 Mar 2022 15:26:21 -0800 +Subject: [PATCH 4/9] alpha_processing_neon.c: fix 0x01... typo + +one instance was overlong leading to a int64->uint32 conversion warning + +Change-Id: I56d5ab75d89960c79293f62cd489d7ab519bbc34 +(cherry picked from commit 03d12190552c3e95d31aa00303f28a8a2f813bdd) +--- + src/dsp/alpha_processing_neon.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/src/dsp/alpha_processing_neon.c b/src/dsp/alpha_processing_neon.c +index 50f08989..27d71750 100644 +--- a/src/dsp/alpha_processing_neon.c ++++ b/src/dsp/alpha_processing_neon.c +@@ -157,7 +157,7 @@ static int ExtractAlpha_NEON(const uint8_t* argb, int argb_stride, + alpha += alpha_stride; + } + vst1_u8((uint8_t*)tmp, mask8); +- alpha_mask *= 0x0101010101; ++ alpha_mask *= 0x01010101; + alpha_mask &= tmp[0]; + alpha_mask &= tmp[1]; + return (alpha_mask == 0xffffffffu); +-- +2.41.0 + +From d29567c347b6c98d01ff19c32ba5a1351e964ae6 Mon Sep 17 00:00:00 2001 +From: James Zern <jzern@google.com> +Date: Mon, 4 Apr 2022 10:41:25 -0700 +Subject: [PATCH 5/9] VP8LEncodeStream: fix segfault on OOM + +initialize bw_side before calling EncoderAnalyze() & EncoderInit() which +may fail; previously this would cause a free of an invalid pointer in +VP8LBitWriterWipeOut(). + +since at least: +v0.6.0-120-gf8c2ac15 Multi-thread the lossless cruncher. + +Tested: +for i in `seq 1 639`; do + export MALLOC_FAIL_AT=$i + ./examples/cwebp -m 6 -q 100 -lossless jpeg_file +done + +Bug: webp:565 +Change-Id: I1c95883834b6e4b13aee890568ce3bad0f4266f0 +(cherry picked from commit fe153fae98a3fe4626ff537ec8d5f4477cec5739) +(cherry picked from commit ddd65f0d19bc6a3acbc48e49d315140ccf099b9a) +(cherry picked from commit 5d805f72051f0ae8aae61bbe1927c1a4bf9617ab) +--- + src/enc/vp8l_enc.c | 11 ++++++++--- + 1 file changed, 8 insertions(+), 3 deletions(-) + +diff --git a/src/enc/vp8l_enc.c b/src/enc/vp8l_enc.c +index 2efd403f..c9dea0bd 100644 +--- a/src/enc/vp8l_enc.c ++++ b/src/enc/vp8l_enc.c +@@ -1693,11 +1693,16 @@ WebPEncodingError VP8LEncodeStream(const WebPConfig* const config, + const WebPWorkerInterface* const worker_interface = WebPGetWorkerInterface(); + int ok_main; + ++ if (enc_main == NULL || !VP8LBitWriterInit(&bw_side, 0)) { ++ WebPEncodingSetError(picture, VP8_ENC_ERROR_OUT_OF_MEMORY); ++ VP8LEncoderDelete(enc_main); ++ return 0; ++ } ++ + // Analyze image (entropy, num_palettes etc) +- if (enc_main == NULL || +- !EncoderAnalyze(enc_main, crunch_configs, &num_crunch_configs_main, ++ if (!EncoderAnalyze(enc_main, crunch_configs, &num_crunch_configs_main, + &red_and_blue_always_zero) || +- !EncoderInit(enc_main) || !VP8LBitWriterInit(&bw_side, 0)) { ++ !EncoderInit(enc_main)) { + err = VP8_ENC_ERROR_OUT_OF_MEMORY; + goto Error; + } +-- +2.41.0 + +From 880207c0a73d40d6044cf228c05a6e8cff8cee80 Mon Sep 17 00:00:00 2001 +From: James Zern <jzern@google.com> +Date: Mon, 4 Apr 2022 10:44:06 -0700 +Subject: [PATCH 6/9] BackwardReferencesHashChainDistanceOnly: fix segfault on + OOM + +change CostManager to calloc to avoid frees on undefined pointer +values in CostManagerClear() should the cost_model allocation succeed, +but the cost_manager allocation fail + +since: +v0.5.0-93-g3e023c17 Speed-up BackwardReferencesHashChainDistanceOnly. 
+ +Tested: +for i in `seq 1 639`; do + export MALLOC_FAIL_AT=$i + ./examples/cwebp -m 6 -q 100 -lossless jpeg_file +done + +Bug: webp:565 +Change-Id: I376d81e6f41eb73529053e9e30c142b4b4f6b45b +(cherry picked from commit a828a59b49d2e3fbc40dc42a6ee6426cd0f2c9dc) +(cherry picked from commit dd80bb43431c222762be47fd92f8a37e032bc2c0) +(cherry picked from commit 4d0964cd0c2b20668d77f965a7e2fbe0b7dcaea4) +--- + src/enc/backward_references_cost_enc.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/src/enc/backward_references_cost_enc.c b/src/enc/backward_references_cost_enc.c +index 516abd73..5eb24d44 100644 +--- a/src/enc/backward_references_cost_enc.c ++++ b/src/enc/backward_references_cost_enc.c +@@ -577,7 +577,7 @@ static int BackwardReferencesHashChainDistanceOnly( + (CostModel*)WebPSafeCalloc(1ULL, cost_model_size); + VP8LColorCache hashers; + CostManager* cost_manager = +- (CostManager*)WebPSafeMalloc(1ULL, sizeof(*cost_manager)); ++ (CostManager*)WebPSafeCalloc(1ULL, sizeof(*cost_manager)); + int offset_prev = -1, len_prev = -1; + double offset_cost = -1; + int first_offset_is_constant = -1; // initialized with 'impossible' value +-- +2.41.0 + +From 3f74b25288a79445984569d7d721eb69047324eb Mon Sep 17 00:00:00 2001 +From: James Zern <jzern@google.com> +Date: Mon, 4 Apr 2022 10:47:40 -0700 +Subject: [PATCH 7/9] GetBackwardReferences: fail on alloc error + +previously failures in the call to +VP8LBackwardReferencesTraceBackwards() would be ignored which, though it +wouldn't result in a crash, would produce non-deterministic output + +Change-Id: Id9890a60883c3270ec75e968506d46eea32b76d4 +(cherry picked from commit e3cfafaf719c2e163d3548d7a415da96fdff714f) +(cherry picked from commit 20ef03ee351d4ff03fc5ff3ec4804a879d1b9d5c) +(cherry picked from commit 89e226a3c70ae04fe8e1884aa0b291f39adb2fdf) +--- + src/enc/backward_references_enc.c | 13 +++++++------ + 1 file changed, 7 insertions(+), 6 deletions(-) + +diff --git a/src/enc/backward_references_enc.c b/src/enc/backward_references_enc.c +index d445b40f..59809b16 100644 +--- a/src/enc/backward_references_enc.c ++++ b/src/enc/backward_references_enc.c +@@ -912,13 +912,14 @@ static VP8LBackwardRefs* GetBackwardReferences( + quality >= 25) { + const VP8LHashChain* const hash_chain_tmp = + (lz77_type_best == kLZ77Standard) ? hash_chain : &hash_chain_box; +- if (VP8LBackwardReferencesTraceBackwards(width, height, argb, *cache_bits, +- hash_chain_tmp, best, worst)) { +- double bit_cost_trace; +- VP8LHistogramCreate(histo, worst, *cache_bits); +- bit_cost_trace = VP8LHistogramEstimateBits(histo); +- if (bit_cost_trace < bit_cost_best) best = worst; ++ double bit_cost_trace; ++ if (!VP8LBackwardReferencesTraceBackwards(width, height, argb, *cache_bits, ++ hash_chain_tmp, best, worst)) { ++ goto Error; + } ++ VP8LHistogramCreate(histo, worst, *cache_bits); ++ bit_cost_trace = VP8LHistogramEstimateBits(histo); ++ if (bit_cost_trace < bit_cost_best) best = worst; + } + + BackwardReferences2DLocality(width, best); +-- +2.41.0 + +From d01505c7777b315c8f2da6ae9c68a80b064317df Mon Sep 17 00:00:00 2001 +From: James Zern <jzern@google.com> +Date: Wed, 22 Feb 2023 22:15:47 -0800 +Subject: [PATCH 8/9] EncodeAlphaInternal: clear result->bw on error + +This avoids a double free should the function fail prior to +VP8BitWriterInit() and a previous trial result's buffer carried over. +Previously in ApplyFiltersAndEncode() trial.bw (with a previous +iteration's buffer) would be freed, followed by best.bw pointing to the +same buffer. 
+ +Since: +187d379d add a fallback to ALPHA_NO_COMPRESSION + +In addition, check the return value of VP8BitWriterInit() in this +function. + +Bug: webp:603 +Change-Id: Ic258381ee26c8c16bc211d157c8153831c8c6910 +(cherry picked from commit a486d800b60d0af4cc0836bf7ed8f21e12974129) +--- + src/enc/alpha_enc.c | 4 +++- + 1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/src/enc/alpha_enc.c b/src/enc/alpha_enc.c +index dce9ca95..c786ae59 100644 +--- a/src/enc/alpha_enc.c ++++ b/src/enc/alpha_enc.c +@@ -13,6 +13,7 @@ + + #include <assert.h> + #include <stdlib.h> ++#include <string.h> + + #include "src/enc/vp8i_enc.h" + #include "src/dsp/dsp.h" +@@ -148,6 +149,7 @@ static int EncodeAlphaInternal(const uint8_t* const data, int width, int height, + } + } else { + VP8LBitWriterWipeOut(&tmp_bw); ++ memset(&result->bw, 0, sizeof(result->bw)); + return 0; + } + } +@@ -162,7 +164,7 @@ static int EncodeAlphaInternal(const uint8_t* const data, int width, int height, + header = method | (filter << 2); + if (reduce_levels) header |= ALPHA_PREPROCESSED_LEVELS << 4; + +- VP8BitWriterInit(&result->bw, ALPHA_HEADER_LEN + output_size); ++ if (!VP8BitWriterInit(&result->bw, ALPHA_HEADER_LEN + output_size)) ok = 0; + ok = ok && VP8BitWriterAppend(&result->bw, &header, ALPHA_HEADER_LEN); + ok = ok && VP8BitWriterAppend(&result->bw, output, output_size); + +-- +2.41.0 + +From 8d9916da9074535517481f9ccbdee706a89ac842 Mon Sep 17 00:00:00 2001 +From: Vincent Rabaud <vrabaud@google.com> +Date: Thu, 7 Sep 2023 21:16:03 +0200 +Subject: [PATCH 9/9] Fix OOB write in BuildHuffmanTable. + +First, BuildHuffmanTable is called to check if the data is valid. +If it is and the table is not big enough, more memory is allocated. + +This will make sure that valid (but unoptimized because of unbalanced +codes) streams are still decodable. 
+ +Bug: chromium:1479274 +Change-Id: I31c36dbf3aa78d35ecf38706b50464fd3d375741 +(cherry picked from commit 902bc9190331343b2017211debcec8d2ab87e17a) +(cherry picked from commit 2af26267cdfcb63a88e5c74a85927a12d6ca1d76) +--- + src/dec/vp8l_dec.c | 46 ++++++++++--------- + src/dec/vp8li_dec.h | 2 +- + src/utils/huffman_utils.c | 97 +++++++++++++++++++++++++++++++-------- + src/utils/huffman_utils.h | 27 +++++++++-- + 4 files changed, 129 insertions(+), 43 deletions(-) + +diff --git a/src/dec/vp8l_dec.c b/src/dec/vp8l_dec.c +index 93615d4e..0d38314d 100644 +--- a/src/dec/vp8l_dec.c ++++ b/src/dec/vp8l_dec.c +@@ -253,11 +253,11 @@ static int ReadHuffmanCodeLengths( + int symbol; + int max_symbol; + int prev_code_len = DEFAULT_CODE_LENGTH; +- HuffmanCode table[1 << LENGTHS_TABLE_BITS]; ++ HuffmanTables tables; + +- if (!VP8LBuildHuffmanTable(table, LENGTHS_TABLE_BITS, +- code_length_code_lengths, +- NUM_CODE_LENGTH_CODES)) { ++ if (!VP8LHuffmanTablesAllocate(1 << LENGTHS_TABLE_BITS, &tables) || ++ !VP8LBuildHuffmanTable(&tables, LENGTHS_TABLE_BITS, ++ code_length_code_lengths, NUM_CODE_LENGTH_CODES)) { + goto End; + } + +@@ -277,7 +277,7 @@ static int ReadHuffmanCodeLengths( + int code_len; + if (max_symbol-- == 0) break; + VP8LFillBitWindow(br); +- p = &table[VP8LPrefetchBits(br) & LENGTHS_TABLE_MASK]; ++ p = &tables.curr_segment->start[VP8LPrefetchBits(br) & LENGTHS_TABLE_MASK]; + VP8LSetBitPos(br, br->bit_pos_ + p->bits); + code_len = p->value; + if (code_len < kCodeLengthLiterals) { +@@ -300,6 +300,7 @@ static int ReadHuffmanCodeLengths( + ok = 1; + + End: ++ VP8LHuffmanTablesDeallocate(&tables); + if (!ok) dec->status_ = VP8_STATUS_BITSTREAM_ERROR; + return ok; + } +@@ -307,7 +308,8 @@ static int ReadHuffmanCodeLengths( + // 'code_lengths' is pre-allocated temporary buffer, used for creating Huffman + // tree. + static int ReadHuffmanCode(int alphabet_size, VP8LDecoder* const dec, +- int* const code_lengths, HuffmanCode* const table) { ++ int* const code_lengths, ++ HuffmanTables* const table) { + int ok = 0; + int size = 0; + VP8LBitReader* const br = &dec->br_; +@@ -362,8 +364,7 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize, + VP8LMetadata* const hdr = &dec->hdr_; + uint32_t* huffman_image = NULL; + HTreeGroup* htree_groups = NULL; +- HuffmanCode* huffman_tables = NULL; +- HuffmanCode* huffman_table = NULL; ++ HuffmanTables* huffman_tables = &hdr->huffman_tables_; + int num_htree_groups = 1; + int num_htree_groups_max = 1; + int max_alphabet_size = 0; +@@ -372,6 +373,10 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize, + int* mapping = NULL; + int ok = 0; + ++ // Check the table has been 0 initialized (through InitMetadata). ++ assert(huffman_tables->root.start == NULL); ++ assert(huffman_tables->curr_segment == NULL); ++ + if (allow_recursion && VP8LReadBits(br, 1)) { + // use meta Huffman codes. 
+ const int huffman_precision = VP8LReadBits(br, 3) + 2; +@@ -434,16 +439,15 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize, + + code_lengths = (int*)WebPSafeCalloc((uint64_t)max_alphabet_size, + sizeof(*code_lengths)); +- huffman_tables = (HuffmanCode*)WebPSafeMalloc(num_htree_groups * table_size, +- sizeof(*huffman_tables)); + htree_groups = VP8LHtreeGroupsNew(num_htree_groups); + +- if (htree_groups == NULL || code_lengths == NULL || huffman_tables == NULL) { ++ if (htree_groups == NULL || code_lengths == NULL || ++ !VP8LHuffmanTablesAllocate(num_htree_groups * table_size, ++ huffman_tables)) { + dec->status_ = VP8_STATUS_OUT_OF_MEMORY; + goto Error; + } + +- huffman_table = huffman_tables; + for (i = 0; i < num_htree_groups_max; ++i) { + // If the index "i" is unused in the Huffman image, just make sure the + // coefficients are valid but do not store them. +@@ -468,19 +472,20 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize, + int max_bits = 0; + for (j = 0; j < HUFFMAN_CODES_PER_META_CODE; ++j) { + int alphabet_size = kAlphabetSize[j]; +- htrees[j] = huffman_table; + if (j == 0 && color_cache_bits > 0) { + alphabet_size += (1 << color_cache_bits); + } +- size = ReadHuffmanCode(alphabet_size, dec, code_lengths, huffman_table); ++ size = ++ ReadHuffmanCode(alphabet_size, dec, code_lengths, huffman_tables); ++ htrees[j] = huffman_tables->curr_segment->curr_table; + if (size == 0) { + goto Error; + } + if (is_trivial_literal && kLiteralMap[j] == 1) { +- is_trivial_literal = (huffman_table->bits == 0); ++ is_trivial_literal = (htrees[j]->bits == 0); + } +- total_size += huffman_table->bits; +- huffman_table += size; ++ total_size += htrees[j]->bits; ++ huffman_tables->curr_segment->curr_table += size; + if (j <= ALPHA) { + int local_max_bits = code_lengths[0]; + int k; +@@ -515,14 +520,13 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize, + hdr->huffman_image_ = huffman_image; + hdr->num_htree_groups_ = num_htree_groups; + hdr->htree_groups_ = htree_groups; +- hdr->huffman_tables_ = huffman_tables; + + Error: + WebPSafeFree(code_lengths); + WebPSafeFree(mapping); + if (!ok) { + WebPSafeFree(huffman_image); +- WebPSafeFree(huffman_tables); ++ VP8LHuffmanTablesDeallocate(huffman_tables); + VP8LHtreeGroupsFree(htree_groups); + } + return ok; +@@ -1354,7 +1358,7 @@ static void ClearMetadata(VP8LMetadata* const hdr) { + assert(hdr != NULL); + + WebPSafeFree(hdr->huffman_image_); +- WebPSafeFree(hdr->huffman_tables_); ++ VP8LHuffmanTablesDeallocate(&hdr->huffman_tables_); + VP8LHtreeGroupsFree(hdr->htree_groups_); + VP8LColorCacheClear(&hdr->color_cache_); + VP8LColorCacheClear(&hdr->saved_color_cache_); +@@ -1670,7 +1674,7 @@ int VP8LDecodeImage(VP8LDecoder* const dec) { + // Sanity checks. 
+ if (dec == NULL) return 0; + +- assert(dec->hdr_.huffman_tables_ != NULL); ++ assert(dec->hdr_.huffman_tables_.root.start != NULL); + assert(dec->hdr_.htree_groups_ != NULL); + assert(dec->hdr_.num_htree_groups_ > 0); + +diff --git a/src/dec/vp8li_dec.h b/src/dec/vp8li_dec.h +index 0a4d613f..4677de62 100644 +--- a/src/dec/vp8li_dec.h ++++ b/src/dec/vp8li_dec.h +@@ -51,7 +51,7 @@ typedef struct { + uint32_t *huffman_image_; + int num_htree_groups_; + HTreeGroup *htree_groups_; +- HuffmanCode *huffman_tables_; ++ HuffmanTables huffman_tables_; + } VP8LMetadata; + + typedef struct VP8LDecoder VP8LDecoder; +diff --git a/src/utils/huffman_utils.c b/src/utils/huffman_utils.c +index 0cba0fbb..9efd6283 100644 +--- a/src/utils/huffman_utils.c ++++ b/src/utils/huffman_utils.c +@@ -177,21 +177,24 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits, + if (num_open < 0) { + return 0; + } +- if (root_table == NULL) continue; + for (; count[len] > 0; --count[len]) { + HuffmanCode code; + if ((key & mask) != low) { +- table += table_size; ++ if (root_table != NULL) table += table_size; + table_bits = NextTableBitSize(count, len, root_bits); + table_size = 1 << table_bits; + total_size += table_size; + low = key & mask; +- root_table[low].bits = (uint8_t)(table_bits + root_bits); +- root_table[low].value = (uint16_t)((table - root_table) - low); ++ if (root_table != NULL) { ++ root_table[low].bits = (uint8_t)(table_bits + root_bits); ++ root_table[low].value = (uint16_t)((table - root_table) - low); ++ } ++ } ++ if (root_table != NULL) { ++ code.bits = (uint8_t)(len - root_bits); ++ code.value = (uint16_t)sorted[symbol++]; ++ ReplicateValue(&table[key >> root_bits], step, table_size, code); + } +- code.bits = (uint8_t)(len - root_bits); +- code.value = (uint16_t)sorted[symbol++]; +- ReplicateValue(&table[key >> root_bits], step, table_size, code); + key = GetNextKey(key, len); + } + } +@@ -211,25 +214,83 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits, + ((1 << MAX_CACHE_BITS) + NUM_LITERAL_CODES + NUM_LENGTH_CODES) + // Cut-off value for switching between heap and stack allocation. + #define SORTED_SIZE_CUTOFF 512 +-int VP8LBuildHuffmanTable(HuffmanCode* const root_table, int root_bits, ++int VP8LBuildHuffmanTable(HuffmanTables* const root_table, int root_bits, + const int code_lengths[], int code_lengths_size) { +- int total_size; ++ const int total_size = ++ BuildHuffmanTable(NULL, root_bits, code_lengths, code_lengths_size, NULL); + assert(code_lengths_size <= MAX_CODE_LENGTHS_SIZE); +- if (root_table == NULL) { +- total_size = BuildHuffmanTable(NULL, root_bits, +- code_lengths, code_lengths_size, NULL); +- } else if (code_lengths_size <= SORTED_SIZE_CUTOFF) { ++ if (total_size == 0 || root_table == NULL) return total_size; ++ ++ if (root_table->curr_segment->curr_table + total_size >= ++ root_table->curr_segment->start + root_table->curr_segment->size) { ++ // If 'root_table' does not have enough memory, allocate a new segment. ++ // The available part of root_table->curr_segment is left unused because we ++ // need a contiguous buffer. ++ const int segment_size = root_table->curr_segment->size; ++ struct HuffmanTablesSegment* next = ++ (HuffmanTablesSegment*)WebPSafeMalloc(1, sizeof(*next)); ++ if (next == NULL) return 0; ++ // Fill the new segment. ++ // We need at least 'total_size' but if that value is small, it is better to ++ // allocate a big chunk to prevent more allocations later. 
'segment_size' is ++ // therefore chosen (any other arbitrary value could be chosen). ++ next->size = total_size > segment_size ? total_size : segment_size; ++ next->start = ++ (HuffmanCode*)WebPSafeMalloc(next->size, sizeof(*next->start)); ++ if (next->start == NULL) { ++ WebPSafeFree(next); ++ return 0; ++ } ++ next->curr_table = next->start; ++ next->next = NULL; ++ // Point to the new segment. ++ root_table->curr_segment->next = next; ++ root_table->curr_segment = next; ++ } ++ if (code_lengths_size <= SORTED_SIZE_CUTOFF) { + // use local stack-allocated array. + uint16_t sorted[SORTED_SIZE_CUTOFF]; +- total_size = BuildHuffmanTable(root_table, root_bits, +- code_lengths, code_lengths_size, sorted); +- } else { // rare case. Use heap allocation. ++ BuildHuffmanTable(root_table->curr_segment->curr_table, root_bits, ++ code_lengths, code_lengths_size, sorted); ++ } else { // rare case. Use heap allocation. + uint16_t* const sorted = + (uint16_t*)WebPSafeMalloc(code_lengths_size, sizeof(*sorted)); + if (sorted == NULL) return 0; +- total_size = BuildHuffmanTable(root_table, root_bits, +- code_lengths, code_lengths_size, sorted); ++ BuildHuffmanTable(root_table->curr_segment->curr_table, root_bits, ++ code_lengths, code_lengths_size, sorted); + WebPSafeFree(sorted); + } + return total_size; + } ++ ++int VP8LHuffmanTablesAllocate(int size, HuffmanTables* huffman_tables) { ++ // Have 'segment' point to the first segment for now, 'root'. ++ HuffmanTablesSegment* const root = &huffman_tables->root; ++ huffman_tables->curr_segment = root; ++ // Allocate root. ++ root->start = (HuffmanCode*)WebPSafeMalloc(size, sizeof(*root->start)); ++ if (root->start == NULL) return 0; ++ root->curr_table = root->start; ++ root->next = NULL; ++ root->size = size; ++ return 1; ++} ++ ++void VP8LHuffmanTablesDeallocate(HuffmanTables* const huffman_tables) { ++ HuffmanTablesSegment *current, *next; ++ if (huffman_tables == NULL) return; ++ // Free the root node. ++ current = &huffman_tables->root; ++ next = current->next; ++ WebPSafeFree(current->start); ++ current->start = NULL; ++ current->next = NULL; ++ current = next; ++ // Free the following nodes. ++ while (current != NULL) { ++ next = current->next; ++ WebPSafeFree(current->start); ++ WebPSafeFree(current); ++ current = next; ++ } ++} +diff --git a/src/utils/huffman_utils.h b/src/utils/huffman_utils.h +index 13b7ad1a..98415c53 100644 +--- a/src/utils/huffman_utils.h ++++ b/src/utils/huffman_utils.h +@@ -43,6 +43,29 @@ typedef struct { + // or non-literal symbol otherwise + } HuffmanCode32; + ++// Contiguous memory segment of HuffmanCodes. ++typedef struct HuffmanTablesSegment { ++ HuffmanCode* start; ++ // Pointer to where we are writing into the segment. Starts at 'start' and ++ // cannot go beyond 'start' + 'size'. ++ HuffmanCode* curr_table; ++ // Pointer to the next segment in the chain. ++ struct HuffmanTablesSegment* next; ++ int size; ++} HuffmanTablesSegment; ++ ++// Chained memory segments of HuffmanCodes. ++typedef struct HuffmanTables { ++ HuffmanTablesSegment root; ++ // Currently processed segment. At first, this is 'root'. ++ HuffmanTablesSegment* curr_segment; ++} HuffmanTables; ++ ++// Allocates a HuffmanTables with 'size' contiguous HuffmanCodes. Returns 0 on ++// memory allocation error, 1 otherwise. 
++int VP8LHuffmanTablesAllocate(int size, HuffmanTables* huffman_tables); ++void VP8LHuffmanTablesDeallocate(HuffmanTables* const huffman_tables); ++ + #define HUFFMAN_PACKED_BITS 6 + #define HUFFMAN_PACKED_TABLE_SIZE (1u << HUFFMAN_PACKED_BITS) + +@@ -78,9 +101,7 @@ void VP8LHtreeGroupsFree(HTreeGroup* const htree_groups); + // the huffman table. + // Returns built table size or 0 in case of error (invalid tree or + // memory error). +-// If root_table is NULL, it returns 0 if a lookup cannot be built, something +-// > 0 otherwise (but not the table size). +-int VP8LBuildHuffmanTable(HuffmanCode* const root_table, int root_bits, ++int VP8LBuildHuffmanTable(HuffmanTables* const root_table, int root_bits, + const int code_lengths[], int code_lengths_size); + + #ifdef __cplusplus +-- +2.41.0 + diff --git a/libwebp.spec b/libwebp.spec index d7ad82e..d5e20fc 100644 --- a/libwebp.spec +++ b/libwebp.spec @@ -21,14 +21,18 @@ Name: %{libname} Name: %{libname}%{soname} %endif Version: 1.0.3 -Release: 1%{?dist} +Release: 2%{?dist} URL: http://webmproject.org/ Summary: Library and tools for the WebP graphics format # Additional IPR is licensed as well. See PATENTS file for details -License: BSD +License: BSD-3-Clause Source0: http://downloads.webmproject.org/releases/webp/%{libname}-%{version}.tar.gz Source1: libwebp_jni_example.java +# Upstream patches since release +# https://github.com/webmproject/libwebp/commits/1.0.3 +Patch0: libwebp-upstream.patch + BuildRequires: libjpeg-devel BuildRequires: libpng-devel BuildRequires: giflib-devel @@ -107,6 +111,7 @@ Java bindings for libwebp. %prep %setup -q -n %{libname}-%{version} +%patch -P0 -p1 -b .up %build @@ -194,6 +199,9 @@ cp swig/*.jar swig/*.so %{buildroot}/%{_libdir}/%{name}-java/ %changelog +* Thu Sep 28 2023 Remi Collet <remi@remirepo.net> - 1.0.3-2 +- add upstream patched with fix for CVE-2023-4863 + * Fri Jan 3 2020 Remi Collet <remi@remirepo.net> - 1.0.3-1 - update to 1.0.3 |
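Beyond the CVE fix, patches 5, 6 and 8 above all harden out-of-memory paths in the same spirit: put anything the shared error path will later clean up into a defined (zeroed) state before the first operation that can fail, so a partial failure never leads to free() on an indeterminate pointer or to a double free. The sketch below illustrates that idiom with invented types (`Writer`, `Encode`); it is not libwebp code.

```c
#include <stdlib.h>
#include <string.h>

/* Hypothetical illustration of the OOM-hardening idiom from patches 5/6/8:
 * zero-initialize every object the common error path will wipe, before any
 * fallible step runs. */

typedef struct { unsigned char* buf; size_t len; } Writer;

static int WriterInit(Writer* const w, size_t len) {
  w->buf = (unsigned char*)malloc(len);
  w->len = (w->buf != NULL) ? len : 0;
  return w->buf != NULL;
}

static void WriterWipe(Writer* const w) {  /* safe on a zeroed Writer */
  free(w->buf);
  memset(w, 0, sizeof(*w));
}

static int Encode(size_t side_len) {
  Writer main_w, side_w;
  int ok = 0;
  /* Zero both writers *before* the first operation that can fail, mirroring
   * how VP8LEncodeStream now sets up bw_side before EncoderAnalyze(). */
  memset(&main_w, 0, sizeof(main_w));
  memset(&side_w, 0, sizeof(side_w));
  if (!WriterInit(&main_w, 128)) goto Error;
  if (!WriterInit(&side_w, side_len)) goto Error;  /* may fail on OOM */
  /* ... real encoding work would happen here ... */
  ok = 1;
 Error:
  WriterWipe(&side_w);  /* never sees garbage, even if its init failed */
  WriterWipe(&main_w);
  return ok;
}

int main(void) { return Encode(64) ? 0 : 1; }
```

Patch 6 achieves the same effect even more simply by switching cost_manager from WebPSafeMalloc() to WebPSafeCalloc(), so CostManagerClear() never frees indeterminate pointers when a later allocation fails.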