Diffstat (limited to 'meta/recipes-multimedia/webp/files/CVE-2023-4863-0001.patch')
-rw-r--r-- meta/recipes-multimedia/webp/files/CVE-2023-4863-0001.patch | 366
1 file changed, 366 insertions, 0 deletions
diff --git a/meta/recipes-multimedia/webp/files/CVE-2023-4863-0001.patch b/meta/recipes-multimedia/webp/files/CVE-2023-4863-0001.patch
new file mode 100644
index 0000000000..419b12f7d9
--- /dev/null
+++ b/meta/recipes-multimedia/webp/files/CVE-2023-4863-0001.patch
@@ -0,0 +1,366 @@
From 902bc9190331343b2017211debcec8d2ab87e17a Mon Sep 17 00:00:00 2001
From: Vincent Rabaud <vrabaud@google.com>
Date: Thu, 7 Sep 2023 21:16:03 +0200
Subject: [PATCH 1/2] Fix OOB write in BuildHuffmanTable.

First, BuildHuffmanTable is called to check if the data is valid.
If it is and the table is not big enough, more memory is allocated.

This will make sure that valid (but unoptimized because of unbalanced
codes) streams are still decodable.

Bug: chromium:1479274
Change-Id: I31c36dbf3aa78d35ecf38706b50464fd3d375741

CVE: CVE-2023-4863

Upstream-Status: Backport [https://github.com/webmproject/libwebp/commit/902bc9190331343b2017211debcec8d2ab87e17a]

Signed-off-by: Soumya Sambu <soumya.sambu@windriver.com>
---
 src/dec/vp8l_dec.c | 46 ++++++++++---------
 src/dec/vp8li_dec.h | 2 +-
 src/utils/huffman_utils.c | 97 +++++++++++++++++++++++++++++++--------
 src/utils/huffman_utils.h | 27 +++++++++--
 4 files changed, 129 insertions(+), 43 deletions(-)

diff --git a/src/dec/vp8l_dec.c b/src/dec/vp8l_dec.c
index 93615d4..0d38314 100644
--- a/src/dec/vp8l_dec.c
+++ b/src/dec/vp8l_dec.c
@@ -253,11 +253,11 @@ static int ReadHuffmanCodeLengths(
 int symbol;
 int max_symbol;
 int prev_code_len = DEFAULT_CODE_LENGTH;
- HuffmanCode table[1 << LENGTHS_TABLE_BITS];
+ HuffmanTables tables;

- if (!VP8LBuildHuffmanTable(table, LENGTHS_TABLE_BITS,
- code_length_code_lengths,
- NUM_CODE_LENGTH_CODES)) {
+ if (!VP8LHuffmanTablesAllocate(1 << LENGTHS_TABLE_BITS, &tables) ||
+ !VP8LBuildHuffmanTable(&tables, LENGTHS_TABLE_BITS,
+ code_length_code_lengths, NUM_CODE_LENGTH_CODES)) {
 goto End;
 }

@@ -277,7 +277,7 @@ static int ReadHuffmanCodeLengths(
 int code_len;
 if (max_symbol-- == 0) break;
 VP8LFillBitWindow(br);
- p = &table[VP8LPrefetchBits(br) & LENGTHS_TABLE_MASK];
+ p = &tables.curr_segment->start[VP8LPrefetchBits(br) & LENGTHS_TABLE_MASK];
 VP8LSetBitPos(br, br->bit_pos_ + p->bits);
 code_len = p->value;
 if (code_len < kCodeLengthLiterals) {
@@ -300,6 +300,7 @@ static int ReadHuffmanCodeLengths(
 ok = 1;

 End:
+ VP8LHuffmanTablesDeallocate(&tables);
 if (!ok) dec->status_ = VP8_STATUS_BITSTREAM_ERROR;
 return ok;
 }
@@ -307,7 +308,8 @@ static int ReadHuffmanCodeLengths(
 // 'code_lengths' is pre-allocated temporary buffer, used for creating Huffman
 // tree.
 static int ReadHuffmanCode(int alphabet_size, VP8LDecoder* const dec,
- int* const code_lengths, HuffmanCode* const table) {
+ int* const code_lengths,
+ HuffmanTables* const table) {
 int ok = 0;
 int size = 0;
 VP8LBitReader* const br = &dec->br_;
@@ -362,8 +364,7 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
 VP8LMetadata* const hdr = &dec->hdr_;
 uint32_t* huffman_image = NULL;
 HTreeGroup* htree_groups = NULL;
- HuffmanCode* huffman_tables = NULL;
- HuffmanCode* huffman_table = NULL;
+ HuffmanTables* huffman_tables = &hdr->huffman_tables_;
 int num_htree_groups = 1;
 int num_htree_groups_max = 1;
 int max_alphabet_size = 0;
@@ -372,6 +373,10 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
 int* mapping = NULL;
 int ok = 0;

+ // Check the table has been 0 initialized (through InitMetadata).
+ assert(huffman_tables->root.start == NULL);
+ assert(huffman_tables->curr_segment == NULL);
+
 if (allow_recursion && VP8LReadBits(br, 1)) {
 // use meta Huffman codes.
 const int huffman_precision = VP8LReadBits(br, 3) + 2;
@@ -434,16 +439,15 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,

 code_lengths = (int*)WebPSafeCalloc((uint64_t)max_alphabet_size,
 sizeof(*code_lengths));
- huffman_tables = (HuffmanCode*)WebPSafeMalloc(num_htree_groups * table_size,
- sizeof(*huffman_tables));
 htree_groups = VP8LHtreeGroupsNew(num_htree_groups);

- if (htree_groups == NULL || code_lengths == NULL || huffman_tables == NULL) {
+ if (htree_groups == NULL || code_lengths == NULL ||
+ !VP8LHuffmanTablesAllocate(num_htree_groups * table_size,
+ huffman_tables)) {
 dec->status_ = VP8_STATUS_OUT_OF_MEMORY;
 goto Error;
 }

- huffman_table = huffman_tables;
 for (i = 0; i < num_htree_groups_max; ++i) {
 // If the index "i" is unused in the Huffman image, just make sure the
 // coefficients are valid but do not store them.
@@ -468,19 +472,20 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
 int max_bits = 0;
 for (j = 0; j < HUFFMAN_CODES_PER_META_CODE; ++j) {
 int alphabet_size = kAlphabetSize[j];
- htrees[j] = huffman_table;
 if (j == 0 && color_cache_bits > 0) {
 alphabet_size += (1 << color_cache_bits);
 }
- size = ReadHuffmanCode(alphabet_size, dec, code_lengths, huffman_table);
+ size =
+ ReadHuffmanCode(alphabet_size, dec, code_lengths, huffman_tables);
+ htrees[j] = huffman_tables->curr_segment->curr_table;
 if (size == 0) {
 goto Error;
 }
 if (is_trivial_literal && kLiteralMap[j] == 1) {
- is_trivial_literal = (huffman_table->bits == 0);
+ is_trivial_literal = (htrees[j]->bits == 0);
 }
- total_size += huffman_table->bits;
- huffman_table += size;
+ total_size += htrees[j]->bits;
+ huffman_tables->curr_segment->curr_table += size;
 if (j <= ALPHA) {
 int local_max_bits = code_lengths[0];
 int k;
@@ -515,14 +520,13 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
 hdr->huffman_image_ = huffman_image;
 hdr->num_htree_groups_ = num_htree_groups;
 hdr->htree_groups_ = htree_groups;
- hdr->huffman_tables_ = huffman_tables;

 Error:
 WebPSafeFree(code_lengths);
 WebPSafeFree(mapping);
 if (!ok) {
 WebPSafeFree(huffman_image);
- WebPSafeFree(huffman_tables);
+ VP8LHuffmanTablesDeallocate(huffman_tables);
 VP8LHtreeGroupsFree(htree_groups);
 }
 return ok;
@@ -1354,7 +1358,7 @@ static void ClearMetadata(VP8LMetadata* const hdr) {
 assert(hdr != NULL);

 WebPSafeFree(hdr->huffman_image_);
- WebPSafeFree(hdr->huffman_tables_);
+ VP8LHuffmanTablesDeallocate(&hdr->huffman_tables_);
 VP8LHtreeGroupsFree(hdr->htree_groups_);
 VP8LColorCacheClear(&hdr->color_cache_);
 VP8LColorCacheClear(&hdr->saved_color_cache_);
@@ -1670,7 +1674,7 @@ int VP8LDecodeImage(VP8LDecoder* const dec) {
 // Sanity checks.
 if (dec == NULL) return 0;

- assert(dec->hdr_.huffman_tables_ != NULL);
+ assert(dec->hdr_.huffman_tables_.root.start != NULL);
 assert(dec->hdr_.htree_groups_ != NULL);
 assert(dec->hdr_.num_htree_groups_ > 0);

diff --git a/src/dec/vp8li_dec.h b/src/dec/vp8li_dec.h
index 72b2e86..32540a4 100644
--- a/src/dec/vp8li_dec.h
+++ b/src/dec/vp8li_dec.h
@@ -51,7 +51,7 @@ typedef struct {
 uint32_t* huffman_image_;
 int num_htree_groups_;
 HTreeGroup* htree_groups_;
- HuffmanCode* huffman_tables_;
+ HuffmanTables huffman_tables_;
 } VP8LMetadata;

 typedef struct VP8LDecoder VP8LDecoder;
diff --git a/src/utils/huffman_utils.c b/src/utils/huffman_utils.c
index 0cba0fb..9efd628 100644
--- a/src/utils/huffman_utils.c
+++ b/src/utils/huffman_utils.c
@@ -177,21 +177,24 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
 if (num_open < 0) {
 return 0;
 }
- if (root_table == NULL) continue;
 for (; count[len] > 0; --count[len]) {
 HuffmanCode code;
 if ((key & mask) != low) {
- table += table_size;
+ if (root_table != NULL) table += table_size;
 table_bits = NextTableBitSize(count, len, root_bits);
 table_size = 1 << table_bits;
 total_size += table_size;
 low = key & mask;
- root_table[low].bits = (uint8_t)(table_bits + root_bits);
- root_table[low].value = (uint16_t)((table - root_table) - low);
+ if (root_table != NULL) {
+ root_table[low].bits = (uint8_t)(table_bits + root_bits);
+ root_table[low].value = (uint16_t)((table - root_table) - low);
+ }
+ }
+ if (root_table != NULL) {
+ code.bits = (uint8_t)(len - root_bits);
+ code.value = (uint16_t)sorted[symbol++];
+ ReplicateValue(&table[key >> root_bits], step, table_size, code);
 }
- code.bits = (uint8_t)(len - root_bits);
- code.value = (uint16_t)sorted[symbol++];
- ReplicateValue(&table[key >> root_bits], step, table_size, code);
 key = GetNextKey(key, len);
 }
 }
@@ -211,25 +214,83 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
 ((1 << MAX_CACHE_BITS) + NUM_LITERAL_CODES + NUM_LENGTH_CODES)
 // Cut-off value for switching between heap and stack allocation.
 #define SORTED_SIZE_CUTOFF 512
-int VP8LBuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
+int VP8LBuildHuffmanTable(HuffmanTables* const root_table, int root_bits,
 const int code_lengths[], int code_lengths_size) {
- int total_size;
+ const int total_size =
+ BuildHuffmanTable(NULL, root_bits, code_lengths, code_lengths_size, NULL);
 assert(code_lengths_size <= MAX_CODE_LENGTHS_SIZE);
- if (root_table == NULL) {
- total_size = BuildHuffmanTable(NULL, root_bits,
- code_lengths, code_lengths_size, NULL);
- } else if (code_lengths_size <= SORTED_SIZE_CUTOFF) {
+ if (total_size == 0 || root_table == NULL) return total_size;
+
+ if (root_table->curr_segment->curr_table + total_size >=
+ root_table->curr_segment->start + root_table->curr_segment->size) {
+ // If 'root_table' does not have enough memory, allocate a new segment.
+ // The available part of root_table->curr_segment is left unused because we
+ // need a contiguous buffer.
+ const int segment_size = root_table->curr_segment->size;
+ struct HuffmanTablesSegment* next =
+ (HuffmanTablesSegment*)WebPSafeMalloc(1, sizeof(*next));
+ if (next == NULL) return 0;
+ // Fill the new segment.
+ // We need at least 'total_size' but if that value is small, it is better to
+ // allocate a big chunk to prevent more allocations later. 'segment_size' is
+ // therefore chosen (any other arbitrary value could be chosen).
+ next->size = total_size > segment_size ? total_size : segment_size;
+ next->start =
+ (HuffmanCode*)WebPSafeMalloc(next->size, sizeof(*next->start));
+ if (next->start == NULL) {
+ WebPSafeFree(next);
+ return 0;
+ }
+ next->curr_table = next->start;
+ next->next = NULL;
+ // Point to the new segment.
+ root_table->curr_segment->next = next;
+ root_table->curr_segment = next;
+ }
+ if (code_lengths_size <= SORTED_SIZE_CUTOFF) {
 // use local stack-allocated array.
 uint16_t sorted[SORTED_SIZE_CUTOFF];
- total_size = BuildHuffmanTable(root_table, root_bits,
- code_lengths, code_lengths_size, sorted);
- } else { // rare case. Use heap allocation.
+ BuildHuffmanTable(root_table->curr_segment->curr_table, root_bits,
+ code_lengths, code_lengths_size, sorted);
+ } else { // rare case. Use heap allocation.
 uint16_t* const sorted =
 (uint16_t*)WebPSafeMalloc(code_lengths_size, sizeof(*sorted));
 if (sorted == NULL) return 0;
- total_size = BuildHuffmanTable(root_table, root_bits,
- code_lengths, code_lengths_size, sorted);
+ BuildHuffmanTable(root_table->curr_segment->curr_table, root_bits,
+ code_lengths, code_lengths_size, sorted);
 WebPSafeFree(sorted);
 }
 return total_size;
 }
+
+int VP8LHuffmanTablesAllocate(int size, HuffmanTables* huffman_tables) {
+ // Have 'segment' point to the first segment for now, 'root'.
+ HuffmanTablesSegment* const root = &huffman_tables->root;
+ huffman_tables->curr_segment = root;
+ // Allocate root.
+ root->start = (HuffmanCode*)WebPSafeMalloc(size, sizeof(*root->start));
+ if (root->start == NULL) return 0;
+ root->curr_table = root->start;
+ root->next = NULL;
+ root->size = size;
+ return 1;
+}
+
+void VP8LHuffmanTablesDeallocate(HuffmanTables* const huffman_tables) {
+ HuffmanTablesSegment *current, *next;
+ if (huffman_tables == NULL) return;
+ // Free the root node.
+ current = &huffman_tables->root;
+ next = current->next;
+ WebPSafeFree(current->start);
+ current->start = NULL;
+ current->next = NULL;
+ current = next;
+ // Free the following nodes.
+ while (current != NULL) {
+ next = current->next;
+ WebPSafeFree(current->start);
+ WebPSafeFree(current);
+ current = next;
+ }
+}
diff --git a/src/utils/huffman_utils.h b/src/utils/huffman_utils.h
index 13b7ad1..98415c5 100644
--- a/src/utils/huffman_utils.h
+++ b/src/utils/huffman_utils.h
@@ -43,6 +43,29 @@ typedef struct {
 // or non-literal symbol otherwise
 } HuffmanCode32;

+// Contiguous memory segment of HuffmanCodes.
+typedef struct HuffmanTablesSegment {
+ HuffmanCode* start;
+ // Pointer to where we are writing into the segment. Starts at 'start' and
+ // cannot go beyond 'start' + 'size'.
+ HuffmanCode* curr_table;
+ // Pointer to the next segment in the chain.
+ struct HuffmanTablesSegment* next;
+ int size;
+} HuffmanTablesSegment;
+
+// Chained memory segments of HuffmanCodes.
+typedef struct HuffmanTables {
+ HuffmanTablesSegment root;
+ // Currently processed segment. At first, this is 'root'.
+ HuffmanTablesSegment* curr_segment;
+} HuffmanTables;
+
+// Allocates a HuffmanTables with 'size' contiguous HuffmanCodes. Returns 0 on
+// memory allocation error, 1 otherwise.
+int VP8LHuffmanTablesAllocate(int size, HuffmanTables* huffman_tables);
+void VP8LHuffmanTablesDeallocate(HuffmanTables* const huffman_tables);
+
 #define HUFFMAN_PACKED_BITS 6
 #define HUFFMAN_PACKED_TABLE_SIZE (1u << HUFFMAN_PACKED_BITS)

@@ -78,9 +101,7 @@ void VP8LHtreeGroupsFree(HTreeGroup* const htree_groups);
 // the huffman table.
 // Returns built table size or 0 in case of error (invalid tree or
 // memory error).
-// If root_table is NULL, it returns 0 if a lookup cannot be built, something
-// > 0 otherwise (but not the table size).
-int VP8LBuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
+int VP8LBuildHuffmanTable(HuffmanTables* const root_table, int root_bits,
 const int code_lengths[], int code_lengths_size);

 #ifdef __cplusplus
--
2.40.0

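Note (not part of the patch above): the following is a minimal, hypothetical C sketch of how the segmented-table API introduced by this backport is meant to be used, mirroring the post-patch flow in ReadHuffmanCodeLengths()/ReadHuffmanCodes(): zero-initialize a HuffmanTables, size its root segment with VP8LHuffmanTablesAllocate(), build into it with VP8LBuildHuffmanTable() (which validates the code lengths in a first pass and chains a larger segment if the root one is too small), and always release it with VP8LHuffmanTablesDeallocate(), including on error paths. It assumes compilation inside the libwebp source tree (for huffman_utils.h and the WebPSafe* allocators); the wrapper name and its parameters are illustrative only.

#include <string.h>

#include "src/utils/huffman_utils.h"  /* HuffmanTables API added by this patch */

/* Hypothetical wrapper mirroring the patched decoder's call sequence.
 * Returns the built table size, or 0 on invalid code lengths / OOM. */
int BuildTableSketch(const int code_lengths[], int code_lengths_size,
                     int root_bits) {
  HuffmanTables tables;
  int total_size = 0;
  /* The decoder expects the structure to start out zeroed (see the asserts
   * added in ReadHuffmanCodes()). */
  memset(&tables, 0, sizeof(tables));
  if (VP8LHuffmanTablesAllocate(1 << root_bits, &tables)) {
    /* A first pass validates the lengths and computes the needed size; a
     * larger segment is chained internally if 1 << root_bits is too small. */
    total_size = VP8LBuildHuffmanTable(&tables, root_bits,
                                       code_lengths, code_lengths_size);
    /* Lookups would then go through tables.curr_segment->start[...]. */
  }
  /* Frees the root segment's buffer and any chained segments. */
  VP8LHuffmanTablesDeallocate(&tables);
  return total_size;
}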