libwebp: Sync with upstream 1.3.2
parent b467afe65d
commit 56cfeda7d2
@@ -417,7 +417,7 @@ Files extracted from upstream source:
 ## libwebp
 
 - Upstream: https://chromium.googlesource.com/webm/libwebp/
-- Version: 1.3.1 (fd7bb21c0cb56e8a82e9bfa376164b842f433f3b, 2023)
+- Version: 1.3.2 (ca332209cb5567c9b249c86788cb2dbf8847e760, 2023)
 - License: BSD-3-Clause
 
 Files extracted from upstream source:
@@ -32,7 +32,7 @@ extern "C" {
 // version numbers
 #define DEC_MAJ_VERSION 1
 #define DEC_MIN_VERSION 3
-#define DEC_REV_VERSION 1
+#define DEC_REV_VERSION 2
 
 // YUV-cache parameters. Cache is 32-bytes wide (= one cacheline).
 // Constraints are: We need to store one 16x16 block of luma samples (y),
@@ -253,11 +253,11 @@ static int ReadHuffmanCodeLengths(
   int symbol;
   int max_symbol;
   int prev_code_len = DEFAULT_CODE_LENGTH;
-  HuffmanCode table[1 << LENGTHS_TABLE_BITS];
+  HuffmanTables tables;
 
-  if (!VP8LBuildHuffmanTable(table, LENGTHS_TABLE_BITS,
-                             code_length_code_lengths,
-                             NUM_CODE_LENGTH_CODES)) {
+  if (!VP8LHuffmanTablesAllocate(1 << LENGTHS_TABLE_BITS, &tables) ||
+      !VP8LBuildHuffmanTable(&tables, LENGTHS_TABLE_BITS,
+                             code_length_code_lengths, NUM_CODE_LENGTH_CODES)) {
     goto End;
   }
 
@@ -277,7 +277,7 @@ static int ReadHuffmanCodeLengths(
     int code_len;
     if (max_symbol-- == 0) break;
     VP8LFillBitWindow(br);
-    p = &table[VP8LPrefetchBits(br) & LENGTHS_TABLE_MASK];
+    p = &tables.curr_segment->start[VP8LPrefetchBits(br) & LENGTHS_TABLE_MASK];
     VP8LSetBitPos(br, br->bit_pos_ + p->bits);
     code_len = p->value;
     if (code_len < kCodeLengthLiterals) {
@@ -300,6 +300,7 @@ static int ReadHuffmanCodeLengths(
   ok = 1;
 
  End:
+  VP8LHuffmanTablesDeallocate(&tables);
   if (!ok) dec->status_ = VP8_STATUS_BITSTREAM_ERROR;
   return ok;
 }
@@ -307,7 +308,8 @@ static int ReadHuffmanCodeLengths(
 // 'code_lengths' is pre-allocated temporary buffer, used for creating Huffman
 // tree.
 static int ReadHuffmanCode(int alphabet_size, VP8LDecoder* const dec,
-                           int* const code_lengths, HuffmanCode* const table) {
+                           int* const code_lengths,
+                           HuffmanTables* const table) {
   int ok = 0;
   int size = 0;
   VP8LBitReader* const br = &dec->br_;
@@ -362,8 +364,7 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
   VP8LMetadata* const hdr = &dec->hdr_;
   uint32_t* huffman_image = NULL;
   HTreeGroup* htree_groups = NULL;
-  HuffmanCode* huffman_tables = NULL;
-  HuffmanCode* huffman_table = NULL;
+  HuffmanTables* huffman_tables = &hdr->huffman_tables_;
   int num_htree_groups = 1;
   int num_htree_groups_max = 1;
   int max_alphabet_size = 0;
@@ -372,6 +373,10 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
   int* mapping = NULL;
   int ok = 0;
 
+  // Check the table has been 0 initialized (through InitMetadata).
+  assert(huffman_tables->root.start == NULL);
+  assert(huffman_tables->curr_segment == NULL);
+
   if (allow_recursion && VP8LReadBits(br, 1)) {
     // use meta Huffman codes.
     const int huffman_precision = VP8LReadBits(br, 3) + 2;
@@ -434,16 +439,15 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
 
   code_lengths = (int*)WebPSafeCalloc((uint64_t)max_alphabet_size,
                                       sizeof(*code_lengths));
-  huffman_tables = (HuffmanCode*)WebPSafeMalloc(num_htree_groups * table_size,
-                                                sizeof(*huffman_tables));
   htree_groups = VP8LHtreeGroupsNew(num_htree_groups);
 
-  if (htree_groups == NULL || code_lengths == NULL || huffman_tables == NULL) {
+  if (htree_groups == NULL || code_lengths == NULL ||
+      !VP8LHuffmanTablesAllocate(num_htree_groups * table_size,
+                                 huffman_tables)) {
     dec->status_ = VP8_STATUS_OUT_OF_MEMORY;
     goto Error;
   }
 
-  huffman_table = huffman_tables;
   for (i = 0; i < num_htree_groups_max; ++i) {
     // If the index "i" is unused in the Huffman image, just make sure the
     // coefficients are valid but do not store them.
@@ -468,19 +472,20 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
       int max_bits = 0;
       for (j = 0; j < HUFFMAN_CODES_PER_META_CODE; ++j) {
         int alphabet_size = kAlphabetSize[j];
-        htrees[j] = huffman_table;
         if (j == 0 && color_cache_bits > 0) {
           alphabet_size += (1 << color_cache_bits);
         }
-        size = ReadHuffmanCode(alphabet_size, dec, code_lengths, huffman_table);
+        size =
+            ReadHuffmanCode(alphabet_size, dec, code_lengths, huffman_tables);
+        htrees[j] = huffman_tables->curr_segment->curr_table;
         if (size == 0) {
           goto Error;
         }
         if (is_trivial_literal && kLiteralMap[j] == 1) {
-          is_trivial_literal = (huffman_table->bits == 0);
+          is_trivial_literal = (htrees[j]->bits == 0);
         }
-        total_size += huffman_table->bits;
-        huffman_table += size;
+        total_size += htrees[j]->bits;
+        huffman_tables->curr_segment->curr_table += size;
         if (j <= ALPHA) {
           int local_max_bits = code_lengths[0];
           int k;
@@ -515,14 +520,13 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
   hdr->huffman_image_ = huffman_image;
   hdr->num_htree_groups_ = num_htree_groups;
   hdr->htree_groups_ = htree_groups;
-  hdr->huffman_tables_ = huffman_tables;
 
  Error:
   WebPSafeFree(code_lengths);
   WebPSafeFree(mapping);
   if (!ok) {
     WebPSafeFree(huffman_image);
-    WebPSafeFree(huffman_tables);
+    VP8LHuffmanTablesDeallocate(huffman_tables);
     VP8LHtreeGroupsFree(htree_groups);
   }
   return ok;
@@ -1358,7 +1362,7 @@ static void ClearMetadata(VP8LMetadata* const hdr) {
   assert(hdr != NULL);
 
   WebPSafeFree(hdr->huffman_image_);
-  WebPSafeFree(hdr->huffman_tables_);
+  VP8LHuffmanTablesDeallocate(&hdr->huffman_tables_);
   VP8LHtreeGroupsFree(hdr->htree_groups_);
   VP8LColorCacheClear(&hdr->color_cache_);
   VP8LColorCacheClear(&hdr->saved_color_cache_);
@@ -1673,7 +1677,7 @@ int VP8LDecodeImage(VP8LDecoder* const dec) {
 
   if (dec == NULL) return 0;
 
-  assert(dec->hdr_.huffman_tables_ != NULL);
+  assert(dec->hdr_.huffman_tables_.root.start != NULL);
   assert(dec->hdr_.htree_groups_ != NULL);
   assert(dec->hdr_.num_htree_groups_ > 0);
 
@@ -51,7 +51,7 @@ typedef struct {
   uint32_t* huffman_image_;
   int num_htree_groups_;
   HTreeGroup* htree_groups_;
-  HuffmanCode* huffman_tables_;
+  HuffmanTables huffman_tables_;
 } VP8LMetadata;
 
 typedef struct VP8LDecoder VP8LDecoder;
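Taken together, the vp8l_dec.c and vp8li_dec.h hunks above amount to a simple ownership change: VP8LMetadata now embeds a HuffmanTables value, ReadHuffmanCodes/ReadHuffmanCodeLengths allocate its segments and build into them, and ClearMetadata (or the error paths) release them with VP8LHuffmanTablesDeallocate. The sketch below exercises that same allocate/build/release cycle through the three functions this commit introduces. It is illustrative only, not decoder code: ROOT_BITS and the toy code lengths are invented here, and it assumes it is compiled and linked inside the libwebp source tree (upstream header layout).

// Minimal lifecycle sketch for the new HuffmanTables API (assumed to be
// built from the libwebp source root and linked with the utils objects).
#include <stdio.h>

#include "src/utils/huffman_utils.h"

#define ROOT_BITS 8  // illustrative root table size, not an upstream constant

int main(void) {
  // Code lengths, in symbol order, for a tiny fully balanced 4-symbol code.
  const int code_lengths[4] = { 2, 2, 2, 2 };
  HuffmanTables tables;
  int built_size;

  // One contiguous root segment; VP8LBuildHuffmanTable chains an extra
  // segment on its own if this turns out to be too small for the table.
  if (!VP8LHuffmanTablesAllocate(1 << ROOT_BITS, &tables)) return 1;

  // Returns the number of HuffmanCode entries the table occupies, 0 on error.
  built_size = VP8LBuildHuffmanTable(&tables, ROOT_BITS, code_lengths, 4);
  if (built_size == 0) {
    VP8LHuffmanTablesDeallocate(&tables);
    return 1;
  }

  // The freshly built table starts at the current segment's write cursor;
  // the decoder advances curr_table by 'built_size' after each htree group.
  printf("built %d entries at %p\n", built_size,
         (void*)tables.curr_segment->curr_table);

  VP8LHuffmanTablesDeallocate(&tables);
  return 0;
}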
@@ -25,7 +25,7 @@
 
 #define DMUX_MAJ_VERSION 1
 #define DMUX_MIN_VERSION 3
-#define DMUX_REV_VERSION 1
+#define DMUX_REV_VERSION 2
 
 typedef struct {
   size_t start_;  // start location of the data
@@ -32,7 +32,7 @@ extern "C" {
 // version numbers
 #define ENC_MAJ_VERSION 1
 #define ENC_MIN_VERSION 3
-#define ENC_REV_VERSION 1
+#define ENC_REV_VERSION 2
 
 enum { MAX_LF_LEVELS = 64,       // Maximum loop filter level
        MAX_VARIABLE_LEVEL = 67,  // last (inclusive) level with variable cost
@@ -29,7 +29,7 @@ extern "C" {
 
 #define MUX_MAJ_VERSION 1
 #define MUX_MIN_VERSION 3
-#define MUX_REV_VERSION 1
+#define MUX_REV_VERSION 2
 
 // Chunk object.
 typedef struct WebPChunk WebPChunk;
@@ -177,21 +177,24 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
       if (num_open < 0) {
        return 0;
       }
-      if (root_table == NULL) continue;
       for (; count[len] > 0; --count[len]) {
         HuffmanCode code;
         if ((key & mask) != low) {
-          table += table_size;
+          if (root_table != NULL) table += table_size;
           table_bits = NextTableBitSize(count, len, root_bits);
           table_size = 1 << table_bits;
           total_size += table_size;
           low = key & mask;
-          root_table[low].bits = (uint8_t)(table_bits + root_bits);
-          root_table[low].value = (uint16_t)((table - root_table) - low);
+          if (root_table != NULL) {
+            root_table[low].bits = (uint8_t)(table_bits + root_bits);
+            root_table[low].value = (uint16_t)((table - root_table) - low);
+          }
         }
-        code.bits = (uint8_t)(len - root_bits);
-        code.value = (uint16_t)sorted[symbol++];
-        ReplicateValue(&table[key >> root_bits], step, table_size, code);
+        if (root_table != NULL) {
+          code.bits = (uint8_t)(len - root_bits);
+          code.value = (uint16_t)sorted[symbol++];
+          ReplicateValue(&table[key >> root_bits], step, table_size, code);
+        }
         key = GetNextKey(key, len);
       }
     }
@@ -211,25 +214,83 @@ static int BuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
   ((1 << MAX_CACHE_BITS) + NUM_LITERAL_CODES + NUM_LENGTH_CODES)
 // Cut-off value for switching between heap and stack allocation.
 #define SORTED_SIZE_CUTOFF 512
-int VP8LBuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
+int VP8LBuildHuffmanTable(HuffmanTables* const root_table, int root_bits,
                           const int code_lengths[], int code_lengths_size) {
-  int total_size;
+  const int total_size =
+      BuildHuffmanTable(NULL, root_bits, code_lengths, code_lengths_size, NULL);
   assert(code_lengths_size <= MAX_CODE_LENGTHS_SIZE);
-  if (root_table == NULL) {
-    total_size = BuildHuffmanTable(NULL, root_bits,
-                                   code_lengths, code_lengths_size, NULL);
-  } else if (code_lengths_size <= SORTED_SIZE_CUTOFF) {
+  if (total_size == 0 || root_table == NULL) return total_size;
+
+  if (root_table->curr_segment->curr_table + total_size >=
+      root_table->curr_segment->start + root_table->curr_segment->size) {
+    // If 'root_table' does not have enough memory, allocate a new segment.
+    // The available part of root_table->curr_segment is left unused because we
+    // need a contiguous buffer.
+    const int segment_size = root_table->curr_segment->size;
+    struct HuffmanTablesSegment* next =
+        (HuffmanTablesSegment*)WebPSafeMalloc(1, sizeof(*next));
+    if (next == NULL) return 0;
+    // Fill the new segment.
+    // We need at least 'total_size' but if that value is small, it is better to
+    // allocate a big chunk to prevent more allocations later. 'segment_size' is
+    // therefore chosen (any other arbitrary value could be chosen).
+    next->size = total_size > segment_size ? total_size : segment_size;
+    next->start =
+        (HuffmanCode*)WebPSafeMalloc(next->size, sizeof(*next->start));
+    if (next->start == NULL) {
+      WebPSafeFree(next);
+      return 0;
+    }
+    next->curr_table = next->start;
+    next->next = NULL;
+    // Point to the new segment.
+    root_table->curr_segment->next = next;
+    root_table->curr_segment = next;
+  }
+  if (code_lengths_size <= SORTED_SIZE_CUTOFF) {
     // use local stack-allocated array.
     uint16_t sorted[SORTED_SIZE_CUTOFF];
-    total_size = BuildHuffmanTable(root_table, root_bits,
-                                   code_lengths, code_lengths_size, sorted);
+    BuildHuffmanTable(root_table->curr_segment->curr_table, root_bits,
+                      code_lengths, code_lengths_size, sorted);
   } else {  // rare case. Use heap allocation.
     uint16_t* const sorted =
         (uint16_t*)WebPSafeMalloc(code_lengths_size, sizeof(*sorted));
     if (sorted == NULL) return 0;
-    total_size = BuildHuffmanTable(root_table, root_bits,
-                                   code_lengths, code_lengths_size, sorted);
+    BuildHuffmanTable(root_table->curr_segment->curr_table, root_bits,
+                      code_lengths, code_lengths_size, sorted);
     WebPSafeFree(sorted);
   }
   return total_size;
 }
+
+int VP8LHuffmanTablesAllocate(int size, HuffmanTables* huffman_tables) {
+  // Have 'segment' point to the first segment for now, 'root'.
+  HuffmanTablesSegment* const root = &huffman_tables->root;
+  huffman_tables->curr_segment = root;
+  // Allocate root.
+  root->start = (HuffmanCode*)WebPSafeMalloc(size, sizeof(*root->start));
+  if (root->start == NULL) return 0;
+  root->curr_table = root->start;
+  root->next = NULL;
+  root->size = size;
+  return 1;
+}
+
+void VP8LHuffmanTablesDeallocate(HuffmanTables* const huffman_tables) {
+  HuffmanTablesSegment *current, *next;
+  if (huffman_tables == NULL) return;
+  // Free the root node.
+  current = &huffman_tables->root;
+  next = current->next;
+  WebPSafeFree(current->start);
+  current->start = NULL;
+  current->next = NULL;
+  current = next;
+  // Free the following nodes.
+  while (current != NULL) {
+    next = current->next;
+    WebPSafeFree(current->start);
+    WebPSafeFree(current);
+    current = next;
+  }
+}
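The core trick in the VP8LBuildHuffmanTable rewrite above is the dry run: the static BuildHuffmanTable is first called with a NULL root_table so it only counts how many HuffmanCode entries the table will need, and only then is the segment chain grown and filled for real. Below is a self-contained sketch of that same size-then-fill convention, independent of libwebp; expand_runs and its run-length input are invented for illustration and are not upstream APIs.

// Size-then-fill pattern: a NULL output pointer means "count only",
// mirroring BuildHuffmanTable's NULL-root_table dry run.
#include <stdio.h>
#include <stdlib.h>

// Expands (value, repeat) pairs into 'out'. When 'out' is NULL the function
// performs the counting pass only. Returns the element count, or 0 on error.
static int expand_runs(int* out, const int* runs, int num_pairs) {
  int total = 0;
  int i, j;
  for (i = 0; i < num_pairs; ++i) {
    const int value = runs[2 * i];
    const int repeat = runs[2 * i + 1];
    if (repeat <= 0) return 0;              // invalid input: report failure
    for (j = 0; j < repeat; ++j) {
      if (out != NULL) out[total] = value;  // second pass: actually write
      ++total;                              // both passes: count
    }
  }
  return total;
}

int main(void) {
  const int runs[] = { 7, 3, 4, 2 };             // 7 three times, 4 twice
  const int size = expand_runs(NULL, runs, 2);   // pass 1: size only
  int* buf;
  int i;
  if (size == 0) return 1;
  buf = (int*)malloc(size * sizeof(*buf));       // allocate exactly enough
  if (buf == NULL) return 1;
  expand_runs(buf, runs, 2);                     // pass 2: fill
  for (i = 0; i < size; ++i) printf("%d ", buf[i]);
  printf("\n");
  free(buf);
  return 0;
}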
@@ -43,6 +43,29 @@ typedef struct {
                    // or non-literal symbol otherwise
 } HuffmanCode32;
 
+// Contiguous memory segment of HuffmanCodes.
+typedef struct HuffmanTablesSegment {
+  HuffmanCode* start;
+  // Pointer to where we are writing into the segment. Starts at 'start' and
+  // cannot go beyond 'start' + 'size'.
+  HuffmanCode* curr_table;
+  // Pointer to the next segment in the chain.
+  struct HuffmanTablesSegment* next;
+  int size;
+} HuffmanTablesSegment;
+
+// Chained memory segments of HuffmanCodes.
+typedef struct HuffmanTables {
+  HuffmanTablesSegment root;
+  // Currently processed segment. At first, this is 'root'.
+  HuffmanTablesSegment* curr_segment;
+} HuffmanTables;
+
+// Allocates a HuffmanTables with 'size' contiguous HuffmanCodes. Returns 0 on
+// memory allocation error, 1 otherwise.
+int VP8LHuffmanTablesAllocate(int size, HuffmanTables* huffman_tables);
+void VP8LHuffmanTablesDeallocate(HuffmanTables* const huffman_tables);
+
 #define HUFFMAN_PACKED_BITS 6
 #define HUFFMAN_PACKED_TABLE_SIZE (1u << HUFFMAN_PACKED_BITS)
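The header hunk above defines the container behind all of this: each HuffmanTablesSegment owns one contiguous array, curr_table is a write cursor that never passes start + size, and next chains further segments, while HuffmanTables tracks the embedded root plus the segment currently being written. The stand-alone analogue below makes those invariants concrete with plain malloc; Chain, Segment and chain_alloc are invented names for illustration, not libwebp types.

// Illustrative analogue of HuffmanTablesSegment/HuffmanTables: a bump
// allocator whose backing store is a chain of contiguous blocks.
#include <stdlib.h>
#include <string.h>

typedef struct Segment {
  int* start;            // owned contiguous block
  int* curr;             // next free slot, never beyond start + size
  struct Segment* next;  // following block in the chain
  int size;
} Segment;

typedef struct {
  Segment root;           // first block, embedded like HuffmanTables::root
  Segment* curr_segment;  // block currently being written
} Chain;

static int chain_init(Chain* const c, int size) {
  c->root.start = (int*)malloc(size * sizeof(*c->root.start));
  if (c->root.start == NULL) return 0;
  c->root.curr = c->root.start;
  c->root.next = NULL;
  c->root.size = size;
  c->curr_segment = &c->root;
  return 1;
}

// Returns a contiguous span of 'count' ints, chaining a new block when the
// current one cannot hold the request in one piece (its leftover is simply
// skipped, mirroring how a fresh HuffmanCode segment is started upstream).
static int* chain_alloc(Chain* const c, int count) {
  Segment* seg = c->curr_segment;
  if (seg->curr + count > seg->start + seg->size) {
    const int size = (count > seg->size) ? count : seg->size;
    Segment* const next = (Segment*)malloc(sizeof(*next));
    if (next == NULL) return NULL;
    next->start = (int*)malloc(size * sizeof(*next->start));
    if (next->start == NULL) {
      free(next);
      return NULL;
    }
    next->curr = next->start;
    next->next = NULL;
    next->size = size;
    seg->next = next;
    c->curr_segment = next;
    seg = next;
  }
  {
    int* const span = seg->curr;
    seg->curr += count;  // bump the write cursor within the segment
    return span;
  }
}

static void chain_free(Chain* const c) {
  Segment* current = c->root.next;
  free(c->root.start);           // the root block is embedded, only its data is freed
  while (current != NULL) {
    Segment* const next = current->next;
    free(current->start);
    free(current);
    current = next;
  }
}

int main(void) {
  Chain c;
  int* a;
  int* b;
  if (!chain_init(&c, 4)) return 1;
  a = chain_alloc(&c, 3);  // fits in the root block
  b = chain_alloc(&c, 6);  // does not fit contiguously: chains a new block
  if (a != NULL) memset(a, 0, 3 * sizeof(*a));
  if (b != NULL) memset(b, 0, 6 * sizeof(*b));
  chain_free(&c);
  return 0;
}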
@@ -78,9 +101,7 @@ void VP8LHtreeGroupsFree(HTreeGroup* const htree_groups);
 // the huffman table.
 // Returns built table size or 0 in case of error (invalid tree or
 // memory error).
-// If root_table is NULL, it returns 0 if a lookup cannot be built, something
-// > 0 otherwise (but not the table size).
-int VP8LBuildHuffmanTable(HuffmanCode* const root_table, int root_bits,
+int VP8LBuildHuffmanTable(HuffmanTables* const root_table, int root_bits,
                           const int code_lengths[], int code_lengths_size);
 
 #ifdef __cplusplus