
Update libwebp to 0.6.1

* lossless performance and compression improvements + a new 'cruncher' mode (-m 6 -q 100)
* ARM performance improvements with clang (15-20% w/ndk r15c)
* webp-js: emscripten/webassembly based javascript decoder
* miscellaneous bug & build fixes
Author: volzhs
Date: 2017-12-12 02:11:11 +09:00
Parent: 64d104756c
Commit: 043103fe6a
162 changed files with 7580 additions and 6214 deletions


@@ -0,0 +1,454 @@
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the COPYING file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
// -----------------------------------------------------------------------------
//
// AnimDecoder implementation.
//
#ifdef HAVE_CONFIG_H
#include "src/webp/config.h"
#endif
#include <assert.h>
#include <string.h>
#include "src/utils/utils.h"
#include "src/webp/decode.h"
#include "src/webp/demux.h"
#define NUM_CHANNELS 4
typedef void (*BlendRowFunc)(uint32_t* const, const uint32_t* const, int);
static void BlendPixelRowNonPremult(uint32_t* const src,
const uint32_t* const dst, int num_pixels);
static void BlendPixelRowPremult(uint32_t* const src, const uint32_t* const dst,
int num_pixels);
struct WebPAnimDecoder {
WebPDemuxer* demux_; // Demuxer created from given WebP bitstream.
WebPDecoderConfig config_; // Decoder config.
// Note: we use a pointer to a function blending multiple pixels at a time to
// allow possible inlining of the per-pixel blending function.
BlendRowFunc blend_func_; // Pointer to the chosen blend row function.
WebPAnimInfo info_; // Global info about the animation.
uint8_t* curr_frame_; // Current canvas (not disposed).
uint8_t* prev_frame_disposed_; // Previous canvas (properly disposed).
int prev_frame_timestamp_; // Previous frame timestamp (milliseconds).
WebPIterator prev_iter_; // Iterator object for previous frame.
int prev_frame_was_keyframe_; // True if previous frame was a keyframe.
int next_frame_; // Index of the next frame to be decoded
// (starting from 1).
};
static void DefaultDecoderOptions(WebPAnimDecoderOptions* const dec_options) {
dec_options->color_mode = MODE_RGBA;
dec_options->use_threads = 0;
}
int WebPAnimDecoderOptionsInitInternal(WebPAnimDecoderOptions* dec_options,
int abi_version) {
if (dec_options == NULL ||
WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_DEMUX_ABI_VERSION)) {
return 0;
}
DefaultDecoderOptions(dec_options);
return 1;
}
static int ApplyDecoderOptions(const WebPAnimDecoderOptions* const dec_options,
WebPAnimDecoder* const dec) {
WEBP_CSP_MODE mode;
WebPDecoderConfig* config = &dec->config_;
assert(dec_options != NULL);
mode = dec_options->color_mode;
if (mode != MODE_RGBA && mode != MODE_BGRA &&
mode != MODE_rgbA && mode != MODE_bgrA) {
return 0;
}
dec->blend_func_ = (mode == MODE_RGBA || mode == MODE_BGRA)
? &BlendPixelRowNonPremult
: &BlendPixelRowPremult;
WebPInitDecoderConfig(config);
config->output.colorspace = mode;
config->output.is_external_memory = 1;
config->options.use_threads = dec_options->use_threads;
// Note: config->output.u.RGBA is set at the time of decoding each frame.
return 1;
}
WebPAnimDecoder* WebPAnimDecoderNewInternal(
const WebPData* webp_data, const WebPAnimDecoderOptions* dec_options,
int abi_version) {
WebPAnimDecoderOptions options;
WebPAnimDecoder* dec = NULL;
if (webp_data == NULL ||
WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_DEMUX_ABI_VERSION)) {
return NULL;
}
// Note: calloc() so that the pointer members are initialized to NULL.
dec = (WebPAnimDecoder*)WebPSafeCalloc(1ULL, sizeof(*dec));
if (dec == NULL) goto Error;
if (dec_options != NULL) {
options = *dec_options;
} else {
DefaultDecoderOptions(&options);
}
if (!ApplyDecoderOptions(&options, dec)) goto Error;
dec->demux_ = WebPDemux(webp_data);
if (dec->demux_ == NULL) goto Error;
dec->info_.canvas_width = WebPDemuxGetI(dec->demux_, WEBP_FF_CANVAS_WIDTH);
dec->info_.canvas_height = WebPDemuxGetI(dec->demux_, WEBP_FF_CANVAS_HEIGHT);
dec->info_.loop_count = WebPDemuxGetI(dec->demux_, WEBP_FF_LOOP_COUNT);
dec->info_.bgcolor = WebPDemuxGetI(dec->demux_, WEBP_FF_BACKGROUND_COLOR);
dec->info_.frame_count = WebPDemuxGetI(dec->demux_, WEBP_FF_FRAME_COUNT);
// Note: calloc() because we fill frame with zeroes as well.
dec->curr_frame_ = (uint8_t*)WebPSafeCalloc(
dec->info_.canvas_width * NUM_CHANNELS, dec->info_.canvas_height);
if (dec->curr_frame_ == NULL) goto Error;
dec->prev_frame_disposed_ = (uint8_t*)WebPSafeCalloc(
dec->info_.canvas_width * NUM_CHANNELS, dec->info_.canvas_height);
if (dec->prev_frame_disposed_ == NULL) goto Error;
WebPAnimDecoderReset(dec);
return dec;
Error:
WebPAnimDecoderDelete(dec);
return NULL;
}
int WebPAnimDecoderGetInfo(const WebPAnimDecoder* dec, WebPAnimInfo* info) {
if (dec == NULL || info == NULL) return 0;
*info = dec->info_;
return 1;
}
// Returns true if the frame covers the full canvas.
static int IsFullFrame(int width, int height, int canvas_width,
int canvas_height) {
return (width == canvas_width && height == canvas_height);
}
// Clear the canvas to transparent.
static int ZeroFillCanvas(uint8_t* buf, uint32_t canvas_width,
uint32_t canvas_height) {
const uint64_t size =
(uint64_t)canvas_width * canvas_height * NUM_CHANNELS * sizeof(*buf);
if (size != (size_t)size) return 0;
memset(buf, 0, (size_t)size);
return 1;
}
// Clear given frame rectangle to transparent.
static void ZeroFillFrameRect(uint8_t* buf, int buf_stride, int x_offset,
int y_offset, int width, int height) {
int j;
assert(width * NUM_CHANNELS <= buf_stride);
buf += y_offset * buf_stride + x_offset * NUM_CHANNELS;
for (j = 0; j < height; ++j) {
memset(buf, 0, width * NUM_CHANNELS);
buf += buf_stride;
}
}
// Copy width * height pixels from 'src' to 'dst'.
static int CopyCanvas(const uint8_t* src, uint8_t* dst,
uint32_t width, uint32_t height) {
const uint64_t size = (uint64_t)width * height * NUM_CHANNELS;
if (size != (size_t)size) return 0;
assert(src != NULL && dst != NULL);
memcpy(dst, src, (size_t)size);
return 1;
}
// Returns true if the current frame is a key-frame.
static int IsKeyFrame(const WebPIterator* const curr,
const WebPIterator* const prev,
int prev_frame_was_key_frame,
int canvas_width, int canvas_height) {
if (curr->frame_num == 1) {
return 1;
} else if ((!curr->has_alpha || curr->blend_method == WEBP_MUX_NO_BLEND) &&
IsFullFrame(curr->width, curr->height,
canvas_width, canvas_height)) {
return 1;
} else {
return (prev->dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) &&
(IsFullFrame(prev->width, prev->height, canvas_width,
canvas_height) ||
prev_frame_was_key_frame);
}
}
// Blend a single channel of 'src' over 'dst', given their alpha channel values.
// 'src' and 'dst' are assumed to be NOT pre-multiplied by alpha.
static uint8_t BlendChannelNonPremult(uint32_t src, uint8_t src_a,
uint32_t dst, uint8_t dst_a,
uint32_t scale, int shift) {
const uint8_t src_channel = (src >> shift) & 0xff;
const uint8_t dst_channel = (dst >> shift) & 0xff;
const uint32_t blend_unscaled = src_channel * src_a + dst_channel * dst_a;
assert(blend_unscaled < (1ULL << 32) / scale);
return (blend_unscaled * scale) >> 24;
}
// Blend 'src' over 'dst' assuming they are NOT pre-multiplied by alpha.
static uint32_t BlendPixelNonPremult(uint32_t src, uint32_t dst) {
const uint8_t src_a = (src >> 24) & 0xff;
if (src_a == 0) {
return dst;
} else {
const uint8_t dst_a = (dst >> 24) & 0xff;
// This is the approximate integer arithmetic for the actual formula:
// dst_factor_a = (dst_a * (255 - src_a)) / 255.
const uint8_t dst_factor_a = (dst_a * (256 - src_a)) >> 8;
const uint8_t blend_a = src_a + dst_factor_a;
const uint32_t scale = (1UL << 24) / blend_a;
const uint8_t blend_r =
BlendChannelNonPremult(src, src_a, dst, dst_factor_a, scale, 0);
const uint8_t blend_g =
BlendChannelNonPremult(src, src_a, dst, dst_factor_a, scale, 8);
const uint8_t blend_b =
BlendChannelNonPremult(src, src_a, dst, dst_factor_a, scale, 16);
assert(src_a + dst_factor_a < 256);
return (blend_r << 0) |
(blend_g << 8) |
(blend_b << 16) |
((uint32_t)blend_a << 24);
}
}
// Blend 'num_pixels' in 'src' over 'dst' assuming they are NOT pre-multiplied
// by alpha.
static void BlendPixelRowNonPremult(uint32_t* const src,
const uint32_t* const dst, int num_pixels) {
int i;
for (i = 0; i < num_pixels; ++i) {
const uint8_t src_alpha = (src[i] >> 24) & 0xff;
if (src_alpha != 0xff) {
src[i] = BlendPixelNonPremult(src[i], dst[i]);
}
}
}
// Individually multiply each channel in 'pix' by 'scale'.
static WEBP_INLINE uint32_t ChannelwiseMultiply(uint32_t pix, uint32_t scale) {
uint32_t mask = 0x00FF00FF;
uint32_t rb = ((pix & mask) * scale) >> 8;
uint32_t ag = ((pix >> 8) & mask) * scale;
return (rb & mask) | (ag & ~mask);
}
// Blend 'src' over 'dst' assuming they are pre-multiplied by alpha.
static uint32_t BlendPixelPremult(uint32_t src, uint32_t dst) {
const uint8_t src_a = (src >> 24) & 0xff;
return src + ChannelwiseMultiply(dst, 256 - src_a);
}
// Blend 'num_pixels' in 'src' over 'dst' assuming they are pre-multiplied by
// alpha.
static void BlendPixelRowPremult(uint32_t* const src, const uint32_t* const dst,
int num_pixels) {
int i;
for (i = 0; i < num_pixels; ++i) {
const uint8_t src_alpha = (src[i] >> 24) & 0xff;
if (src_alpha != 0xff) {
src[i] = BlendPixelPremult(src[i], dst[i]);
}
}
}
// Returns two ranges (<left, width> pairs) at row 'canvas_y' that belong to
// 'src' but not 'dst'. A range is empty if the corresponding width is 0.
static void FindBlendRangeAtRow(const WebPIterator* const src,
const WebPIterator* const dst, int canvas_y,
int* const left1, int* const width1,
int* const left2, int* const width2) {
const int src_max_x = src->x_offset + src->width;
const int dst_max_x = dst->x_offset + dst->width;
const int dst_max_y = dst->y_offset + dst->height;
assert(canvas_y >= src->y_offset && canvas_y < (src->y_offset + src->height));
*left1 = -1;
*width1 = 0;
*left2 = -1;
*width2 = 0;
if (canvas_y < dst->y_offset || canvas_y >= dst_max_y ||
src->x_offset >= dst_max_x || src_max_x <= dst->x_offset) {
*left1 = src->x_offset;
*width1 = src->width;
return;
}
if (src->x_offset < dst->x_offset) {
*left1 = src->x_offset;
*width1 = dst->x_offset - src->x_offset;
}
if (src_max_x > dst_max_x) {
*left2 = dst_max_x;
*width2 = src_max_x - dst_max_x;
}
}
int WebPAnimDecoderGetNext(WebPAnimDecoder* dec,
uint8_t** buf_ptr, int* timestamp_ptr) {
WebPIterator iter;
uint32_t width;
uint32_t height;
int is_key_frame;
int timestamp;
BlendRowFunc blend_row;
if (dec == NULL || buf_ptr == NULL || timestamp_ptr == NULL) return 0;
if (!WebPAnimDecoderHasMoreFrames(dec)) return 0;
width = dec->info_.canvas_width;
height = dec->info_.canvas_height;
blend_row = dec->blend_func_;
// Get compressed frame.
if (!WebPDemuxGetFrame(dec->demux_, dec->next_frame_, &iter)) {
return 0;
}
timestamp = dec->prev_frame_timestamp_ + iter.duration;
// Initialize.
is_key_frame = IsKeyFrame(&iter, &dec->prev_iter_,
dec->prev_frame_was_keyframe_, width, height);
if (is_key_frame) {
if (!ZeroFillCanvas(dec->curr_frame_, width, height)) {
goto Error;
}
} else {
if (!CopyCanvas(dec->prev_frame_disposed_, dec->curr_frame_,
width, height)) {
goto Error;
}
}
// Decode.
{
const uint8_t* in = iter.fragment.bytes;
const size_t in_size = iter.fragment.size;
const size_t out_offset =
(iter.y_offset * width + iter.x_offset) * NUM_CHANNELS;
WebPDecoderConfig* const config = &dec->config_;
WebPRGBABuffer* const buf = &config->output.u.RGBA;
buf->stride = NUM_CHANNELS * width;
buf->size = buf->stride * iter.height;
buf->rgba = dec->curr_frame_ + out_offset;
if (WebPDecode(in, in_size, config) != VP8_STATUS_OK) {
goto Error;
}
}
// During the decoding of the current frame, we may have set some pixels to be
// transparent (i.e. alpha < 255). However, the value of each of these
// pixels should have been determined by blending it against the value of
// that pixel in the previous frame if the blending method is WEBP_MUX_BLEND.
if (iter.frame_num > 1 && iter.blend_method == WEBP_MUX_BLEND &&
!is_key_frame) {
if (dec->prev_iter_.dispose_method == WEBP_MUX_DISPOSE_NONE) {
int y;
// Blend transparent pixels with pixels in previous canvas.
for (y = 0; y < iter.height; ++y) {
const size_t offset =
(iter.y_offset + y) * width + iter.x_offset;
blend_row((uint32_t*)dec->curr_frame_ + offset,
(uint32_t*)dec->prev_frame_disposed_ + offset, iter.width);
}
} else {
int y;
assert(dec->prev_iter_.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND);
// We need to blend a transparent pixel with its value just after
// initialization. That is, blend it with:
// * Fully transparent pixel if it belongs to prevRect <-- No-op.
// * The pixel in the previous canvas otherwise <-- Need alpha-blending.
for (y = 0; y < iter.height; ++y) {
const int canvas_y = iter.y_offset + y;
int left1, width1, left2, width2;
FindBlendRangeAtRow(&iter, &dec->prev_iter_, canvas_y, &left1, &width1,
&left2, &width2);
if (width1 > 0) {
const size_t offset1 = canvas_y * width + left1;
blend_row((uint32_t*)dec->curr_frame_ + offset1,
(uint32_t*)dec->prev_frame_disposed_ + offset1, width1);
}
if (width2 > 0) {
const size_t offset2 = canvas_y * width + left2;
blend_row((uint32_t*)dec->curr_frame_ + offset2,
(uint32_t*)dec->prev_frame_disposed_ + offset2, width2);
}
}
}
}
// Update info of the previous frame and dispose it for the next iteration.
dec->prev_frame_timestamp_ = timestamp;
WebPDemuxReleaseIterator(&dec->prev_iter_);
dec->prev_iter_ = iter;
dec->prev_frame_was_keyframe_ = is_key_frame;
CopyCanvas(dec->curr_frame_, dec->prev_frame_disposed_, width, height);
if (dec->prev_iter_.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
ZeroFillFrameRect(dec->prev_frame_disposed_, width * NUM_CHANNELS,
dec->prev_iter_.x_offset, dec->prev_iter_.y_offset,
dec->prev_iter_.width, dec->prev_iter_.height);
}
++dec->next_frame_;
// All OK, fill in the values.
*buf_ptr = dec->curr_frame_;
*timestamp_ptr = timestamp;
return 1;
Error:
WebPDemuxReleaseIterator(&iter);
return 0;
}
int WebPAnimDecoderHasMoreFrames(const WebPAnimDecoder* dec) {
if (dec == NULL) return 0;
return (dec->next_frame_ <= (int)dec->info_.frame_count);
}
void WebPAnimDecoderReset(WebPAnimDecoder* dec) {
if (dec != NULL) {
dec->prev_frame_timestamp_ = 0;
WebPDemuxReleaseIterator(&dec->prev_iter_);
memset(&dec->prev_iter_, 0, sizeof(dec->prev_iter_));
dec->prev_frame_was_keyframe_ = 0;
dec->next_frame_ = 1;
}
}
const WebPDemuxer* WebPAnimDecoderGetDemuxer(const WebPAnimDecoder* dec) {
if (dec == NULL) return NULL;
return dec->demux_;
}
void WebPAnimDecoderDelete(WebPAnimDecoder* dec) {
if (dec != NULL) {
WebPDemuxReleaseIterator(&dec->prev_iter_);
WebPDemuxDelete(dec->demux_);
WebPSafeFree(dec->curr_frame_);
WebPSafeFree(dec->prev_frame_disposed_);
WebPSafeFree(dec);
}
}
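
For orientation, the WebPAnimDecoder API implemented above is typically driven as in the following sketch. This is an illustration, not part of the commit: it assumes an installed libwebp exposing the public <webp/demux.h> header, trims error handling, and expects the caller to have read the whole .webp file into memory beforehand.

// Minimal usage sketch (illustration only): drive the WebPAnimDecoder API
// implemented above. Assumes <webp/demux.h> from an installed libwebp and
// that the caller already holds the complete .webp file in memory.
#include <stdio.h>
#include <webp/demux.h>

int decode_animation(const uint8_t* file_bytes, size_t file_size) {
  WebPData webp_data;
  WebPAnimDecoderOptions opts;
  WebPAnimDecoder* dec;
  WebPAnimInfo info;

  WebPDataInit(&webp_data);
  webp_data.bytes = file_bytes;
  webp_data.size = file_size;

  if (!WebPAnimDecoderOptionsInit(&opts)) return 0;
  opts.color_mode = MODE_RGBA;  // one of the four modes ApplyDecoderOptions() accepts
  opts.use_threads = 1;

  dec = WebPAnimDecoderNew(&webp_data, &opts);
  if (dec == NULL) return 0;
  if (!WebPAnimDecoderGetInfo(dec, &info)) {
    WebPAnimDecoderDelete(dec);
    return 0;
  }

  // Each WebPAnimDecoderGetNext() call yields a fully reconstructed RGBA
  // canvas (owned by the decoder) and the frame's end timestamp in ms.
  while (WebPAnimDecoderHasMoreFrames(dec)) {
    uint8_t* frame_rgba;
    int timestamp_ms;
    if (!WebPAnimDecoderGetNext(dec, &frame_rgba, &timestamp_ms)) break;
    printf("frame at %d ms (%ux%u canvas)\n",
           timestamp_ms, info.canvas_width, info.canvas_height);
  }

  WebPAnimDecoderDelete(dec);
  return 1;
}

The returned buffer is exactly the blended/disposed canvas that WebPAnimDecoderGetNext() builds above; it remains valid only until the next GetNext(), Reset() or Delete() call.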

thirdparty/libwebp/src/demux/demux.c (vendored, new file)

@@ -0,0 +1,967 @@
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the COPYING file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
// -----------------------------------------------------------------------------
//
// WebP container demux.
//
#ifdef HAVE_CONFIG_H
#include "src/webp/config.h"
#endif
#include <assert.h>
#include <stdlib.h>
#include <string.h>
#include "src/utils/utils.h"
#include "src/webp/decode.h" // WebPGetFeatures
#include "src/webp/demux.h"
#include "src/webp/format_constants.h"
#define DMUX_MAJ_VERSION 0
#define DMUX_MIN_VERSION 3
#define DMUX_REV_VERSION 3
typedef struct {
size_t start_; // start location of the data
size_t end_; // end location
size_t riff_end_; // riff chunk end location, can be > end_.
size_t buf_size_; // size of the buffer
const uint8_t* buf_;
} MemBuffer;
typedef struct {
size_t offset_;
size_t size_;
} ChunkData;
typedef struct Frame {
int x_offset_, y_offset_;
int width_, height_;
int has_alpha_;
int duration_;
WebPMuxAnimDispose dispose_method_;
WebPMuxAnimBlend blend_method_;
int frame_num_;
int complete_; // img_components_ contains a full image.
ChunkData img_components_[2]; // 0=VP8{,L} 1=ALPH
struct Frame* next_;
} Frame;
typedef struct Chunk {
ChunkData data_;
struct Chunk* next_;
} Chunk;
struct WebPDemuxer {
MemBuffer mem_;
WebPDemuxState state_;
int is_ext_format_;
uint32_t feature_flags_;
int canvas_width_, canvas_height_;
int loop_count_;
uint32_t bgcolor_;
int num_frames_;
Frame* frames_;
Frame** frames_tail_;
Chunk* chunks_; // non-image chunks
Chunk** chunks_tail_;
};
typedef enum {
PARSE_OK,
PARSE_NEED_MORE_DATA,
PARSE_ERROR
} ParseStatus;
typedef struct ChunkParser {
uint8_t id[4];
ParseStatus (*parse)(WebPDemuxer* const dmux);
int (*valid)(const WebPDemuxer* const dmux);
} ChunkParser;
static ParseStatus ParseSingleImage(WebPDemuxer* const dmux);
static ParseStatus ParseVP8X(WebPDemuxer* const dmux);
static int IsValidSimpleFormat(const WebPDemuxer* const dmux);
static int IsValidExtendedFormat(const WebPDemuxer* const dmux);
static const ChunkParser kMasterChunks[] = {
{ { 'V', 'P', '8', ' ' }, ParseSingleImage, IsValidSimpleFormat },
{ { 'V', 'P', '8', 'L' }, ParseSingleImage, IsValidSimpleFormat },
{ { 'V', 'P', '8', 'X' }, ParseVP8X, IsValidExtendedFormat },
{ { '0', '0', '0', '0' }, NULL, NULL },
};
//------------------------------------------------------------------------------
int WebPGetDemuxVersion(void) {
return (DMUX_MAJ_VERSION << 16) | (DMUX_MIN_VERSION << 8) | DMUX_REV_VERSION;
}
// -----------------------------------------------------------------------------
// MemBuffer
static int RemapMemBuffer(MemBuffer* const mem,
const uint8_t* data, size_t size) {
if (size < mem->buf_size_) return 0; // can't remap to a shorter buffer!
mem->buf_ = data;
mem->end_ = mem->buf_size_ = size;
return 1;
}
static int InitMemBuffer(MemBuffer* const mem,
const uint8_t* data, size_t size) {
memset(mem, 0, sizeof(*mem));
return RemapMemBuffer(mem, data, size);
}
// Return the remaining data size available in 'mem'.
static WEBP_INLINE size_t MemDataSize(const MemBuffer* const mem) {
return (mem->end_ - mem->start_);
}
// Return true if 'size' exceeds the end of the RIFF chunk.
static WEBP_INLINE int SizeIsInvalid(const MemBuffer* const mem, size_t size) {
return (size > mem->riff_end_ - mem->start_);
}
static WEBP_INLINE void Skip(MemBuffer* const mem, size_t size) {
mem->start_ += size;
}
static WEBP_INLINE void Rewind(MemBuffer* const mem, size_t size) {
mem->start_ -= size;
}
static WEBP_INLINE const uint8_t* GetBuffer(MemBuffer* const mem) {
return mem->buf_ + mem->start_;
}
// Read from 'mem' and skip the read bytes.
static WEBP_INLINE uint8_t ReadByte(MemBuffer* const mem) {
const uint8_t byte = mem->buf_[mem->start_];
Skip(mem, 1);
return byte;
}
static WEBP_INLINE int ReadLE16s(MemBuffer* const mem) {
const uint8_t* const data = mem->buf_ + mem->start_;
const int val = GetLE16(data);
Skip(mem, 2);
return val;
}
static WEBP_INLINE int ReadLE24s(MemBuffer* const mem) {
const uint8_t* const data = mem->buf_ + mem->start_;
const int val = GetLE24(data);
Skip(mem, 3);
return val;
}
static WEBP_INLINE uint32_t ReadLE32(MemBuffer* const mem) {
const uint8_t* const data = mem->buf_ + mem->start_;
const uint32_t val = GetLE32(data);
Skip(mem, 4);
return val;
}
// -----------------------------------------------------------------------------
// Secondary chunk parsing
static void AddChunk(WebPDemuxer* const dmux, Chunk* const chunk) {
*dmux->chunks_tail_ = chunk;
chunk->next_ = NULL;
dmux->chunks_tail_ = &chunk->next_;
}
// Add a frame to the end of the list, ensuring the last frame is complete.
// Returns true on success, false otherwise.
static int AddFrame(WebPDemuxer* const dmux, Frame* const frame) {
const Frame* const last_frame = *dmux->frames_tail_;
if (last_frame != NULL && !last_frame->complete_) return 0;
*dmux->frames_tail_ = frame;
frame->next_ = NULL;
dmux->frames_tail_ = &frame->next_;
return 1;
}
static void SetFrameInfo(size_t start_offset, size_t size,
int frame_num, int complete,
const WebPBitstreamFeatures* const features,
Frame* const frame) {
frame->img_components_[0].offset_ = start_offset;
frame->img_components_[0].size_ = size;
frame->width_ = features->width;
frame->height_ = features->height;
frame->has_alpha_ |= features->has_alpha;
frame->frame_num_ = frame_num;
frame->complete_ = complete;
}
// Store image-bearing chunks to 'frame'. 'min_size' is an optional size
// requirement; it may be zero.
static ParseStatus StoreFrame(int frame_num, uint32_t min_size,
MemBuffer* const mem, Frame* const frame) {
int alpha_chunks = 0;
int image_chunks = 0;
int done = (MemDataSize(mem) < CHUNK_HEADER_SIZE ||
MemDataSize(mem) < min_size);
ParseStatus status = PARSE_OK;
if (done) return PARSE_NEED_MORE_DATA;
do {
const size_t chunk_start_offset = mem->start_;
const uint32_t fourcc = ReadLE32(mem);
const uint32_t payload_size = ReadLE32(mem);
const uint32_t payload_size_padded = payload_size + (payload_size & 1);
const size_t payload_available = (payload_size_padded > MemDataSize(mem))
? MemDataSize(mem) : payload_size_padded;
const size_t chunk_size = CHUNK_HEADER_SIZE + payload_available;
if (payload_size > MAX_CHUNK_PAYLOAD) return PARSE_ERROR;
if (SizeIsInvalid(mem, payload_size_padded)) return PARSE_ERROR;
if (payload_size_padded > MemDataSize(mem)) status = PARSE_NEED_MORE_DATA;
switch (fourcc) {
case MKFOURCC('A', 'L', 'P', 'H'):
if (alpha_chunks == 0) {
++alpha_chunks;
frame->img_components_[1].offset_ = chunk_start_offset;
frame->img_components_[1].size_ = chunk_size;
frame->has_alpha_ = 1;
frame->frame_num_ = frame_num;
Skip(mem, payload_available);
} else {
goto Done;
}
break;
case MKFOURCC('V', 'P', '8', 'L'):
if (alpha_chunks > 0) return PARSE_ERROR; // VP8L has its own alpha
// fall through
case MKFOURCC('V', 'P', '8', ' '):
if (image_chunks == 0) {
// Extract the bitstream features, tolerating failures when the data
// is incomplete.
WebPBitstreamFeatures features;
const VP8StatusCode vp8_status =
WebPGetFeatures(mem->buf_ + chunk_start_offset, chunk_size,
&features);
if (status == PARSE_NEED_MORE_DATA &&
vp8_status == VP8_STATUS_NOT_ENOUGH_DATA) {
return PARSE_NEED_MORE_DATA;
} else if (vp8_status != VP8_STATUS_OK) {
// We have enough data, and yet WebPGetFeatures() failed.
return PARSE_ERROR;
}
++image_chunks;
SetFrameInfo(chunk_start_offset, chunk_size, frame_num,
status == PARSE_OK, &features, frame);
Skip(mem, payload_available);
} else {
goto Done;
}
break;
Done:
default:
// Restore fourcc/size when moving up one level in parsing.
Rewind(mem, CHUNK_HEADER_SIZE);
done = 1;
break;
}
if (mem->start_ == mem->riff_end_) {
done = 1;
} else if (MemDataSize(mem) < CHUNK_HEADER_SIZE) {
status = PARSE_NEED_MORE_DATA;
}
} while (!done && status == PARSE_OK);
return status;
}
// Creates a new Frame if 'actual_size' is within bounds and 'mem' contains
// enough data ('min_size') to parse the payload.
// Returns PARSE_OK on success with *frame pointing to the new Frame.
// Returns PARSE_NEED_MORE_DATA if there is insufficient data, PARSE_ERROR otherwise.
static ParseStatus NewFrame(const MemBuffer* const mem,
uint32_t min_size, uint32_t actual_size,
Frame** frame) {
if (SizeIsInvalid(mem, min_size)) return PARSE_ERROR;
if (actual_size < min_size) return PARSE_ERROR;
if (MemDataSize(mem) < min_size) return PARSE_NEED_MORE_DATA;
*frame = (Frame*)WebPSafeCalloc(1ULL, sizeof(**frame));
return (*frame == NULL) ? PARSE_ERROR : PARSE_OK;
}
// Parse an 'ANMF' chunk and any image-bearing chunks that immediately follow.
// 'frame_chunk_size' is the previously validated, padded chunk size.
static ParseStatus ParseAnimationFrame(
WebPDemuxer* const dmux, uint32_t frame_chunk_size) {
const int is_animation = !!(dmux->feature_flags_ & ANIMATION_FLAG);
const uint32_t anmf_payload_size = frame_chunk_size - ANMF_CHUNK_SIZE;
int added_frame = 0;
int bits;
MemBuffer* const mem = &dmux->mem_;
Frame* frame;
ParseStatus status =
NewFrame(mem, ANMF_CHUNK_SIZE, frame_chunk_size, &frame);
if (status != PARSE_OK) return status;
frame->x_offset_ = 2 * ReadLE24s(mem);
frame->y_offset_ = 2 * ReadLE24s(mem);
frame->width_ = 1 + ReadLE24s(mem);
frame->height_ = 1 + ReadLE24s(mem);
frame->duration_ = ReadLE24s(mem);
bits = ReadByte(mem);
frame->dispose_method_ =
(bits & 1) ? WEBP_MUX_DISPOSE_BACKGROUND : WEBP_MUX_DISPOSE_NONE;
frame->blend_method_ = (bits & 2) ? WEBP_MUX_NO_BLEND : WEBP_MUX_BLEND;
if (frame->width_ * (uint64_t)frame->height_ >= MAX_IMAGE_AREA) {
WebPSafeFree(frame);
return PARSE_ERROR;
}
// Store a frame only if the animation flag is set and some data for
// this frame is available.
status = StoreFrame(dmux->num_frames_ + 1, anmf_payload_size, mem, frame);
if (status != PARSE_ERROR && is_animation && frame->frame_num_ > 0) {
added_frame = AddFrame(dmux, frame);
if (added_frame) {
++dmux->num_frames_;
} else {
status = PARSE_ERROR;
}
}
if (!added_frame) WebPSafeFree(frame);
return status;
}
// General chunk storage, starting with the header at 'start_offset', allowing
// the user to request the payload via a fourcc string. 'size' includes the
// header and the unpadded payload size.
// Returns true on success, false otherwise.
static int StoreChunk(WebPDemuxer* const dmux,
size_t start_offset, uint32_t size) {
Chunk* const chunk = (Chunk*)WebPSafeCalloc(1ULL, sizeof(*chunk));
if (chunk == NULL) return 0;
chunk->data_.offset_ = start_offset;
chunk->data_.size_ = size;
AddChunk(dmux, chunk);
return 1;
}
// -----------------------------------------------------------------------------
// Primary chunk parsing
static ParseStatus ReadHeader(MemBuffer* const mem) {
const size_t min_size = RIFF_HEADER_SIZE + CHUNK_HEADER_SIZE;
uint32_t riff_size;
// Basic file level validation.
if (MemDataSize(mem) < min_size) return PARSE_NEED_MORE_DATA;
if (memcmp(GetBuffer(mem), "RIFF", CHUNK_SIZE_BYTES) ||
memcmp(GetBuffer(mem) + CHUNK_HEADER_SIZE, "WEBP", CHUNK_SIZE_BYTES)) {
return PARSE_ERROR;
}
riff_size = GetLE32(GetBuffer(mem) + TAG_SIZE);
if (riff_size < CHUNK_HEADER_SIZE) return PARSE_ERROR;
if (riff_size > MAX_CHUNK_PAYLOAD) return PARSE_ERROR;
// There's no point in reading past the end of the RIFF chunk
mem->riff_end_ = riff_size + CHUNK_HEADER_SIZE;
if (mem->buf_size_ > mem->riff_end_) {
mem->buf_size_ = mem->end_ = mem->riff_end_;
}
Skip(mem, RIFF_HEADER_SIZE);
return PARSE_OK;
}
static ParseStatus ParseSingleImage(WebPDemuxer* const dmux) {
const size_t min_size = CHUNK_HEADER_SIZE;
MemBuffer* const mem = &dmux->mem_;
Frame* frame;
ParseStatus status;
int image_added = 0;
if (dmux->frames_ != NULL) return PARSE_ERROR;
if (SizeIsInvalid(mem, min_size)) return PARSE_ERROR;
if (MemDataSize(mem) < min_size) return PARSE_NEED_MORE_DATA;
frame = (Frame*)WebPSafeCalloc(1ULL, sizeof(*frame));
if (frame == NULL) return PARSE_ERROR;
// For the single image case we allow parsing of a partial frame, so no
// minimum size is imposed here.
status = StoreFrame(1, 0, &dmux->mem_, frame);
if (status != PARSE_ERROR) {
const int has_alpha = !!(dmux->feature_flags_ & ALPHA_FLAG);
// Clear any alpha when the alpha flag is missing.
if (!has_alpha && frame->img_components_[1].size_ > 0) {
frame->img_components_[1].offset_ = 0;
frame->img_components_[1].size_ = 0;
frame->has_alpha_ = 0;
}
// Use the frame width/height as the canvas values for non-vp8x files.
// Also, set ALPHA_FLAG if this is a lossless image with alpha.
if (!dmux->is_ext_format_ && frame->width_ > 0 && frame->height_ > 0) {
dmux->state_ = WEBP_DEMUX_PARSED_HEADER;
dmux->canvas_width_ = frame->width_;
dmux->canvas_height_ = frame->height_;
dmux->feature_flags_ |= frame->has_alpha_ ? ALPHA_FLAG : 0;
}
if (!AddFrame(dmux, frame)) {
status = PARSE_ERROR; // last frame was left incomplete
} else {
image_added = 1;
dmux->num_frames_ = 1;
}
}
if (!image_added) WebPSafeFree(frame);
return status;
}
static ParseStatus ParseVP8XChunks(WebPDemuxer* const dmux) {
const int is_animation = !!(dmux->feature_flags_ & ANIMATION_FLAG);
MemBuffer* const mem = &dmux->mem_;
int anim_chunks = 0;
ParseStatus status = PARSE_OK;
do {
int store_chunk = 1;
const size_t chunk_start_offset = mem->start_;
const uint32_t fourcc = ReadLE32(mem);
const uint32_t chunk_size = ReadLE32(mem);
const uint32_t chunk_size_padded = chunk_size + (chunk_size & 1);
if (chunk_size > MAX_CHUNK_PAYLOAD) return PARSE_ERROR;
if (SizeIsInvalid(mem, chunk_size_padded)) return PARSE_ERROR;
switch (fourcc) {
case MKFOURCC('V', 'P', '8', 'X'): {
return PARSE_ERROR;
}
case MKFOURCC('A', 'L', 'P', 'H'):
case MKFOURCC('V', 'P', '8', ' '):
case MKFOURCC('V', 'P', '8', 'L'): {
// check that this isn't an animation (all frames should be in an ANMF).
if (anim_chunks > 0 || is_animation) return PARSE_ERROR;
Rewind(mem, CHUNK_HEADER_SIZE);
status = ParseSingleImage(dmux);
break;
}
case MKFOURCC('A', 'N', 'I', 'M'): {
if (chunk_size_padded < ANIM_CHUNK_SIZE) return PARSE_ERROR;
if (MemDataSize(mem) < chunk_size_padded) {
status = PARSE_NEED_MORE_DATA;
} else if (anim_chunks == 0) {
++anim_chunks;
dmux->bgcolor_ = ReadLE32(mem);
dmux->loop_count_ = ReadLE16s(mem);
Skip(mem, chunk_size_padded - ANIM_CHUNK_SIZE);
} else {
store_chunk = 0;
goto Skip;
}
break;
}
case MKFOURCC('A', 'N', 'M', 'F'): {
if (anim_chunks == 0) return PARSE_ERROR; // 'ANIM' precedes frames.
status = ParseAnimationFrame(dmux, chunk_size_padded);
break;
}
case MKFOURCC('I', 'C', 'C', 'P'): {
store_chunk = !!(dmux->feature_flags_ & ICCP_FLAG);
goto Skip;
}
case MKFOURCC('E', 'X', 'I', 'F'): {
store_chunk = !!(dmux->feature_flags_ & EXIF_FLAG);
goto Skip;
}
case MKFOURCC('X', 'M', 'P', ' '): {
store_chunk = !!(dmux->feature_flags_ & XMP_FLAG);
goto Skip;
}
Skip:
default: {
if (chunk_size_padded <= MemDataSize(mem)) {
if (store_chunk) {
// Store only the chunk header and unpadded size as only the payload
// will be returned to the user.
if (!StoreChunk(dmux, chunk_start_offset,
CHUNK_HEADER_SIZE + chunk_size)) {
return PARSE_ERROR;
}
}
Skip(mem, chunk_size_padded);
} else {
status = PARSE_NEED_MORE_DATA;
}
}
}
if (mem->start_ == mem->riff_end_) {
break;
} else if (MemDataSize(mem) < CHUNK_HEADER_SIZE) {
status = PARSE_NEED_MORE_DATA;
}
} while (status == PARSE_OK);
return status;
}
static ParseStatus ParseVP8X(WebPDemuxer* const dmux) {
MemBuffer* const mem = &dmux->mem_;
uint32_t vp8x_size;
if (MemDataSize(mem) < CHUNK_HEADER_SIZE) return PARSE_NEED_MORE_DATA;
dmux->is_ext_format_ = 1;
Skip(mem, TAG_SIZE); // VP8X
vp8x_size = ReadLE32(mem);
if (vp8x_size > MAX_CHUNK_PAYLOAD) return PARSE_ERROR;
if (vp8x_size < VP8X_CHUNK_SIZE) return PARSE_ERROR;
vp8x_size += vp8x_size & 1;
if (SizeIsInvalid(mem, vp8x_size)) return PARSE_ERROR;
if (MemDataSize(mem) < vp8x_size) return PARSE_NEED_MORE_DATA;
dmux->feature_flags_ = ReadByte(mem);
Skip(mem, 3); // Reserved.
dmux->canvas_width_ = 1 + ReadLE24s(mem);
dmux->canvas_height_ = 1 + ReadLE24s(mem);
if (dmux->canvas_width_ * (uint64_t)dmux->canvas_height_ >= MAX_IMAGE_AREA) {
return PARSE_ERROR; // image final dimension is too large
}
Skip(mem, vp8x_size - VP8X_CHUNK_SIZE); // skip any trailing data.
dmux->state_ = WEBP_DEMUX_PARSED_HEADER;
if (SizeIsInvalid(mem, CHUNK_HEADER_SIZE)) return PARSE_ERROR;
if (MemDataSize(mem) < CHUNK_HEADER_SIZE) return PARSE_NEED_MORE_DATA;
return ParseVP8XChunks(dmux);
}
// -----------------------------------------------------------------------------
// Format validation
static int IsValidSimpleFormat(const WebPDemuxer* const dmux) {
const Frame* const frame = dmux->frames_;
if (dmux->state_ == WEBP_DEMUX_PARSING_HEADER) return 1;
if (dmux->canvas_width_ <= 0 || dmux->canvas_height_ <= 0) return 0;
if (dmux->state_ == WEBP_DEMUX_DONE && frame == NULL) return 0;
if (frame->width_ <= 0 || frame->height_ <= 0) return 0;
return 1;
}
// If 'exact' is true, check that the image resolution matches the canvas.
// If 'exact' is false, check that the x/y offsets do not exceed the canvas.
static int CheckFrameBounds(const Frame* const frame, int exact,
int canvas_width, int canvas_height) {
if (exact) {
if (frame->x_offset_ != 0 || frame->y_offset_ != 0) {
return 0;
}
if (frame->width_ != canvas_width || frame->height_ != canvas_height) {
return 0;
}
} else {
if (frame->x_offset_ < 0 || frame->y_offset_ < 0) return 0;
if (frame->width_ + frame->x_offset_ > canvas_width) return 0;
if (frame->height_ + frame->y_offset_ > canvas_height) return 0;
}
return 1;
}
static int IsValidExtendedFormat(const WebPDemuxer* const dmux) {
const int is_animation = !!(dmux->feature_flags_ & ANIMATION_FLAG);
const Frame* f = dmux->frames_;
if (dmux->state_ == WEBP_DEMUX_PARSING_HEADER) return 1;
if (dmux->canvas_width_ <= 0 || dmux->canvas_height_ <= 0) return 0;
if (dmux->loop_count_ < 0) return 0;
if (dmux->state_ == WEBP_DEMUX_DONE && dmux->frames_ == NULL) return 0;
if (dmux->feature_flags_ & ~ALL_VALID_FLAGS) return 0; // invalid bitstream
while (f != NULL) {
const int cur_frame_set = f->frame_num_;
int frame_count = 0;
// Check frame properties.
for (; f != NULL && f->frame_num_ == cur_frame_set; f = f->next_) {
const ChunkData* const image = f->img_components_;
const ChunkData* const alpha = f->img_components_ + 1;
if (!is_animation && f->frame_num_ > 1) return 0;
if (f->complete_) {
if (alpha->size_ == 0 && image->size_ == 0) return 0;
// Ensure alpha precedes image bitstream.
if (alpha->size_ > 0 && alpha->offset_ > image->offset_) {
return 0;
}
if (f->width_ <= 0 || f->height_ <= 0) return 0;
} else {
// There shouldn't be a partial frame in a complete file.
if (dmux->state_ == WEBP_DEMUX_DONE) return 0;
// Ensure alpha precedes image bitstream.
if (alpha->size_ > 0 && image->size_ > 0 &&
alpha->offset_ > image->offset_) {
return 0;
}
// There shouldn't be any frames after an incomplete one.
if (f->next_ != NULL) return 0;
}
if (f->width_ > 0 && f->height_ > 0 &&
!CheckFrameBounds(f, !is_animation,
dmux->canvas_width_, dmux->canvas_height_)) {
return 0;
}
++frame_count;
}
}
return 1;
}
// -----------------------------------------------------------------------------
// WebPDemuxer object
static void InitDemux(WebPDemuxer* const dmux, const MemBuffer* const mem) {
dmux->state_ = WEBP_DEMUX_PARSING_HEADER;
dmux->loop_count_ = 1;
dmux->bgcolor_ = 0xFFFFFFFF; // White background by default.
dmux->canvas_width_ = -1;
dmux->canvas_height_ = -1;
dmux->frames_tail_ = &dmux->frames_;
dmux->chunks_tail_ = &dmux->chunks_;
dmux->mem_ = *mem;
}
static ParseStatus CreateRawImageDemuxer(MemBuffer* const mem,
WebPDemuxer** demuxer) {
WebPBitstreamFeatures features;
const VP8StatusCode status =
WebPGetFeatures(mem->buf_, mem->buf_size_, &features);
*demuxer = NULL;
if (status != VP8_STATUS_OK) {
return (status == VP8_STATUS_NOT_ENOUGH_DATA) ? PARSE_NEED_MORE_DATA
: PARSE_ERROR;
}
{
WebPDemuxer* const dmux = (WebPDemuxer*)WebPSafeCalloc(1ULL, sizeof(*dmux));
Frame* const frame = (Frame*)WebPSafeCalloc(1ULL, sizeof(*frame));
if (dmux == NULL || frame == NULL) goto Error;
InitDemux(dmux, mem);
SetFrameInfo(0, mem->buf_size_, 1 /*frame_num*/, 1 /*complete*/, &features,
frame);
if (!AddFrame(dmux, frame)) goto Error;
dmux->state_ = WEBP_DEMUX_DONE;
dmux->canvas_width_ = frame->width_;
dmux->canvas_height_ = frame->height_;
dmux->feature_flags_ |= frame->has_alpha_ ? ALPHA_FLAG : 0;
dmux->num_frames_ = 1;
assert(IsValidSimpleFormat(dmux));
*demuxer = dmux;
return PARSE_OK;
Error:
WebPSafeFree(dmux);
WebPSafeFree(frame);
return PARSE_ERROR;
}
}
WebPDemuxer* WebPDemuxInternal(const WebPData* data, int allow_partial,
WebPDemuxState* state, int version) {
const ChunkParser* parser;
int partial;
ParseStatus status = PARSE_ERROR;
MemBuffer mem;
WebPDemuxer* dmux;
if (state != NULL) *state = WEBP_DEMUX_PARSE_ERROR;
if (WEBP_ABI_IS_INCOMPATIBLE(version, WEBP_DEMUX_ABI_VERSION)) return NULL;
if (data == NULL || data->bytes == NULL || data->size == 0) return NULL;
if (!InitMemBuffer(&mem, data->bytes, data->size)) return NULL;
status = ReadHeader(&mem);
if (status != PARSE_OK) {
// If parsing of the webp file header fails, attempt to handle a raw
// VP8/VP8L frame. Note 'allow_partial' is ignored in this case.
if (status == PARSE_ERROR) {
status = CreateRawImageDemuxer(&mem, &dmux);
if (status == PARSE_OK) {
if (state != NULL) *state = WEBP_DEMUX_DONE;
return dmux;
}
}
if (state != NULL) {
*state = (status == PARSE_NEED_MORE_DATA) ? WEBP_DEMUX_PARSING_HEADER
: WEBP_DEMUX_PARSE_ERROR;
}
return NULL;
}
partial = (mem.buf_size_ < mem.riff_end_);
if (!allow_partial && partial) return NULL;
dmux = (WebPDemuxer*)WebPSafeCalloc(1ULL, sizeof(*dmux));
if (dmux == NULL) return NULL;
InitDemux(dmux, &mem);
status = PARSE_ERROR;
for (parser = kMasterChunks; parser->parse != NULL; ++parser) {
if (!memcmp(parser->id, GetBuffer(&dmux->mem_), TAG_SIZE)) {
status = parser->parse(dmux);
if (status == PARSE_OK) dmux->state_ = WEBP_DEMUX_DONE;
if (status == PARSE_NEED_MORE_DATA && !partial) status = PARSE_ERROR;
if (status != PARSE_ERROR && !parser->valid(dmux)) status = PARSE_ERROR;
if (status == PARSE_ERROR) dmux->state_ = WEBP_DEMUX_PARSE_ERROR;
break;
}
}
if (state != NULL) *state = dmux->state_;
if (status == PARSE_ERROR) {
WebPDemuxDelete(dmux);
return NULL;
}
return dmux;
}
void WebPDemuxDelete(WebPDemuxer* dmux) {
Chunk* c;
Frame* f;
if (dmux == NULL) return;
for (f = dmux->frames_; f != NULL;) {
Frame* const cur_frame = f;
f = f->next_;
WebPSafeFree(cur_frame);
}
for (c = dmux->chunks_; c != NULL;) {
Chunk* const cur_chunk = c;
c = c->next_;
WebPSafeFree(cur_chunk);
}
WebPSafeFree(dmux);
}
// -----------------------------------------------------------------------------
uint32_t WebPDemuxGetI(const WebPDemuxer* dmux, WebPFormatFeature feature) {
if (dmux == NULL) return 0;
switch (feature) {
case WEBP_FF_FORMAT_FLAGS: return dmux->feature_flags_;
case WEBP_FF_CANVAS_WIDTH: return (uint32_t)dmux->canvas_width_;
case WEBP_FF_CANVAS_HEIGHT: return (uint32_t)dmux->canvas_height_;
case WEBP_FF_LOOP_COUNT: return (uint32_t)dmux->loop_count_;
case WEBP_FF_BACKGROUND_COLOR: return dmux->bgcolor_;
case WEBP_FF_FRAME_COUNT: return (uint32_t)dmux->num_frames_;
}
return 0;
}
// -----------------------------------------------------------------------------
// Frame iteration
static const Frame* GetFrame(const WebPDemuxer* const dmux, int frame_num) {
const Frame* f;
for (f = dmux->frames_; f != NULL; f = f->next_) {
if (frame_num == f->frame_num_) break;
}
return f;
}
static const uint8_t* GetFramePayload(const uint8_t* const mem_buf,
const Frame* const frame,
size_t* const data_size) {
*data_size = 0;
if (frame != NULL) {
const ChunkData* const image = frame->img_components_;
const ChunkData* const alpha = frame->img_components_ + 1;
size_t start_offset = image->offset_;
*data_size = image->size_;
// If alpha exists it precedes the image; update the size allowing for
// intervening chunks.
if (alpha->size_ > 0) {
const size_t inter_size = (image->offset_ > 0)
? image->offset_ - (alpha->offset_ + alpha->size_)
: 0;
start_offset = alpha->offset_;
*data_size += alpha->size_ + inter_size;
}
return mem_buf + start_offset;
}
return NULL;
}
// Create a whole 'frame' from VP8 (+ alpha) or lossless.
static int SynthesizeFrame(const WebPDemuxer* const dmux,
const Frame* const frame,
WebPIterator* const iter) {
const uint8_t* const mem_buf = dmux->mem_.buf_;
size_t payload_size = 0;
const uint8_t* const payload = GetFramePayload(mem_buf, frame, &payload_size);
if (payload == NULL) return 0;
assert(frame != NULL);
iter->frame_num = frame->frame_num_;
iter->num_frames = dmux->num_frames_;
iter->x_offset = frame->x_offset_;
iter->y_offset = frame->y_offset_;
iter->width = frame->width_;
iter->height = frame->height_;
iter->has_alpha = frame->has_alpha_;
iter->duration = frame->duration_;
iter->dispose_method = frame->dispose_method_;
iter->blend_method = frame->blend_method_;
iter->complete = frame->complete_;
iter->fragment.bytes = payload;
iter->fragment.size = payload_size;
return 1;
}
static int SetFrame(int frame_num, WebPIterator* const iter) {
const Frame* frame;
const WebPDemuxer* const dmux = (WebPDemuxer*)iter->private_;
if (dmux == NULL || frame_num < 0) return 0;
if (frame_num > dmux->num_frames_) return 0;
if (frame_num == 0) frame_num = dmux->num_frames_;
frame = GetFrame(dmux, frame_num);
if (frame == NULL) return 0;
return SynthesizeFrame(dmux, frame, iter);
}
int WebPDemuxGetFrame(const WebPDemuxer* dmux, int frame, WebPIterator* iter) {
if (iter == NULL) return 0;
memset(iter, 0, sizeof(*iter));
iter->private_ = (void*)dmux;
return SetFrame(frame, iter);
}
int WebPDemuxNextFrame(WebPIterator* iter) {
if (iter == NULL) return 0;
return SetFrame(iter->frame_num + 1, iter);
}
int WebPDemuxPrevFrame(WebPIterator* iter) {
if (iter == NULL) return 0;
if (iter->frame_num <= 1) return 0;
return SetFrame(iter->frame_num - 1, iter);
}
void WebPDemuxReleaseIterator(WebPIterator* iter) {
(void)iter;
}
// -----------------------------------------------------------------------------
// Chunk iteration
static int ChunkCount(const WebPDemuxer* const dmux, const char fourcc[4]) {
const uint8_t* const mem_buf = dmux->mem_.buf_;
const Chunk* c;
int count = 0;
for (c = dmux->chunks_; c != NULL; c = c->next_) {
const uint8_t* const header = mem_buf + c->data_.offset_;
if (!memcmp(header, fourcc, TAG_SIZE)) ++count;
}
return count;
}
static const Chunk* GetChunk(const WebPDemuxer* const dmux,
const char fourcc[4], int chunk_num) {
const uint8_t* const mem_buf = dmux->mem_.buf_;
const Chunk* c;
int count = 0;
for (c = dmux->chunks_; c != NULL; c = c->next_) {
const uint8_t* const header = mem_buf + c->data_.offset_;
if (!memcmp(header, fourcc, TAG_SIZE)) ++count;
if (count == chunk_num) break;
}
return c;
}
static int SetChunk(const char fourcc[4], int chunk_num,
WebPChunkIterator* const iter) {
const WebPDemuxer* const dmux = (WebPDemuxer*)iter->private_;
int count;
if (dmux == NULL || fourcc == NULL || chunk_num < 0) return 0;
count = ChunkCount(dmux, fourcc);
if (count == 0) return 0;
if (chunk_num == 0) chunk_num = count;
if (chunk_num <= count) {
const uint8_t* const mem_buf = dmux->mem_.buf_;
const Chunk* const chunk = GetChunk(dmux, fourcc, chunk_num);
iter->chunk.bytes = mem_buf + chunk->data_.offset_ + CHUNK_HEADER_SIZE;
iter->chunk.size = chunk->data_.size_ - CHUNK_HEADER_SIZE;
iter->num_chunks = count;
iter->chunk_num = chunk_num;
return 1;
}
return 0;
}
int WebPDemuxGetChunk(const WebPDemuxer* dmux,
const char fourcc[4], int chunk_num,
WebPChunkIterator* iter) {
if (iter == NULL) return 0;
memset(iter, 0, sizeof(*iter));
iter->private_ = (void*)dmux;
return SetChunk(fourcc, chunk_num, iter);
}
int WebPDemuxNextChunk(WebPChunkIterator* iter) {
if (iter != NULL) {
const char* const fourcc =
(const char*)iter->chunk.bytes - CHUNK_HEADER_SIZE;
return SetChunk(fourcc, iter->chunk_num + 1, iter);
}
return 0;
}
int WebPDemuxPrevChunk(WebPChunkIterator* iter) {
if (iter != NULL && iter->chunk_num > 1) {
const char* const fourcc =
(const char*)iter->chunk.bytes - CHUNK_HEADER_SIZE;
return SetChunk(fourcc, iter->chunk_num - 1, iter);
}
return 0;
}
void WebPDemuxReleaseChunkIterator(WebPChunkIterator* iter) {
(void)iter;
}