Image Sequence Support
* BMFF parser now reads moov box and sample table (stbl), which hold avifs image sequences
* libavif now reads 'avis' brands
* Split avifDecoderRead() into components for image sequences (see the usage sketch after this list):
* avifDecoderSetSource()
* avifDecoderParse()
* avifDecoderNextImage()
* avifImageCopy()
* avifDecoderReset()
* avifDecoderRead() still exists as a simple single-image path
* Added decoder and image timings for image sequences
* Refactored codec API to not require each codec to maintain per-plane decoder instances
* avifImage can now "not own" its planes and directly point at decoder planes to avoid copies
* aviffuzz attempts to decode all images in source material twice (using avifDecoderReset())
* Switch encoder->quality to explicit [minQuantizer, maxQuantizer], update assoc. constants (see the encode snippet after this list)
* Add examples to README
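
A rough usage sketch of the split decode path above, using only the signatures visible in this diff (avifDecoderSetSource(), avifDecoderParse(), avifDecoderNextImage(), avifImageCopy(), avifDecoderReset(), and the new timing fields). This is illustrative only (not the README example), and error handling / file loading are elided:

    #include "avif/avif.h"
    #include <stdio.h>

    static void decodeAllImages(avifRawData * raw) // raw: the whole .avif/.avifs file, loaded elsewhere
    {
        avifDecoder * decoder = avifDecoderCreate();
        avifDecoderSetSource(decoder, AVIF_DECODER_SOURCE_AUTO); // or _TRACKS / _PRIMARY_ITEM

        // avifDecoderParse() keeps a shallow pointer to raw, so raw must outlive decoding
        if (avifDecoderParse(decoder, raw) != AVIF_RESULT_OK) {
            avifDecoderDestroy(decoder);
            return;
        }
        printf("Parsed %d image(s), total duration %f s\n", (int)decoder->imageCount, decoder->duration);

        for (;;) {
            avifResult result = avifDecoderNextImage(decoder);
            if (result == AVIF_RESULT_NO_IMAGES_REMAINING) {
                break; // sequence exhausted
            } else if (result != AVIF_RESULT_OK) {
                break; // decode failure
            }

            // decoder->image may point directly at codec-owned planes; copy it out
            // if the pixels need to survive the next avifDecoderNextImage() call.
            avifImage * frame = avifImageCreateEmpty();
            avifImageCopy(frame, decoder->image);
            printf("image %d/%d: pts %f, duration %f\n",
                   (int)decoder->imageIndex + 1,
                   (int)decoder->imageCount,
                   decoder->imageTiming.pts,
                   decoder->imageTiming.duration);
            avifImageDestroy(frame); // ... use frame before destroying it
        }

        // avifDecoderReset(decoder) would rewind to the first image here (as aviffuzz does for its second pass)
        avifDecoderDestroy(decoder);
    }

Likewise, for the encoder->quality change, a plausible lossy setup now reads as follows; the specific quantizer values and the avifImage * image being encoded are illustrative, and only AVIF_QUANTIZER_LOSSLESS is defined by this diff:

    avifEncoder * encoder = avifEncoderCreate();
    encoder->maxThreads = 1;
    encoder->minQuantizer = 10; // replaces encoder->quality; lower quantizer = higher quality
    encoder->maxQuantizer = 30; // set both to AVIF_QUANTIZER_LOSSLESS for lossless output
    avifRawData output = AVIF_RAW_DATA_EMPTY;
    if (avifEncoderWrite(encoder, image, &output) == AVIF_RESULT_OK) {
        // ... use output.data / output.size
    }
    avifRawDataFree(&output);
    avifEncoderDestroy(encoder);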
diff --git a/src/avif.c b/src/avif.c
index c03e845..ea38f1c 100644
--- a/src/avif.c
+++ b/src/avif.c
@@ -86,6 +86,7 @@
case AVIF_RESULT_COLOR_ALPHA_SIZE_MISMATCH: return "Color and alpha planes size mismatch";
case AVIF_RESULT_ISPE_SIZE_MISMATCH: return "Plane sizes don't match ispe values";
case AVIF_RESULT_NO_CODEC_AVAILABLE: return "No codec available";
+ case AVIF_RESULT_NO_IMAGES_REMAINING: return "No images remaining";
case AVIF_RESULT_UNKNOWN_ERROR:
default:
break;
@@ -117,6 +118,73 @@
return avifImageCreate(0, 0, 0, AVIF_PIXEL_FORMAT_NONE);
}
+void avifImageCopy(avifImage * dstImage, avifImage * srcImage)
+{
+ avifImageFreePlanes(dstImage, AVIF_PLANES_ALL);
+
+ dstImage->width = srcImage->width;
+ dstImage->height = srcImage->height;
+ dstImage->depth = srcImage->depth;
+ dstImage->yuvFormat = srcImage->yuvFormat;
+ dstImage->yuvRange = srcImage->yuvRange;
+
+ if (srcImage->profileFormat == AVIF_PROFILE_FORMAT_ICC) {
+ avifImageSetProfileICC(dstImage, srcImage->icc.data, srcImage->icc.size);
+ } else if (srcImage->profileFormat == AVIF_PROFILE_FORMAT_NCLX) {
+ avifImageSetProfileNCLX(dstImage, &srcImage->nclx);
+ } else {
+ avifImageSetProfileNone(dstImage);
+ }
+
+ if (srcImage->rgbPlanes[AVIF_CHAN_R] && srcImage->rgbPlanes[AVIF_CHAN_G] && srcImage->rgbPlanes[AVIF_CHAN_B]) {
+ avifImageAllocatePlanes(dstImage, AVIF_PLANES_RGB);
+
+ for (int plane = 0; plane < 3; ++plane) {
+ uint32_t planeBytes = srcImage->rgbRowBytes[plane] * srcImage->height;
+ memcpy(dstImage->rgbPlanes[plane], srcImage->rgbPlanes[plane], planeBytes);
+ }
+ }
+
+ if (srcImage->yuvPlanes[AVIF_CHAN_Y] && srcImage->yuvPlanes[AVIF_CHAN_U] && srcImage->yuvPlanes[AVIF_CHAN_V]) {
+ avifImageAllocatePlanes(dstImage, AVIF_PLANES_YUV);
+
+ avifPixelFormatInfo formatInfo;
+ avifGetPixelFormatInfo(srcImage->yuvFormat, &formatInfo);
+ int uvHeight = dstImage->height >> formatInfo.chromaShiftY;
+ for (int yuvPlane = 0; yuvPlane < 3; ++yuvPlane) {
+ int aomPlaneIndex = yuvPlane;
+ int planeHeight = dstImage->height;
+ if (yuvPlane == AVIF_CHAN_U) {
+ aomPlaneIndex = formatInfo.aomIndexU;
+ planeHeight = uvHeight;
+ } else if (yuvPlane == AVIF_CHAN_V) {
+ aomPlaneIndex = formatInfo.aomIndexV;
+ planeHeight = uvHeight;
+ }
+
+ if (!srcImage->yuvRowBytes[aomPlaneIndex]) {
+ // plane is absent, move on
+ continue;
+ }
+
+ for (int j = 0; j < planeHeight; ++j) {
+ uint8_t * srcRow = &srcImage->yuvPlanes[aomPlaneIndex][j * srcImage->yuvRowBytes[aomPlaneIndex]];
+ uint8_t * dstRow = &dstImage->yuvPlanes[yuvPlane][j * dstImage->yuvRowBytes[yuvPlane]];
+ memcpy(dstRow, srcRow, dstImage->yuvRowBytes[yuvPlane]);
+ }
+ }
+ }
+
+ if (srcImage->alphaPlane) {
+ avifImageAllocatePlanes(dstImage, AVIF_PLANES_A);
+ for (int j = 0; j < dstImage->height; ++j) {
+ uint8_t * srcAlphaRow = &srcImage->alphaPlane[j * srcImage->alphaRowBytes];
+ uint8_t * dstAlphaRow = &dstImage->alphaPlane[j * dstImage->alphaRowBytes];
+ memcpy(dstAlphaRow, srcAlphaRow, dstImage->alphaRowBytes);
+ }
+ }
+}
+
void avifImageDestroy(avifImage * image)
{
avifImageFreePlanes(image, AVIF_PLANES_ALL);
@@ -209,6 +277,17 @@
}
}
+void avifImageCopyDecoderAlpha(avifImage * image)
+{
+ if (image->alphaPlane && image->alphaRowBytes && image->height && image->decoderOwnsAlphaPlane) {
+ int fullSize = image->alphaRowBytes * image->height;
+ uint8_t * oldAlphaPlane = image->alphaPlane;
+ image->alphaPlane = avifAlloc(fullSize);
+ memcpy(image->alphaPlane, oldAlphaPlane, fullSize);
+ image->decoderOwnsAlphaPlane = AVIF_FALSE;
+ }
+}
+
void avifImageFreePlanes(avifImage * image, uint32_t planes)
{
if (planes & AVIF_PLANES_RGB) {
@@ -223,20 +302,26 @@
image->rgbRowBytes[AVIF_CHAN_B] = 0;
}
if ((planes & AVIF_PLANES_YUV) && (image->yuvFormat != AVIF_PIXEL_FORMAT_NONE)) {
- avifFree(image->yuvPlanes[AVIF_CHAN_Y]);
+ if (!image->decoderOwnsYUVPlanes) {
+ avifFree(image->yuvPlanes[AVIF_CHAN_Y]);
+ avifFree(image->yuvPlanes[AVIF_CHAN_U]);
+ avifFree(image->yuvPlanes[AVIF_CHAN_V]);
+ }
image->yuvPlanes[AVIF_CHAN_Y] = NULL;
image->yuvRowBytes[AVIF_CHAN_Y] = 0;
- avifFree(image->yuvPlanes[AVIF_CHAN_U]);
image->yuvPlanes[AVIF_CHAN_U] = NULL;
image->yuvRowBytes[AVIF_CHAN_U] = 0;
- avifFree(image->yuvPlanes[AVIF_CHAN_V]);
image->yuvPlanes[AVIF_CHAN_V] = NULL;
image->yuvRowBytes[AVIF_CHAN_V] = 0;
+ image->decoderOwnsYUVPlanes = AVIF_FALSE;
}
if (planes & AVIF_PLANES_A) {
- avifFree(image->alphaPlane);
+ if (!image->decoderOwnsAlphaPlane) {
+ avifFree(image->alphaPlane);
+ }
image->alphaPlane = NULL;
image->alphaRowBytes = 0;
+ image->decoderOwnsAlphaPlane = AVIF_FALSE;
}
}
diff --git a/src/codec_aom.c b/src/codec_aom.c
index 019865c..3d41208 100644
--- a/src/codec_aom.c
+++ b/src/codec_aom.c
@@ -12,143 +12,160 @@
struct avifCodecInternal
{
- avifBool decoderInitialized[AVIF_CODEC_PLANES_COUNT];
- aom_codec_ctx_t decoders[AVIF_CODEC_PLANES_COUNT];
+ avifBool decoderInitialized;
+ aom_codec_ctx_t decoder;
+ aom_codec_iter_t iter;
+ uint32_t inputSampleIndex;
+ aom_image_t * image;
- aom_image_t * images[AVIF_CODEC_PLANES_COUNT];
- avifRawData encodedOBUs[AVIF_CODEC_PLANES_COUNT];
- avifCodecConfigurationBox configs[AVIF_CODEC_PLANES_COUNT];
+ avifRawData encodedOBU;
+ avifCodecConfigurationBox config;
};
static void aomCodecDestroyInternal(avifCodec * codec)
{
- for (int plane = 0; plane < AVIF_CODEC_PLANES_COUNT; ++plane) {
- if (codec->internal->decoderInitialized[plane]) {
- aom_codec_destroy(&codec->internal->decoders[plane]);
- }
- avifRawDataFree(&codec->internal->encodedOBUs[plane]);
+ if (codec->internal->decoderInitialized) {
+ aom_codec_destroy(&codec->internal->decoder);
}
+ avifRawDataFree(&codec->internal->encodedOBU);
avifFree(codec->internal);
}
-static avifBool aomCodecDecode(avifCodec * codec, avifCodecPlanes planes, avifRawData * obu)
+static avifBool aomCodecDecode(avifCodec * codec)
{
- aom_codec_stream_info_t si;
aom_codec_iface_t * decoder_interface = aom_codec_av1_dx();
- if (aom_codec_dec_init(&codec->internal->decoders[planes], decoder_interface, NULL, 0)) {
+ if (aom_codec_dec_init(&codec->internal->decoder, decoder_interface, NULL, 0)) {
return AVIF_FALSE;
}
- codec->internal->decoderInitialized[planes] = AVIF_TRUE;
+ codec->internal->decoderInitialized = AVIF_TRUE;
- if (aom_codec_control(&codec->internal->decoders[planes], AV1D_SET_OUTPUT_ALL_LAYERS, 1)) {
+ if (aom_codec_control(&codec->internal->decoder, AV1D_SET_OUTPUT_ALL_LAYERS, 1)) {
return AVIF_FALSE;
}
- si.is_annexb = 0;
- if (aom_codec_peek_stream_info(decoder_interface, obu->data, obu->size, &si)) {
- return AVIF_FALSE;
- }
-
- if (aom_codec_decode(&codec->internal->decoders[planes], obu->data, obu->size, NULL)) {
- return AVIF_FALSE;
- }
-
- aom_codec_iter_t iter = NULL;
- codec->internal->images[planes] = aom_codec_get_frame(&codec->internal->decoders[planes], &iter);
- return (codec->internal->images[planes]) ? AVIF_TRUE : AVIF_FALSE;
-}
-
-static avifCodecImageSize aomCodecGetImageSize(avifCodec * codec, avifCodecPlanes planes)
-{
- avifCodecImageSize size;
- if (codec->internal->images[planes]) {
- size.width = codec->internal->images[planes]->d_w;
- size.height = codec->internal->images[planes]->d_h;
- } else {
- size.width = 0;
- size.height = 0;
- }
- return size;
+ codec->internal->inputSampleIndex = 0;
+ codec->internal->iter = NULL;
+ return AVIF_TRUE;
}
static avifBool aomCodecAlphaLimitedRange(avifCodec * codec)
{
- aom_image_t * aomAlphaImage = codec->internal->images[AVIF_CODEC_PLANES_ALPHA];
- if (aomAlphaImage && (aomAlphaImage->range == AOM_CR_STUDIO_RANGE)) {
+ if (codec->decodeInput->alpha && codec->internal->image && (codec->internal->image->range == AOM_CR_STUDIO_RANGE)) {
return AVIF_TRUE;
}
return AVIF_FALSE;
}
-static avifResult aomCodecGetDecodedImage(avifCodec * codec, avifImage * image)
+static avifBool aomCodecGetNextImage(avifCodec * codec, avifImage * image)
{
- aom_image_t * aomColorImage = codec->internal->images[AVIF_CODEC_PLANES_COLOR];
- aom_image_t * aomAlphaImage = codec->internal->images[AVIF_CODEC_PLANES_ALPHA];
- avifBool hasAlpha = aomAlphaImage ? AVIF_TRUE : AVIF_FALSE;
-
- avifPixelFormat yuvFormat = AVIF_PIXEL_FORMAT_NONE;
- switch (aomColorImage->fmt) {
- case AOM_IMG_FMT_I420:
- case AOM_IMG_FMT_AOMI420:
- case AOM_IMG_FMT_I42016:
- yuvFormat = AVIF_PIXEL_FORMAT_YUV420;
+ aom_image_t * nextFrame = NULL;
+ for (;;) {
+ nextFrame = aom_codec_get_frame(&codec->internal->decoder, &codec->internal->iter);
+ if (nextFrame) {
+ // Got an image!
break;
- case AOM_IMG_FMT_I422:
- case AOM_IMG_FMT_I42216:
- yuvFormat = AVIF_PIXEL_FORMAT_YUV422;
+ } else if (codec->internal->inputSampleIndex < codec->decodeInput->samples.count) {
+ // Feed another sample
+ avifRawData * sample = &codec->decodeInput->samples.raw[codec->internal->inputSampleIndex];
+ ++codec->internal->inputSampleIndex;
+ codec->internal->iter = NULL;
+ if (aom_codec_decode(&codec->internal->decoder, sample->data, sample->size, NULL)) {
+ return AVIF_FALSE;
+ }
+ } else {
+ // No more samples to feed
break;
- case AOM_IMG_FMT_I444:
- case AOM_IMG_FMT_I44416:
- yuvFormat = AVIF_PIXEL_FORMAT_YUV444;
- break;
- case AOM_IMG_FMT_YV12:
- case AOM_IMG_FMT_AOMYV12:
- case AOM_IMG_FMT_YV1216:
- yuvFormat = AVIF_PIXEL_FORMAT_YV12;
- break;
- case AOM_IMG_FMT_NONE:
- default:
- break;
- }
-
- image->width = aomColorImage->d_w;
- image->height = aomColorImage->d_h;
- image->depth = aomColorImage->bit_depth;
- image->yuvFormat = yuvFormat;
- image->yuvRange = (aomColorImage->range == AOM_CR_STUDIO_RANGE) ? AVIF_RANGE_LIMITED : AVIF_RANGE_FULL;
-
- avifPixelFormatInfo formatInfo;
- avifGetPixelFormatInfo(yuvFormat, &formatInfo);
-
- int uvHeight = image->height >> formatInfo.chromaShiftY;
- avifImageAllocatePlanes(image, AVIF_PLANES_YUV);
- for (int yuvPlane = 0; yuvPlane < 3; ++yuvPlane) {
- int aomPlaneIndex = yuvPlane;
- int planeHeight = image->height;
- if (yuvPlane == AVIF_CHAN_U) {
- aomPlaneIndex = formatInfo.aomIndexU;
- planeHeight = uvHeight;
- } else if (yuvPlane == AVIF_CHAN_V) {
- aomPlaneIndex = formatInfo.aomIndexV;
- planeHeight = uvHeight;
- }
-
- for (int j = 0; j < planeHeight; ++j) {
- uint8_t * srcRow = &aomColorImage->planes[aomPlaneIndex][j * aomColorImage->stride[aomPlaneIndex]];
- uint8_t * dstRow = &image->yuvPlanes[yuvPlane][j * image->yuvRowBytes[yuvPlane]];
- memcpy(dstRow, srcRow, image->yuvRowBytes[yuvPlane]);
}
}
- if (hasAlpha) {
- avifImageAllocatePlanes(image, AVIF_PLANES_A);
- for (int j = 0; j < image->height; ++j) {
- uint8_t * srcAlphaRow = &aomAlphaImage->planes[0][j * aomAlphaImage->stride[0]];
- uint8_t * dstAlphaRow = &image->alphaPlane[j * image->alphaRowBytes];
- memcpy(dstAlphaRow, srcAlphaRow, image->alphaRowBytes);
+ if (nextFrame) {
+ codec->internal->image = nextFrame;
+ } else {
+ if (codec->decodeInput->alpha && codec->internal->image) {
+ // Special case: reuse last alpha frame
+ } else {
+ return AVIF_FALSE;
}
}
- return AVIF_RESULT_OK;
+
+ if (!codec->internal->image) {
+ return AVIF_FALSE;
+ }
+
+ avifBool isColor = !codec->decodeInput->alpha;
+ if (isColor) {
+ // Color (YUV) planes - set image to correct size / format, fill color
+
+ avifPixelFormat yuvFormat = AVIF_PIXEL_FORMAT_NONE;
+ switch (codec->internal->image->fmt) {
+ case AOM_IMG_FMT_I420:
+ case AOM_IMG_FMT_AOMI420:
+ case AOM_IMG_FMT_I42016:
+ yuvFormat = AVIF_PIXEL_FORMAT_YUV420;
+ break;
+ case AOM_IMG_FMT_I422:
+ case AOM_IMG_FMT_I42216:
+ yuvFormat = AVIF_PIXEL_FORMAT_YUV422;
+ break;
+ case AOM_IMG_FMT_I444:
+ case AOM_IMG_FMT_I44416:
+ yuvFormat = AVIF_PIXEL_FORMAT_YUV444;
+ break;
+ case AOM_IMG_FMT_YV12:
+ case AOM_IMG_FMT_AOMYV12:
+ case AOM_IMG_FMT_YV1216:
+ yuvFormat = AVIF_PIXEL_FORMAT_YV12;
+ break;
+ case AOM_IMG_FMT_NONE:
+ default:
+ break;
+ }
+
+ if (image->width && image->height) {
+ if ((image->width != codec->internal->image->d_w) || (image->height != codec->internal->image->d_h) ||
+ (image->depth != codec->internal->image->bit_depth) || (image->yuvFormat != yuvFormat)) {
+ // Throw it all out
+ avifImageFreePlanes(image, AVIF_PLANES_ALL);
+ }
+ }
+
+ image->width = codec->internal->image->d_w;
+ image->height = codec->internal->image->d_h;
+ image->depth = codec->internal->image->bit_depth;
+ image->yuvFormat = yuvFormat;
+ image->yuvRange = (codec->internal->image->range == AOM_CR_STUDIO_RANGE) ? AVIF_RANGE_LIMITED : AVIF_RANGE_FULL;
+
+ avifPixelFormatInfo formatInfo;
+ avifGetPixelFormatInfo(yuvFormat, &formatInfo);
+
+ // Steal the pointers from the image directly
+ avifImageFreePlanes(image, AVIF_PLANES_YUV);
+ for (int yuvPlane = 0; yuvPlane < 3; ++yuvPlane) {
+ int aomPlaneIndex = yuvPlane;
+ if (yuvPlane == AVIF_CHAN_U) {
+ aomPlaneIndex = formatInfo.aomIndexU;
+ } else if (yuvPlane == AVIF_CHAN_V) {
+ aomPlaneIndex = formatInfo.aomIndexV;
+ }
+ image->yuvPlanes[yuvPlane] = codec->internal->image->planes[aomPlaneIndex];
+ image->yuvRowBytes[yuvPlane] = codec->internal->image->stride[aomPlaneIndex];
+ }
+ image->decoderOwnsYUVPlanes = AVIF_TRUE;
+ } else {
+ // Alpha plane - ensure image is correct size, fill color
+
+ if ((image->width != codec->internal->image->d_w) || (image->height != codec->internal->image->d_h) ||
+ (image->depth != codec->internal->image->bit_depth)) {
+ return AVIF_FALSE;
+ }
+
+ avifImageFreePlanes(image, AVIF_PLANES_A);
+ image->alphaPlane = codec->internal->image->planes[0];
+ image->alphaRowBytes = codec->internal->image->stride[0];
+ image->decoderOwnsAlphaPlane = AVIF_TRUE;
+ }
+
+ return AVIF_TRUE;
}
static aom_img_fmt_t avifImageCalcAOMFmt(avifImage * image, avifBool alphaOnly, int * yShift)
@@ -194,11 +211,6 @@
aom_codec_iface_t * encoder_interface = aom_codec_av1_cx();
aom_codec_ctx_t aomEncoder;
- int quality = encoder->quality;
- if (alphaOnly) {
- quality = AVIF_BEST_QUALITY;
- }
-
memset(outputConfig, 0, sizeof(avifCodecConfigurationBox));
int yShift = 0;
@@ -278,13 +290,17 @@
// * 3 - CSP_RESERVED
outputConfig->chromaSamplePosition = 0;
- avifBool lossless = (quality == AVIF_BEST_QUALITY) ? AVIF_TRUE : AVIF_FALSE;
- cfg.rc_min_quantizer = 0;
- if (lossless) {
- cfg.rc_max_quantizer = 0;
- } else {
- cfg.rc_max_quantizer = quality;
+ int minQuantizer = encoder->minQuantizer;
+ int maxQuantizer = encoder->maxQuantizer;
+ if (alphaOnly) {
+ minQuantizer = AVIF_QUANTIZER_LOSSLESS;
+ maxQuantizer = AVIF_QUANTIZER_LOSSLESS;
}
+ avifBool lossless = ((minQuantizer == AVIF_QUANTIZER_LOSSLESS) && (maxQuantizer == AVIF_QUANTIZER_LOSSLESS))
+ ? AVIF_TRUE
+ : AVIF_FALSE;
+ cfg.rc_min_quantizer = minQuantizer;
+ cfg.rc_max_quantizer = maxQuantizer;
uint32_t encoderFlags = 0;
if (image->depth > 8) {
@@ -359,24 +375,17 @@
return success;
}
-static avifResult aomCodecEncodeImage(avifCodec * codec, avifImage * image, avifEncoder * encoder, avifRawData * colorOBU, avifRawData * alphaOBU)
+static avifBool aomCodecEncodeImage(avifCodec * codec, avifImage * image, avifEncoder * encoder, avifRawData * obu, avifBool alpha)
{
- if (colorOBU) {
- if (!encodeOBU(image, AVIF_FALSE, encoder, colorOBU, &codec->internal->configs[AVIF_CODEC_PLANES_COLOR])) {
- return AVIF_RESULT_ENCODE_COLOR_FAILED;
- }
+ if (!encodeOBU(image, alpha, encoder, obu, &codec->internal->config)) {
+ return AVIF_FALSE;
}
- if (alphaOBU) {
- if (!encodeOBU(image, AVIF_TRUE, encoder, alphaOBU, &codec->internal->configs[AVIF_CODEC_PLANES_ALPHA])) {
- return AVIF_RESULT_ENCODE_COLOR_FAILED;
- }
- }
- return AVIF_RESULT_OK;
+ return AVIF_TRUE;
}
-static void aomCodecGetConfigurationBox(avifCodec * codec, avifCodecPlanes planes, avifCodecConfigurationBox * outConfig)
+static void aomCodecGetConfigurationBox(avifCodec * codec, avifCodecConfigurationBox * outConfig)
{
- memcpy(outConfig, &codec->internal->configs[planes], sizeof(avifCodecConfigurationBox));
+ memcpy(outConfig, &codec->internal->config, sizeof(avifCodecConfigurationBox));
}
avifCodec * avifCodecCreateAOM()
@@ -384,9 +393,8 @@
avifCodec * codec = (avifCodec *)avifAlloc(sizeof(avifCodec));
memset(codec, 0, sizeof(struct avifCodec));
codec->decode = aomCodecDecode;
- codec->getImageSize = aomCodecGetImageSize;
codec->alphaLimitedRange = aomCodecAlphaLimitedRange;
- codec->getDecodedImage = aomCodecGetDecodedImage;
+ codec->getNextImage = aomCodecGetNextImage;
codec->encodeImage = aomCodecEncodeImage;
codec->getConfigurationBox = aomCodecGetConfigurationBox;
codec->destroyInternal = aomCodecDestroyInternal;
diff --git a/src/codec_dav1d.c b/src/codec_dav1d.c
index 8916636..ce7af15 100644
--- a/src/codec_dav1d.c
+++ b/src/codec_dav1d.c
@@ -10,133 +10,152 @@
struct avifCodecInternal
{
Dav1dSettings dav1dSettings;
- Dav1dContext * dav1dContext[AVIF_CODEC_PLANES_COUNT];
- Dav1dPicture dav1dPicture[AVIF_CODEC_PLANES_COUNT];
- avifBool hasPicture[AVIF_CODEC_PLANES_COUNT];
- avifRange colorRange[AVIF_CODEC_PLANES_COUNT];
+ Dav1dContext * dav1dContext;
+ Dav1dPicture dav1dPicture;
+ avifBool hasPicture;
+ avifRange colorRange;
+ Dav1dData dav1dData;
+ uint32_t inputSampleIndex;
};
static void dav1dCodecDestroyInternal(avifCodec * codec)
{
- for (int i = 0; i < AVIF_CODEC_PLANES_COUNT; ++i) {
- if (codec->internal->dav1dContext[i]) {
- dav1d_close(&codec->internal->dav1dContext[i]);
- }
+ if (codec->internal->dav1dContext) {
+ dav1d_close(&codec->internal->dav1dContext);
}
avifFree(codec->internal);
}
-static avifBool dav1dCodecDecode(avifCodec * codec, avifCodecPlanes planes, avifRawData * obu)
+// returns AVIF_FALSE if there's nothing left to feed, or feeding fatally fails (say that five times fast)
+static avifBool dav1dFeedData(avifCodec * codec)
{
- if (codec->internal->dav1dContext[planes] == NULL) {
- if (dav1d_open(&codec->internal->dav1dContext[planes], &codec->internal->dav1dSettings) != 0) {
+ if (!codec->internal->dav1dData.sz) {
+ dav1d_data_unref(&codec->internal->dav1dData);
+
+ if (codec->internal->inputSampleIndex < codec->decodeInput->samples.count) {
+ avifRawData * sample = &codec->decodeInput->samples.raw[codec->internal->inputSampleIndex];
+ ++codec->internal->inputSampleIndex;
+
+ // OPTIMIZE: Carefully switch this to use dav1d_data_wrap or dav1d_data_wrap_user_data
+ uint8_t * dav1dDataPtr = dav1d_data_create(&codec->internal->dav1dData, sample->size);
+ memcpy(dav1dDataPtr, sample->data, sample->size);
+ } else {
+ // No more data
return AVIF_FALSE;
}
}
- avifBool result = AVIF_FALSE;
-
- // OPTIMIZE: Carefully switch this to use dav1d_data_wrap or dav1d_data_wrap_user_data
- Dav1dData dav1dData;
- uint8_t * dav1dDataPtr = dav1d_data_create(&dav1dData, obu->size);
- memcpy(dav1dDataPtr, obu->data, obu->size);
-
- if (dav1d_send_data(codec->internal->dav1dContext[planes], &dav1dData) != 0) {
- // This could return DAV1D_ERR(EAGAIN) and not be a failure if we weren't sending the entire payload
- goto cleanup;
+ int res = dav1d_send_data(codec->internal->dav1dContext, &codec->internal->dav1dData);
+ if ((res < 0) && (res != DAV1D_ERR(EAGAIN))) {
+ return AVIF_FALSE;
}
-
- if (dav1d_get_picture(codec->internal->dav1dContext[planes], &codec->internal->dav1dPicture[planes]) != 0) {
- goto cleanup;
- }
-
- codec->internal->hasPicture[planes] = AVIF_TRUE;
- codec->internal->colorRange[planes] = codec->internal->dav1dPicture[planes].seq_hdr->color_range ? AVIF_RANGE_FULL : AVIF_RANGE_LIMITED;
- result = AVIF_TRUE;
-cleanup:
- dav1d_data_unref(&dav1dData);
- return result;
+ return AVIF_TRUE;
}
-static avifCodecImageSize dav1dCodecGetImageSize(avifCodec * codec, avifCodecPlanes planes)
+static avifBool dav1dCodecDecode(avifCodec * codec)
{
- avifCodecImageSize size;
- size.width = codec->internal->dav1dPicture[planes].p.w;
- size.height = codec->internal->dav1dPicture[planes].p.h;
- return size;
+ if (codec->internal->dav1dContext == NULL) {
+ if (dav1d_open(&codec->internal->dav1dContext, &codec->internal->dav1dSettings) != 0) {
+ return AVIF_FALSE;
+ }
+ }
+
+ codec->internal->inputSampleIndex = 0;
+ return dav1dFeedData(codec);
}
static avifBool dav1dCodecAlphaLimitedRange(avifCodec * codec)
{
- if (codec->internal->hasPicture[AVIF_CODEC_PLANES_ALPHA] &&
- (codec->internal->colorRange[AVIF_CODEC_PLANES_ALPHA] == AVIF_RANGE_LIMITED)) {
+ if (codec->decodeInput->alpha && codec->internal->hasPicture && (codec->internal->colorRange == AVIF_RANGE_LIMITED)) {
return AVIF_TRUE;
}
return AVIF_FALSE;
}
-static avifResult dav1dCodecGetDecodedImage(avifCodec * codec, avifImage * image)
+static avifBool dav1dCodecGetNextImage(avifCodec * codec, avifImage * image)
{
- Dav1dPicture * colorImage = &codec->internal->dav1dPicture[AVIF_CODEC_PLANES_COLOR];
- Dav1dPicture * alphaImage = &codec->internal->dav1dPicture[AVIF_CODEC_PLANES_ALPHA];
- avifBool hasAlpha = codec->internal->hasPicture[AVIF_CODEC_PLANES_ALPHA];
+ avifBool gotPicture = AVIF_FALSE;
+ Dav1dPicture nextFrame = { 0 };
- avifPixelFormat yuvFormat = AVIF_PIXEL_FORMAT_NONE;
- switch (colorImage->p.layout) {
- case DAV1D_PIXEL_LAYOUT_I400:
- case DAV1D_PIXEL_LAYOUT_I420:
- yuvFormat = AVIF_PIXEL_FORMAT_YUV420;
- break;
- case DAV1D_PIXEL_LAYOUT_I422:
- yuvFormat = AVIF_PIXEL_FORMAT_YUV422;
- break;
- case DAV1D_PIXEL_LAYOUT_I444:
- yuvFormat = AVIF_PIXEL_FORMAT_YUV444;
- break;
- }
-
- image->width = colorImage->p.w;
- image->height = colorImage->p.h;
- image->depth = colorImage->p.bpc;
- image->yuvFormat = yuvFormat;
- image->yuvRange = codec->internal->colorRange[AVIF_CODEC_PLANES_COLOR];
-
- avifPixelFormatInfo formatInfo;
- avifGetPixelFormatInfo(yuvFormat, &formatInfo);
-
- int uvHeight = image->height >> formatInfo.chromaShiftY;
- avifImageAllocatePlanes(image, AVIF_PLANES_YUV);
-
- for (int yuvPlane = 0; yuvPlane < 3; ++yuvPlane) {
- int planeHeight = image->height;
- if (yuvPlane != AVIF_CHAN_Y) {
- planeHeight = uvHeight;
- }
-
- uint8_t * srcPixels = (uint8_t *)colorImage->data[yuvPlane];
- ptrdiff_t stride = colorImage->stride[(yuvPlane == AVIF_CHAN_Y) ? 0 : 1];
- for (int j = 0; j < planeHeight; ++j) {
- uint8_t * srcRow = &srcPixels[j * stride];
- uint8_t * dstRow = &image->yuvPlanes[yuvPlane][j * image->yuvRowBytes[yuvPlane]];
- memcpy(dstRow, srcRow, image->yuvRowBytes[yuvPlane]);
- }
-
- if (colorImage->p.layout == DAV1D_PIXEL_LAYOUT_I400) {
- // Don't memcpy the chroma, its not going to be there
+ for (;;) {
+ avifBool sentData = dav1dFeedData(codec);
+ int res = dav1d_get_picture(codec->internal->dav1dContext, &nextFrame);
+ if (res == DAV1D_ERR(EAGAIN)) {
+ if (!sentData) {
+ // No more data to feed, no more frames to get
+ break;
+ }
+ } else if (res < 0) {
+ // Fatal decode error
+ return AVIF_FALSE;
+ } else {
+ // Got a picture!
+ gotPicture = AVIF_TRUE;
+ break;
}
}
- if (hasAlpha) {
- avifImageAllocatePlanes(image, AVIF_PLANES_A);
- uint8_t * srcAlphaPixels = (uint8_t *)&alphaImage->data[0];
- for (int j = 0; j < image->height; ++j) {
- uint8_t * srcAlphaRow = &srcAlphaPixels[j * alphaImage->stride[0]];
- uint8_t * dstAlphaRow = &image->alphaPlane[j * image->alphaRowBytes];
- memcpy(dstAlphaRow, srcAlphaRow, image->alphaRowBytes);
+ if (gotPicture) {
+ dav1d_picture_unref(&codec->internal->dav1dPicture);
+ codec->internal->dav1dPicture = nextFrame;
+ codec->internal->colorRange = codec->internal->dav1dPicture.seq_hdr->color_range ? AVIF_RANGE_FULL : AVIF_RANGE_LIMITED;
+ codec->internal->hasPicture = AVIF_TRUE;
+ } else {
+ if (codec->decodeInput->alpha && codec->internal->hasPicture) {
+ // Special case: reuse last alpha frame
+ } else {
+ return AVIF_FALSE;
}
}
- return AVIF_RESULT_OK;
+
+ Dav1dPicture * dav1dImage = &codec->internal->dav1dPicture;
+ avifBool isColor = !codec->decodeInput->alpha;
+ if (isColor) {
+ // Color (YUV) planes - set image to correct size / format, fill color
+
+ avifPixelFormat yuvFormat = AVIF_PIXEL_FORMAT_NONE;
+ switch (dav1dImage->p.layout) {
+ case DAV1D_PIXEL_LAYOUT_I400:
+ case DAV1D_PIXEL_LAYOUT_I420:
+ yuvFormat = AVIF_PIXEL_FORMAT_YUV420;
+ break;
+ case DAV1D_PIXEL_LAYOUT_I422:
+ yuvFormat = AVIF_PIXEL_FORMAT_YUV422;
+ break;
+ case DAV1D_PIXEL_LAYOUT_I444:
+ yuvFormat = AVIF_PIXEL_FORMAT_YUV444;
+ break;
+ }
+
+ if (image->width && image->height) {
+ if ((image->width != dav1dImage->p.w) || (image->height != dav1dImage->p.h) || (image->depth != dav1dImage->p.bpc) ||
+ (image->yuvFormat != yuvFormat)) {
+ // Throw it all out
+ avifImageFreePlanes(image, AVIF_PLANES_ALL);
+ }
+ }
+
+ image->width = dav1dImage->p.w;
+ image->height = dav1dImage->p.h;
+ image->depth = dav1dImage->p.bpc;
+ image->yuvFormat = yuvFormat;
+ image->yuvRange = codec->internal->colorRange;
+
+ avifPixelFormatInfo formatInfo;
+ avifGetPixelFormatInfo(yuvFormat, &formatInfo);
+
+ int uvHeight = image->height >> formatInfo.chromaShiftY;
+
+ avifImageFreePlanes(image, AVIF_PLANES_YUV);
+ for (int yuvPlane = 0; yuvPlane < 3; ++yuvPlane) {
+ image->yuvPlanes[yuvPlane] = dav1dImage->data[yuvPlane];
+ image->yuvRowBytes[yuvPlane] = dav1dImage->stride[(yuvPlane == AVIF_CHAN_Y) ? 0 : 1];
+ }
+ image->decoderOwnsYUVPlanes = AVIF_TRUE;
+ } else {
+ // Alpha plane - ensure image is correct size, fill color
+
+ avifImageFreePlanes(image, AVIF_PLANES_A);
+ image->alphaPlane = dav1dImage->data[0];
+ image->alphaRowBytes = dav1dImage->stride[0];
+ image->decoderOwnsAlphaPlane = AVIF_TRUE;
+ }
+ return AVIF_TRUE;
}
avifCodec * avifCodecCreateDav1d()
@@ -144,9 +163,8 @@
avifCodec * codec = (avifCodec *)avifAlloc(sizeof(avifCodec));
memset(codec, 0, sizeof(struct avifCodec));
codec->decode = dav1dCodecDecode;
- codec->getImageSize = dav1dCodecGetImageSize;
codec->alphaLimitedRange = dav1dCodecAlphaLimitedRange;
- codec->getDecodedImage = dav1dCodecGetDecodedImage;
+ codec->getNextImage = dav1dCodecGetNextImage;
codec->destroyInternal = dav1dCodecDestroyInternal;
codec->internal = (struct avifCodecInternal *)avifAlloc(sizeof(struct avifCodecInternal));
diff --git a/src/read.c b/src/read.c
index 022eeb5..0371277 100644
--- a/src/read.c
+++ b/src/read.c
@@ -92,7 +92,6 @@
typedef struct avifSampleTableChunk
{
uint64_t offset;
- uint64_t size;
} avifSampleTableChunk;
AVIF_ARRAY_DECLARE(avifSampleTableChunkArray, avifSampleTableChunk, chunk);
@@ -125,7 +124,7 @@
avifSampleTableTimeToSampleArray timeToSamples;
} avifSampleTable;
-avifSampleTable * avifSampleTableCreate()
+static avifSampleTable * avifSampleTableCreate()
{
avifSampleTable * sampleTable = (avifSampleTable *)avifAlloc(sizeof(avifSampleTable));
memset(sampleTable, 0, sizeof(avifSampleTable));
@@ -136,7 +135,7 @@
return sampleTable;
}
-void avifSampleTableDestroy(avifSampleTable * sampleTable)
+static void avifSampleTableDestroy(avifSampleTable * sampleTable)
{
avifArrayDestroy(&sampleTable->chunks);
avifArrayDestroy(&sampleTable->sampleToChunks);
@@ -145,6 +144,21 @@
avifFree(sampleTable);
}
+static uint32_t avifSampleTableGetImageDelta(avifSampleTable * sampleTable, int imageIndex)
+{
+ int maxSampleIndex = 0;
+ for (uint32_t i = 0; i < sampleTable->timeToSamples.count; ++i) {
+ avifSampleTableTimeToSample * timeToSample = &sampleTable->timeToSamples.timeToSample[i];
+ maxSampleIndex += timeToSample->sampleCount;
+ if ((imageIndex < maxSampleIndex) || (i == (sampleTable->timeToSamples.count - 1))) {
+ return timeToSample->sampleDelta;
+ }
+ }
+
+ // TODO: fail here?
+ return 1;
+}
+
// one video track ("trak" contents)
typedef struct avifTrack
{
@@ -157,6 +171,67 @@
AVIF_ARRAY_DECLARE(avifTrackArray, avifTrack, track);
// ---------------------------------------------------------------------------
+// avifCodecDecodeInput
+
+static avifCodecDecodeInput * avifCodecDecodeInputCreate()
+{
+ avifCodecDecodeInput * decodeInput = (avifCodecDecodeInput *)avifAlloc(sizeof(avifCodecDecodeInput));
+ memset(decodeInput, 0, sizeof(avifCodecDecodeInput));
+ avifArrayCreate(&decodeInput->samples, sizeof(avifRawData), 1);
+ return decodeInput;
+}
+
+static void avifCodecDecodeInputDestroy(avifCodecDecodeInput * decodeInput)
+{
+ avifArrayDestroy(&decodeInput->samples);
+ avifFree(decodeInput);
+}
+
+static avifBool avifCodecDecodeInputGetSamples(avifCodecDecodeInput * decodeInput, avifSampleTable * sampleTable, avifRawData * rawInput)
+{
+ uint32_t sampleSizeIndex = 0;
+ for (uint32_t chunkIndex = 0; chunkIndex < sampleTable->chunks.count; ++chunkIndex) {
+ avifSampleTableChunk * chunk = &sampleTable->chunks.chunk[chunkIndex];
+
+ // First, figure out how many samples are in this chunk
+ uint32_t sampleCount = 0;
+ for (int sampleToChunkIndex = sampleTable->sampleToChunks.count - 1; sampleToChunkIndex >= 0; --sampleToChunkIndex) {
+ avifSampleTableSampleToChunk * sampleToChunk = &sampleTable->sampleToChunks.sampleToChunk[sampleToChunkIndex];
+ if (sampleToChunk->firstChunk <= (chunkIndex + 1)) {
+ sampleCount = sampleToChunk->samplesPerChunk;
+ break;
+ }
+ }
+ if (sampleCount == 0) {
+ // chunks with 0 samples are invalid
+ return AVIF_FALSE;
+ }
+
+ uint64_t sampleOffset = chunk->offset;
+ for (uint32_t sampleIndex = 0; sampleIndex < sampleCount; ++sampleIndex) {
+ if (sampleSizeIndex >= sampleTable->sampleSizes.count) {
+ // We've run out of samples to sum
+ return AVIF_FALSE;
+ }
+
+ avifSampleTableSampleSize * sampleSize = &sampleTable->sampleSizes.sampleSize[sampleSizeIndex];
+
+ avifRawData * rawSample = (avifRawData *)avifArrayPushPtr(&decodeInput->samples);
+ rawSample->data = rawInput->data + sampleOffset;
+ rawSample->size = sampleSize->size;
+
+ if (sampleOffset > (uint64_t)rawInput->size) {
+ return AVIF_FALSE;
+ }
+
+ sampleOffset += sampleSize->size;
+ ++sampleSizeIndex;
+ }
+ }
+ return AVIF_TRUE;
+}
+
+// ---------------------------------------------------------------------------
// avifData
typedef struct avifData
@@ -165,10 +240,15 @@
avifItemArray items;
avifPropertyArray properties;
avifTrackArray tracks;
- int propertyCount;
+ avifRawData rawInput;
+ avifCodecDecodeInput * colorInput;
+ avifCodecDecodeInput * alphaInput;
+ avifDecoderSource source;
+ avifSampleTable * sourceSampleTable; // NULL unless (source == AVIF_DECODER_SOURCE_TRACKS), owned by an avifTrack
+ struct avifCodec * codec[AVIF_CODEC_PLANES_COUNT];
} avifData;
-avifData * avifDataCreate()
+static avifData * avifDataCreate()
{
avifData * data = (avifData *)avifAlloc(sizeof(avifData));
memset(data, 0, sizeof(avifData));
@@ -178,8 +258,19 @@
return data;
}
-void avifDataDestroy(avifData * data)
+static void avifDataResetCodec(avifData * data)
{
+ for (int i = 0; i < AVIF_CODEC_PLANES_COUNT; ++i) {
+ if (data->codec[i]) {
+ avifCodecDestroy(data->codec[i]);
+ data->codec[i] = NULL;
+ }
+ }
+}
+
+static void avifDataDestroy(avifData * data)
+{
+ avifDataResetCodec(data);
avifArrayDestroy(&data->items);
avifArrayDestroy(&data->properties);
for (uint32_t i = 0; i < data->tracks.count; ++i) {
@@ -188,10 +279,16 @@
}
}
avifArrayDestroy(&data->tracks);
+ if (data->colorInput) {
+ avifCodecDecodeInputDestroy(data->colorInput);
+ }
+ if (data->alphaInput) {
+ avifCodecDecodeInputDestroy(data->alphaInput);
+ }
avifFree(data);
}
-avifItem * avifDataFindItem(avifData * data, uint32_t itemID)
+static avifItem * avifDataFindItem(avifData * data, uint32_t itemID)
{
if (itemID == 0) {
return NULL;
@@ -248,7 +345,7 @@
for (int i = 0; i < itemCount; ++i) {
uint16_t itemID; // unsigned int(16) item_ID;
CHECK(avifStreamReadU16(&s, &itemID)); //
- uint16_t dataReferenceIndex; // unsigned int(16) data_reference_index;
+ uint16_t dataReferenceIndex; // unsigned int(16) data_reference_index;
CHECK(avifStreamReadU16(&s, &dataReferenceIndex)); //
uint64_t baseOffset; // unsigned int(base_offset_size*8) base_offset;
CHECK(avifStreamReadUX8(&s, &baseOffset, baseOffsetSize)); //
@@ -261,6 +358,9 @@
CHECK(avifStreamReadUX8(&s, &extentLength, lengthSize));
avifItem * item = avifDataFindItem(data, itemID);
+ if (!item) {
+ return AVIF_FALSE;
+ }
item->id = itemID;
item->offset = (uint32_t)(baseOffset + extentOffset);
item->size = (uint32_t)extentLength;
@@ -323,8 +423,6 @@
{
BEGIN_STREAM(s, raw, rawLen);
- data->propertyCount = 0;
-
while (avifStreamHasBytesLeft(&s, 1)) {
avifBoxHeader header;
CHECK(avifStreamReadBoxHeader(&s, &header));
@@ -393,6 +491,9 @@
}
avifItem * item = avifDataFindItem(data, itemID);
+ if (!item) {
+ return AVIF_FALSE;
+ }
// Associate property with item
avifProperty * prop = &data->properties.prop[propertyIndex];
@@ -457,6 +558,10 @@
CHECK(avifStreamRead(&s, itemType, 4)); //
avifItem * item = avifDataFindItem(data, itemID);
+ if (!item) {
+ return AVIF_FALSE;
+ }
+
memcpy(item->type, itemType, sizeof(itemType));
return AVIF_TRUE;
}
@@ -473,7 +578,7 @@
CHECK(avifStreamReadU16(&s, &tmp)); // unsigned int(16) entry_count;
entryCount = tmp;
} else if (version == 1) {
- CHECK(avifStreamReadU32(&s, &entryCount)); // unsigned int(16) entry_count;
+ CHECK(avifStreamReadU32(&s, &entryCount)); // unsigned int(32) entry_count;
} else {
return AVIF_FALSE;
}
@@ -512,7 +617,7 @@
CHECK(avifStreamReadU16(&s, &tmp)); // unsigned int(16) from_item_ID;
fromID = tmp;
} else if (version == 1) {
- CHECK(avifStreamReadU32(&s, &fromID)); // unsigned int(32) from_item_ID;
+ CHECK(avifStreamReadU32(&s, &fromID)); // unsigned int(32) from_item_ID;
} else {
// unsupported iref version, skip it
break;
@@ -528,7 +633,7 @@
CHECK(avifStreamReadU16(&s, &tmp)); // unsigned int(16) to_item_ID;
toID = tmp;
} else if (version == 1) {
- CHECK(avifStreamReadU32(&s, &toID)); // unsigned int(32) to_item_ID;
+ CHECK(avifStreamReadU32(&s, &toID)); // unsigned int(32) to_item_ID;
} else {
// unsupported iref version, skip it
break;
@@ -537,6 +642,10 @@
// Read this reference as "{fromID} is a {irefType} for {toID}"
if (fromID && toID) {
avifItem * item = avifDataFindItem(data, fromID);
+ if (!item) {
+ return AVIF_FALSE;
+ }
+
if (!memcmp(irefHeader.type, "thmb", 4)) {
item->thumbnailForID = toID;
}
@@ -743,39 +852,6 @@
CHECK(avifStreamSkip(&s, header.size));
}
-
- // Now calculate chunk sizes from the read-in sample table
- uint32_t sampleSizeIndex = 0;
- for (uint32_t chunkIndex = 0; chunkIndex < track->sampleTable->chunks.count; ++chunkIndex) {
- avifSampleTableChunk * chunk = &track->sampleTable->chunks.chunk[chunkIndex];
-
- // First, figure out how many samples are in this chunk
- uint32_t sampleCount = 0;
- for (int sampleToChunkIndex = track->sampleTable->sampleToChunks.count - 1; sampleToChunkIndex >= 0; --sampleToChunkIndex) {
- avifSampleTableSampleToChunk * sampleToChunk = &track->sampleTable->sampleToChunks.sampleToChunk[sampleToChunkIndex];
- if (sampleToChunk->firstChunk <= (chunkIndex + 1)) {
- sampleCount = sampleToChunk->samplesPerChunk;
- break;
- }
- }
- if (sampleCount == 0) {
- // chunks with 0 samples are invalid
- return AVIF_FALSE;
- }
-
- // Then sum up the next sampleCount samples into this chunk
- for (uint32_t sampleIndex = 0; sampleIndex < sampleCount; ++sampleIndex) {
- if (sampleSizeIndex >= track->sampleTable->sampleSizes.count) {
- // We've run out of samples to sum
- return AVIF_FALSE;
- }
-
- avifSampleTableSampleSize * sampleSize = &track->sampleTable->sampleSizes.sampleSize[sampleSizeIndex];
- chunk->size += sampleSize->size;
- ++sampleSizeIndex;
- }
- }
-
return AVIF_TRUE;
}
@@ -815,6 +891,26 @@
return AVIF_TRUE;
}
+static avifBool avifTrackReferenceBox(avifData * data, avifTrack * track, uint8_t * raw, size_t rawLen)
+{
+ BEGIN_STREAM(s, raw, rawLen);
+
+ while (avifStreamHasBytesLeft(&s, 1)) {
+ avifBoxHeader header;
+ CHECK(avifStreamReadBoxHeader(&s, &header));
+
+ if (!memcmp(header.type, "auxl", 4)) {
+ uint32_t toID;
+ CHECK(avifStreamReadU32(&s, &toID)); // unsigned int(32) track_IDs[]
+ CHECK(avifStreamSkip(&s, header.size - sizeof(uint32_t))); // just take the first one
+ track->auxForID = toID;
+ } else {
+ CHECK(avifStreamSkip(&s, header.size));
+ }
+ }
+ return AVIF_TRUE;
+}
+
static avifBool avifParseTrackBox(avifData * data, uint8_t * raw, size_t rawLen)
{
BEGIN_STREAM(s, raw, rawLen);
@@ -829,6 +925,8 @@
CHECK(avifParseTrackHeaderBox(data, track, avifStreamCurrent(&s), header.size));
} else if (!memcmp(header.type, "mdia", 4)) {
CHECK(avifParseMediaBox(data, track, avifStreamCurrent(&s), header.size));
+ } else if (!memcmp(header.type, "tref", 4)) {
+ CHECK(avifTrackReferenceBox(data, track, avifStreamCurrent(&s), header.size));
}
CHECK(avifStreamSkip(&s, header.size));
@@ -904,90 +1002,228 @@
return decoder;
}
+static void avifDecoderCleanup(avifDecoder * decoder)
+{
+ if (decoder->data) {
+ avifDataDestroy(decoder->data);
+ decoder->data = NULL;
+ }
+
+ if (decoder->image) {
+ avifImageDestroy(decoder->image);
+ decoder->image = NULL;
+ }
+}
+
void avifDecoderDestroy(avifDecoder * decoder)
{
+ avifDecoderCleanup(decoder);
avifFree(decoder);
}
-avifResult avifDecoderRead(avifDecoder * decoder, avifImage * image, avifRawData * input)
+avifResult avifDecoderSetSource(avifDecoder * decoder, avifDecoderSource source)
{
- avifCodec * codec = NULL;
- avifData * data = NULL;
- avifResult result = AVIF_RESULT_UNKNOWN_ERROR;
+ decoder->requestedSource = source;
+ return avifDecoderReset(decoder);
+}
+avifResult avifDecoderParse(avifDecoder * decoder, avifRawData * rawInput)
+{
#if !defined(AVIF_CODEC_AOM) && !defined(AVIF_CODEC_DAV1D)
// Just bail out early, we're not surviving this function without a decoder compiled in
return AVIF_RESULT_NO_CODEC_AVAILABLE;
#endif
+ // Cleanup anything lingering in the decoder
+ avifDecoderCleanup(decoder);
+
// -----------------------------------------------------------------------
// Parse BMFF boxes
- data = avifDataCreate();
- if (!avifParse(data, input->data, input->size)) {
- result = AVIF_RESULT_BMFF_PARSE_FAILED;
- goto cleanup;
+ decoder->data = avifDataCreate();
+
+ // Shallow copy, on purpose
+ memcpy(&decoder->data->rawInput, rawInput, sizeof(avifRawData));
+
+ if (!avifParse(decoder->data, decoder->data->rawInput.data, decoder->data->rawInput.size)) {
+ return AVIF_RESULT_BMFF_PARSE_FAILED;
}
- avifBool avifCompatible = (memcmp(data->ftyp.majorBrand, "avif", 4) == 0) ? AVIF_TRUE : AVIF_FALSE;
+ avifBool avifCompatible = (memcmp(decoder->data->ftyp.majorBrand, "avif", 4) == 0) ? AVIF_TRUE : AVIF_FALSE;
if (!avifCompatible) {
- for (int compatibleBrandIndex = 0; compatibleBrandIndex < data->ftyp.compatibleBrandsCount; ++compatibleBrandIndex) {
- uint8_t * compatibleBrand = &data->ftyp.compatibleBrands[4 * compatibleBrandIndex];
- if (!memcmp(compatibleBrand, "avif", 4)) {
- avifCompatible = AVIF_TRUE;
- break;
+ avifCompatible = (memcmp(decoder->data->ftyp.majorBrand, "avis", 4) == 0) ? AVIF_TRUE : AVIF_FALSE;
+ if (!avifCompatible) {
+ for (int compatibleBrandIndex = 0; compatibleBrandIndex < decoder->data->ftyp.compatibleBrandsCount; ++compatibleBrandIndex) {
+ uint8_t * compatibleBrand = &decoder->data->ftyp.compatibleBrands[4 * compatibleBrandIndex];
+ if (!memcmp(compatibleBrand, "avif", 4)) {
+ avifCompatible = AVIF_TRUE;
+ break;
+ }
+ if (!memcmp(compatibleBrand, "avis", 4)) {
+ avifCompatible = AVIF_TRUE;
+ break;
+ }
}
}
}
if (!avifCompatible) {
- result = AVIF_RESULT_INVALID_FTYP;
- goto cleanup;
+ return AVIF_RESULT_INVALID_FTYP;
+ }
+
+ // Sanity check items
+ for (uint32_t itemIndex = 0; itemIndex < decoder->data->items.count; ++itemIndex) {
+ avifItem * item = &decoder->data->items.item[itemIndex];
+ if (item->offset > decoder->data->rawInput.size) {
+ return AVIF_RESULT_BMFF_PARSE_FAILED;
+ }
+ uint64_t offsetSize = (uint64_t)item->offset + (uint64_t)item->size;
+ if (offsetSize > (uint64_t)decoder->data->rawInput.size) {
+ return AVIF_RESULT_BMFF_PARSE_FAILED;
+ }
+ }
+
+ // Sanity check tracks
+ for (uint32_t trackIndex = 0; trackIndex < decoder->data->tracks.count; ++trackIndex) {
+ avifTrack * track = &decoder->data->tracks.track[trackIndex];
+ if (!track->sampleTable) {
+ continue;
+ }
+
+ for (uint32_t chunkIndex = 0; chunkIndex < track->sampleTable->chunks.count; ++chunkIndex) {
+ avifSampleTableChunk * chunk = &track->sampleTable->chunks.chunk[chunkIndex];
+ if (chunk->offset > decoder->data->rawInput.size) {
+ return AVIF_RESULT_BMFF_PARSE_FAILED;
+ }
+ }
+ }
+ return avifDecoderReset(decoder);
+}
+
+static avifCodec * avifCodecCreateForDecode(avifCodecDecodeInput * decodeInput)
+{
+ avifCodec * codec = NULL;
+#if defined(AVIF_CODEC_DAV1D)
+ codec = avifCodecCreateDav1d();
+#elif defined(AVIF_CODEC_AOM)
+ codec = avifCodecCreateAOM();
+#else
+#error No decoder available!
+#endif
+ if (codec) {
+ codec->decodeInput = decodeInput;
+ }
+ return codec;
+}
+
+avifResult avifDecoderReset(avifDecoder * decoder)
+{
+ avifData * data = decoder->data;
+ if (!data) {
+ // Nothing to reset.
+ return AVIF_RESULT_OK;
+ }
+
+ avifDataResetCodec(data);
+ if (!decoder->image) {
+ decoder->image = avifImageCreateEmpty();
}
// -----------------------------------------------------------------------
+ // Build decode input
- avifRawData colorOBU = AVIF_RAW_DATA_EMPTY;
- avifRawData alphaOBU = AVIF_RAW_DATA_EMPTY;
- avifItem * colorOBUItem = NULL;
- avifItem * alphaOBUItem = NULL;
-
- // Sanity check items
- for (uint32_t itemIndex = 0; itemIndex < data->items.count; ++itemIndex) {
- avifItem * item = &data->items.item[itemIndex];
- if (item->offset > input->size) {
- result = AVIF_RESULT_BMFF_PARSE_FAILED;
- goto cleanup;
+ data->sourceSampleTable = NULL; // Reset
+ if (decoder->requestedSource == AVIF_DECODER_SOURCE_AUTO) {
+ if (data->tracks.count > 0) {
+ data->source = AVIF_DECODER_SOURCE_TRACKS;
+ } else {
+ data->source = AVIF_DECODER_SOURCE_PRIMARY_ITEM;
}
- uint64_t offsetSize = (uint64_t)item->offset + (uint64_t)item->size;
- if (offsetSize > (uint64_t)input->size) {
- result = AVIF_RESULT_BMFF_PARSE_FAILED;
- goto cleanup;
- }
+ } else {
+ data->source = decoder->requestedSource;
}
- // Find the colorOBU item
- for (uint32_t itemIndex = 0; itemIndex < data->items.count; ++itemIndex) {
- avifItem * item = &data->items.item[itemIndex];
- if (!item->id || !item->size) {
+ if (data->source == AVIF_DECODER_SOURCE_TRACKS) {
+ avifTrack * colorTrack = NULL;
+ avifTrack * alphaTrack = NULL;
+
+ // Find primary track - this probably needs some better detection
+ uint32_t colorTrackIndex = 0;
+ for (; colorTrackIndex < decoder->data->tracks.count; ++colorTrackIndex) {
+ avifTrack * track = &decoder->data->tracks.track[colorTrackIndex];
+ if (!track->sampleTable) {
+ continue;
+ }
+ if (!track->sampleTable->chunks.count) {
+ continue;
+ }
+ if (track->auxForID != 0) {
+ continue;
+ }
+
+ // Found one!
break;
}
- if (memcmp(item->type, "av01", 4)) {
- // probably exif or some other data
- continue;
+ if (colorTrackIndex == decoder->data->tracks.count) {
+ return AVIF_RESULT_NO_CONTENT;
}
- if (item->thumbnailForID != 0) {
- // It's a thumbnail, skip it
- continue;
+ colorTrack = &decoder->data->tracks.track[colorTrackIndex];
+
+ uint32_t alphaTrackIndex = 0;
+ for (; alphaTrackIndex < decoder->data->tracks.count; ++alphaTrackIndex) {
+ avifTrack * track = &decoder->data->tracks.track[alphaTrackIndex];
+ if (!track->sampleTable) {
+ continue;
+ }
+ if (!track->sampleTable->chunks.count) {
+ continue;
+ }
+ if (track->auxForID == colorTrack->id) {
+ // Found it!
+ break;
+ }
+ }
+ if (alphaTrackIndex != decoder->data->tracks.count) {
+ alphaTrack = &decoder->data->tracks.track[alphaTrackIndex];
}
- colorOBUItem = item;
- colorOBU.data = input->data + item->offset;
- colorOBU.size = item->size;
- break;
- }
+ // TODO: We must get color profile information from somewhere; likely the color OBU as a fallback
- // Find the alphaOBU item, if any
- if (colorOBUItem) {
+ data->colorInput = avifCodecDecodeInputCreate();
+ if (!avifCodecDecodeInputGetSamples(data->colorInput, colorTrack->sampleTable, &decoder->data->rawInput)) {
+ return AVIF_RESULT_BMFF_PARSE_FAILED;
+ }
+
+ if (alphaTrack) {
+ data->alphaInput = avifCodecDecodeInputCreate();
+ if (!avifCodecDecodeInputGetSamples(data->alphaInput, alphaTrack->sampleTable, &decoder->data->rawInput)) {
+ return AVIF_RESULT_BMFF_PARSE_FAILED;
+ }
+ data->alphaInput->alpha = AVIF_TRUE;
+ }
+
+ // Stash off sample table for future timing information
+ data->sourceSampleTable = colorTrack->sampleTable;
+
+ // Image sequence timing
+ decoder->imageIndex = -1;
+ decoder->imageCount = data->colorInput->samples.count;
+ decoder->timescale = colorTrack->mediaTimescale;
+ decoder->durationInTimescales = colorTrack->mediaDuration;
+ if (colorTrack->mediaTimescale) {
+ decoder->duration = (double)decoder->durationInTimescales / (double)colorTrack->mediaTimescale;
+ } else {
+ decoder->duration = 0;
+ }
+ memset(&decoder->imageTiming, 0, sizeof(decoder->imageTiming)); // to be set in avifDecoderNextImage()
+ } else {
+ // Create from items
+
+ avifRawData colorOBU = AVIF_RAW_DATA_EMPTY;
+ avifRawData alphaOBU = AVIF_RAW_DATA_EMPTY;
+ avifItem * colorOBUItem = NULL;
+ avifItem * alphaOBUItem = NULL;
+
+ // Find the colorOBU item
for (uint32_t itemIndex = 0; itemIndex < data->items.count; ++itemIndex) {
avifItem * item = &data->items.item[itemIndex];
if (!item->id || !item->size) {
@@ -1002,105 +1238,160 @@
continue;
}
- if (isAlphaURN(item->auxC.auxType) && (item->auxForID == colorOBUItem->id)) {
- alphaOBUItem = item;
- alphaOBU.data = input->data + item->offset;
- alphaOBU.size = item->size;
- break;
+ colorOBUItem = item;
+ colorOBU.data = data->rawInput.data + item->offset;
+ colorOBU.size = item->size;
+ break;
+ }
+
+ // Find the alphaOBU item, if any
+ if (colorOBUItem) {
+ for (uint32_t itemIndex = 0; itemIndex < data->items.count; ++itemIndex) {
+ avifItem * item = &data->items.item[itemIndex];
+ if (!item->id || !item->size) {
+ break;
+ }
+ if (memcmp(item->type, "av01", 4)) {
+ // probably exif or some other data
+ continue;
+ }
+ if (item->thumbnailForID != 0) {
+ // It's a thumbnail, skip it
+ continue;
+ }
+
+ if (isAlphaURN(item->auxC.auxType) && (item->auxForID == colorOBUItem->id)) {
+ alphaOBUItem = item;
+ alphaOBU.data = data->rawInput.data + item->offset;
+ alphaOBU.size = item->size;
+ break;
+ }
}
}
- }
- if (colorOBU.size == 0) {
- result = AVIF_RESULT_NO_AV1_ITEMS_FOUND;
- goto cleanup;
- }
- avifBool hasAlpha = (alphaOBU.size > 0) ? AVIF_TRUE : AVIF_FALSE;
-
-#if defined(AVIF_CODEC_DAV1D)
- codec = avifCodecCreateDav1d();
-#elif defined(AVIF_CODEC_AOM)
- codec = avifCodecCreateAOM();
-#else
- // #error No decoder available!
- return AVIF_RESULT_NO_CODEC_AVAILABLE;
-#endif
- if (!codec->decode(codec, AVIF_CODEC_PLANES_COLOR, &colorOBU)) {
- result = AVIF_RESULT_DECODE_COLOR_FAILED;
- goto cleanup;
- }
- avifCodecImageSize colorPlanesSize = codec->getImageSize(codec, AVIF_CODEC_PLANES_COLOR);
-
- avifCodecImageSize alphaPlanesSize;
- memset(&alphaPlanesSize, 0, sizeof(alphaPlanesSize));
- if (hasAlpha) {
- if (!codec->decode(codec, AVIF_CODEC_PLANES_ALPHA, &alphaOBU)) {
- result = AVIF_RESULT_DECODE_ALPHA_FAILED;
- goto cleanup;
+ if (colorOBU.size == 0) {
+ return AVIF_RESULT_NO_AV1_ITEMS_FOUND;
}
- alphaPlanesSize = codec->getImageSize(codec, AVIF_CODEC_PLANES_ALPHA);
- if ((colorPlanesSize.width != alphaPlanesSize.width) || (colorPlanesSize.height != alphaPlanesSize.height)) {
- result = AVIF_RESULT_COLOR_ALPHA_SIZE_MISMATCH;
- goto cleanup;
+ if (colorOBUItem->colrPresent) {
+ if (colorOBUItem->colr.format == AVIF_PROFILE_FORMAT_ICC) {
+ avifImageSetProfileICC(decoder->image, colorOBUItem->colr.icc, colorOBUItem->colr.iccSize);
+ } else if (colorOBUItem->colr.format == AVIF_PROFILE_FORMAT_NCLX) {
+ avifImageSetProfileNCLX(decoder->image, &colorOBUItem->colr.nclx);
+ }
+ }
+
+ data->colorInput = avifCodecDecodeInputCreate();
+ avifRawData * rawColorInput = (avifRawData *)avifArrayPushPtr(&data->colorInput->samples);
+ memcpy(rawColorInput, &colorOBU, sizeof(avifRawData));
+ if (alphaOBU.size > 0) {
+ data->alphaInput = avifCodecDecodeInputCreate();
+ avifRawData * rawAlphaInput = (avifRawData *)avifArrayPushPtr(&data->alphaInput->samples);
+ memcpy(rawAlphaInput, &alphaOBU, sizeof(avifRawData));
+ data->alphaInput->alpha = AVIF_TRUE;
+ }
+
+ // Set all counts and timing to safe-but-uninteresting values
+ decoder->imageIndex = -1;
+ decoder->imageCount = 1;
+ decoder->imageTiming.timescale = 1;
+ decoder->imageTiming.pts = 0;
+ decoder->imageTiming.ptsInTimescales = 0;
+ decoder->imageTiming.duration = 1;
+ decoder->imageTiming.durationInTimescales = 1;
+ decoder->timescale = 1;
+ decoder->duration = 1;
+ decoder->durationInTimescales = 1;
+ }
+
+ data->codec[AVIF_CODEC_PLANES_COLOR] = avifCodecCreateForDecode(data->colorInput);
+ if (!data->codec[AVIF_CODEC_PLANES_COLOR]->decode(data->codec[AVIF_CODEC_PLANES_COLOR])) {
+ return AVIF_RESULT_DECODE_COLOR_FAILED;
+ }
+
+ if (data->alphaInput) {
+ decoder->data->codec[AVIF_CODEC_PLANES_ALPHA] = avifCodecCreateForDecode(data->alphaInput);
+ if (!data->codec[AVIF_CODEC_PLANES_ALPHA]->decode(data->codec[AVIF_CODEC_PLANES_ALPHA])) {
+ return AVIF_RESULT_DECODE_ALPHA_FAILED;
}
}
+ return AVIF_RESULT_OK;
+}
- if ((colorOBUItem && colorOBUItem->ispePresent &&
- ((colorOBUItem->ispe.width != colorPlanesSize.width) || (colorOBUItem->ispe.height != colorPlanesSize.height))) ||
- (alphaOBUItem && alphaOBUItem->ispePresent &&
- ((alphaOBUItem->ispe.width != alphaPlanesSize.width) || (alphaOBUItem->ispe.height != alphaPlanesSize.height)))) {
- result = AVIF_RESULT_ISPE_SIZE_MISMATCH;
- goto cleanup;
- }
-
- if (colorOBUItem->colrPresent) {
- if (colorOBUItem->colr.format == AVIF_PROFILE_FORMAT_ICC) {
- avifImageSetProfileICC(image, colorOBUItem->colr.icc, colorOBUItem->colr.iccSize);
- } else if (colorOBUItem->colr.format == AVIF_PROFILE_FORMAT_NCLX) {
- avifImageSetProfileNCLX(image, &colorOBUItem->colr.nclx);
+avifResult avifDecoderNextImage(avifDecoder * decoder)
+{
+ avifCodec * colorCodec = decoder->data->codec[AVIF_CODEC_PLANES_COLOR];
+ if (!colorCodec->getNextImage(colorCodec, decoder->image)) {
+ if (decoder->image->width) {
+ // We've sent at least one image, but we've run out now.
+ return AVIF_RESULT_NO_IMAGES_REMAINING;
}
+ return AVIF_RESULT_DECODE_COLOR_FAILED;
}
- avifImageFreePlanes(image, AVIF_PLANES_ALL);
-
- avifResult imageResult = codec->getDecodedImage(codec, image);
- if (imageResult != AVIF_RESULT_OK) {
- result = imageResult;
- goto cleanup;
+ avifCodec * alphaCodec = decoder->data->codec[AVIF_CODEC_PLANES_ALPHA];
+ if (alphaCodec) {
+ if (!alphaCodec->getNextImage(alphaCodec, decoder->image)) {
+ return AVIF_RESULT_DECODE_ALPHA_FAILED;
+ }
+ } else {
+ avifImageFreePlanes(decoder->image, AVIF_PLANES_A);
}
#if defined(AVIF_FIX_STUDIO_ALPHA)
- if (hasAlpha && codec->alphaLimitedRange(codec)) {
+ if (alphaCodec && alphaCodec->alphaLimitedRange(alphaCodec)) {
// Naughty! Alpha planes are supposed to be full range. Correct that here.
- if (avifImageUsesU16(image)) {
- for (int j = 0; j < image->height; ++j) {
- for (int i = 0; i < image->height; ++i) {
- uint16_t * alpha = (uint16_t *)&image->alphaPlane[(i * 2) + (j * image->alphaRowBytes)];
- *alpha = (uint16_t)avifLimitedToFullY(image->depth, *alpha);
+ avifImageCopyDecoderAlpha(decoder->image);
+ if (avifImageUsesU16(decoder->image)) {
+ for (int j = 0; j < decoder->image->height; ++j) {
+ for (int i = 0; i < decoder->image->width; ++i) {
+ uint16_t * alpha = (uint16_t *)&decoder->image->alphaPlane[(i * 2) + (j * decoder->image->alphaRowBytes)];
+ *alpha = (uint16_t)avifLimitedToFullY(decoder->image->depth, *alpha);
}
}
} else {
- for (int j = 0; j < image->height; ++j) {
- for (int i = 0; i < image->height; ++i) {
- uint8_t * alpha = &image->alphaPlane[i + (j * image->alphaRowBytes)];
- *alpha = (uint8_t)avifLimitedToFullY(image->depth, *alpha);
+ for (int j = 0; j < decoder->image->height; ++j) {
+ for (int i = 0; i < decoder->image->width; ++i) {
+ uint8_t * alpha = &decoder->image->alphaPlane[i + (j * decoder->image->alphaRowBytes)];
+ *alpha = (uint8_t)avifLimitedToFullY(decoder->image->depth, *alpha);
}
}
}
}
#endif
- decoder->ioStats.colorOBUSize = colorOBU.size;
- decoder->ioStats.alphaOBUSize = alphaOBU.size;
+ ++decoder->imageIndex;
+ if (decoder->data->sourceSampleTable) {
+ // Decoding from a track! Provide timing information.
- result = AVIF_RESULT_OK;
-cleanup:
- if (codec) {
- avifCodecDestroy(codec);
+ decoder->imageTiming.timescale = decoder->timescale;
+ decoder->imageTiming.ptsInTimescales += decoder->imageTiming.durationInTimescales;
+ decoder->imageTiming.durationInTimescales = avifSampleTableGetImageDelta(decoder->data->sourceSampleTable, decoder->imageIndex);
+
+ if (decoder->imageTiming.timescale > 0) {
+ decoder->imageTiming.pts = (double)decoder->imageTiming.ptsInTimescales / (double)decoder->imageTiming.timescale;
+ decoder->imageTiming.duration = (double)decoder->imageTiming.durationInTimescales / (double)decoder->imageTiming.timescale;
+ } else {
+ decoder->imageTiming.pts = 0.0;
+ decoder->imageTiming.duration = 0.0;
+ }
}
- if (data) {
- avifDataDestroy(data);
+ return AVIF_RESULT_OK;
+}
+
+avifResult avifDecoderRead(avifDecoder * decoder, avifImage * image, avifRawData * input)
+{
+ avifResult result = avifDecoderParse(decoder, input);
+ if (result != AVIF_RESULT_OK) {
+ return result;
}
- return result;
+ result = avifDecoderNextImage(decoder);
+ if (result != AVIF_RESULT_OK) {
+ return result;
+ }
+ if (!decoder->image) {
+ return AVIF_RESULT_NO_IMAGES_REMAINING;
+ }
+ avifImageCopy(image, decoder->image);
+ return AVIF_RESULT_OK;
}
diff --git a/src/reformat.c b/src/reformat.c
index 6bb2631..fb4b99f 100644
--- a/src/reformat.c
+++ b/src/reformat.c
@@ -228,12 +228,28 @@
int uvJ = j >> state.formatInfo.chromaShiftY;
if (state.usesU16) {
yuvUNorm[0] = *((uint16_t *)&image->yuvPlanes[AVIF_CHAN_Y][(i * 2) + (j * image->yuvRowBytes[AVIF_CHAN_Y])]);
- yuvUNorm[1] = *((uint16_t *)&image->yuvPlanes[AVIF_CHAN_U][(uvI * 2) + (uvJ * image->yuvRowBytes[AVIF_CHAN_U])]);
- yuvUNorm[2] = *((uint16_t *)&image->yuvPlanes[AVIF_CHAN_V][(uvI * 2) + (uvJ * image->yuvRowBytes[AVIF_CHAN_V])]);
+ if (image->yuvRowBytes[AVIF_CHAN_U]) {
+ yuvUNorm[1] = *((uint16_t *)&image->yuvPlanes[AVIF_CHAN_U][(uvI * 2) + (uvJ * image->yuvRowBytes[AVIF_CHAN_U])]);
+ } else {
+ yuvUNorm[1] = 0;
+ }
+ if (image->yuvRowBytes[AVIF_CHAN_V]) {
+ yuvUNorm[2] = *((uint16_t *)&image->yuvPlanes[AVIF_CHAN_V][(uvI * 2) + (uvJ * image->yuvRowBytes[AVIF_CHAN_V])]);
+ } else {
+ yuvUNorm[2] = 0;
+ }
} else {
yuvUNorm[0] = image->yuvPlanes[AVIF_CHAN_Y][i + (j * image->yuvRowBytes[AVIF_CHAN_Y])];
- yuvUNorm[1] = image->yuvPlanes[AVIF_CHAN_U][uvI + (uvJ * image->yuvRowBytes[AVIF_CHAN_U])];
- yuvUNorm[2] = image->yuvPlanes[AVIF_CHAN_V][uvI + (uvJ * image->yuvRowBytes[AVIF_CHAN_V])];
+ if (image->yuvRowBytes[AVIF_CHAN_U]) {
+ yuvUNorm[1] = image->yuvPlanes[AVIF_CHAN_U][uvI + (uvJ * image->yuvRowBytes[AVIF_CHAN_U])];
+ } else {
+ yuvUNorm[1] = 0;
+ }
+ if (image->yuvRowBytes[AVIF_CHAN_V]) {
+ yuvUNorm[2] = image->yuvPlanes[AVIF_CHAN_V][uvI + (uvJ * image->yuvRowBytes[AVIF_CHAN_V])];
+ } else {
+ yuvUNorm[2] = 0;
+ }
}
// adjust for limited/full color range, if need be
diff --git a/src/write.c b/src/write.c
index e24765b..8bfced6 100644
--- a/src/write.c
+++ b/src/write.c
@@ -28,7 +28,8 @@
avifEncoder * encoder = (avifEncoder *)avifAlloc(sizeof(avifEncoder));
memset(encoder, 0, sizeof(avifEncoder));
encoder->maxThreads = 1;
- encoder->quality = AVIF_BEST_QUALITY;
+ encoder->minQuantizer = AVIF_QUANTIZER_LOSSLESS;
+ encoder->maxQuantizer = AVIF_QUANTIZER_LOSSLESS;
return encoder;
}
@@ -37,6 +38,15 @@
avifFree(encoder);
}
+static avifCodec * avifCodecCreateForEncode()
+{
+#ifdef AVIF_CODEC_AOM
+ return avifCodecCreateAOM();
+#else
+ return NULL;
+#endif
+}
+
avifResult avifEncoderWrite(avifEncoder * encoder, avifImage * image, avifRawData * output)
{
if ((image->depth != 8) && (image->depth != 10) && (image->depth != 12)) {
@@ -46,14 +56,20 @@
avifResult result = AVIF_RESULT_UNKNOWN_ERROR;
avifRawData colorOBU = AVIF_RAW_DATA_EMPTY;
avifRawData alphaOBU = AVIF_RAW_DATA_EMPTY;
- avifCodec * codec = NULL;
+ avifCodec * codec[AVIF_CODEC_PLANES_COUNT];
-#ifdef AVIF_CODEC_AOM
- codec = avifCodecCreateAOM();
-#else
- // Just bail out early, we're not surviving this function without an encoder compiled in
- return AVIF_RESULT_NO_CODEC_AVAILABLE;
-#endif
+ codec[AVIF_CODEC_PLANES_COLOR] = avifCodecCreateForEncode();
+ if (!codec[AVIF_CODEC_PLANES_COLOR]) {
+ // Just bail out early, we're not surviving this function without an encoder compiled in
+ return AVIF_RESULT_NO_CODEC_AVAILABLE;
+ }
+
+ avifBool imageIsOpaque = avifImageIsOpaque(image);
+ if (imageIsOpaque) {
+ codec[AVIF_CODEC_PLANES_ALPHA] = NULL;
+ } else {
+ codec[AVIF_CODEC_PLANES_ALPHA] = avifCodecCreateForEncode();
+ }
avifStream s;
avifStreamStart(&s, output);
@@ -84,16 +100,23 @@
// -----------------------------------------------------------------------
// Encode AV1 OBUs
- avifRawData * alphaOBUPtr = &alphaOBU;
- if (avifImageIsOpaque(image)) {
- alphaOBUPtr = NULL;
- }
+ // avifRawData * alphaOBUPtr = &alphaOBU;
+ // if (avifImageIsOpaque(image)) {
+ // alphaOBUPtr = NULL;
+ // }
- avifResult encodeResult = codec->encodeImage(codec, image, encoder, &colorOBU, alphaOBUPtr);
- if (encodeResult != AVIF_RESULT_OK) {
- result = encodeResult;
+ if (!codec[AVIF_CODEC_PLANES_COLOR]->encodeImage(codec[AVIF_CODEC_PLANES_COLOR], image, encoder, &colorOBU, AVIF_FALSE)) {
+ result = AVIF_RESULT_ENCODE_COLOR_FAILED;
goto writeCleanup;
}
+
+ if (!imageIsOpaque) {
+ if (!codec[AVIF_CODEC_PLANES_ALPHA]->encodeImage(codec[AVIF_CODEC_PLANES_ALPHA], image, encoder, &alphaOBU, AVIF_TRUE)) {
+ result = AVIF_RESULT_ENCODE_ALPHA_FAILED;
+ goto writeCleanup;
+ }
+ }
+
avifBool hasAlpha = (alphaOBU.size > 0) ? AVIF_TRUE : AVIF_FALSE;
// -----------------------------------------------------------------------
@@ -257,7 +280,7 @@
ipmaPush(&ipmaColor, ipcoIndex);
avifCodecConfigurationBox colorConfig;
- codec->getConfigurationBox(codec, AVIF_CODEC_PLANES_COLOR, &colorConfig);
+ codec[AVIF_CODEC_PLANES_COLOR]->getConfigurationBox(codec[AVIF_CODEC_PLANES_COLOR], &colorConfig);
writeConfigBox(&s, &colorConfig);
++ipcoIndex;
ipmaPush(&ipmaColor, ipcoIndex);
@@ -271,7 +294,7 @@
ipmaPush(&ipmaAlpha, ipcoIndex);
avifCodecConfigurationBox alphaConfig;
- codec->getConfigurationBox(codec, AVIF_CODEC_PLANES_ALPHA, &alphaConfig);
+ codec[AVIF_CODEC_PLANES_ALPHA]->getConfigurationBox(codec[AVIF_CODEC_PLANES_ALPHA], &alphaConfig);
writeConfigBox(&s, &alphaConfig);
++ipcoIndex;
ipmaPush(&ipmaAlpha, ipcoIndex);
@@ -353,8 +376,11 @@
result = AVIF_RESULT_OK;
writeCleanup:
- if (codec) {
- avifCodecDestroy(codec);
+ if (codec[AVIF_CODEC_PLANES_COLOR]) {
+ avifCodecDestroy(codec[AVIF_CODEC_PLANES_COLOR]);
+ }
+ if (codec[AVIF_CODEC_PLANES_ALPHA]) {
+ avifCodecDestroy(codec[AVIF_CODEC_PLANES_ALPHA]);
}
avifRawDataFree(&colorOBU);
avifRawDataFree(&alphaOBU);