Large planes refactor to properly support any YUV format (YV12 and studio range still need work)
diff --git a/examples/avif_example1.c b/examples/avif_example1.c
index 49ed805..9c2323b 100644
--- a/examples/avif_example1.c
+++ b/examples/avif_example1.c
@@ -11,17 +11,17 @@
#if 1
int width = 32;
int height = 32;
- int depth = 12;
+ int depth = 8;
// Encode an orange, 8-bit, full opacity image
- avifImage * image = avifImageCreate();
- avifImageCreatePixels(image, AVIF_PIXEL_FORMAT_RGBA, width, height, depth);
+ avifImage * image = avifImageCreate(width, height, depth, AVIF_PIXEL_FORMAT_YUV444);
+ avifImageAllocatePlanes(image, AVIF_PLANES_RGB | AVIF_PLANES_A);
for (int j = 0; j < height; ++j) {
for (int i = 0; i < width; ++i) {
- image->planes[0][i + (j * image->strides[0])] = 4095; // R
- image->planes[1][i + (j * image->strides[1])] = 2000; // G
- image->planes[2][i + (j * image->strides[2])] = 0; // B
- image->planes[3][i + (j * image->strides[3])] = 4095; // A
+ image->rgbPlanes[0][i + (j * image->rgbRowBytes[0])] = 255; // R
+ image->rgbPlanes[1][i + (j * image->rgbRowBytes[1])] = 128; // G
+ image->rgbPlanes[2][i + (j * image->rgbRowBytes[2])] = 0; // B
+ image->alphaPlane[i + (j * image->alphaRowBytes)] = 255; // A
}
}
@@ -32,6 +32,7 @@
avifRawData raw = AVIF_RAW_DATA_EMPTY;
avifResult res = avifImageWrite(image, &raw, 50);
+#if 0
// debug
{
FILE * f = fopen("out.avif", "wb");
@@ -40,28 +41,24 @@
fclose(f);
}
}
+#endif
if (res == AVIF_RESULT_OK) {
// Decode it
- avifImage * decoded = avifImageCreate();
+ avifImage * decoded = avifImageCreateEmpty();
avifResult decodeResult = avifImageRead(decoded, &raw);
if (decodeResult == AVIF_RESULT_OK) {
- avifImage * rgbImage = avifImageCreate();
- avifResult reformatResult = avifImageReformatPixels(decoded, rgbImage, AVIF_PIXEL_FORMAT_RGBA, depth);
- if (reformatResult == AVIF_RESULT_OK) {
- for (int j = 0; j < height; ++j) {
- for (int i = 0; i < width; ++i) {
- for (int plane = 0; plane < 3; ++plane) {
- uint32_t src = image->planes[plane][i + (j * image->strides[plane])];
- uint32_t dst = rgbImage->planes[plane][i + (j * rgbImage->strides[plane])];
- if (src != dst) {
- printf("(%d,%d,p%d) %d != %d\n", i, j, plane, src, dst);
- }
+ for (int j = 0; j < height; ++j) {
+ for (int i = 0; i < width; ++i) {
+ for (int plane = 0; plane < 3; ++plane) {
+ uint32_t src = image->rgbPlanes[plane][i + (j * image->rgbRowBytes[plane])];
+ uint32_t dst = decoded->rgbPlanes[plane][i + (j * decoded->rgbRowBytes[plane])];
+ if (src != dst) {
+ printf("(%d,%d,p%d) %d != %d\n", i, j, plane, src, dst);
}
}
}
}
- avifImageDestroy(rgbImage);
}
avifImageDestroy(decoded);
}
diff --git a/include/avif/avif.h b/include/avif/avif.h
index 49b9659..11808c7 100644
--- a/include/avif/avif.h
+++ b/include/avif/avif.h
@@ -18,7 +18,30 @@
#define AVIF_BEST_QUALITY 0
#define AVIF_WORST_QUALITY 63
-#define AVIF_MAX_PLANES 4
+#define AVIF_PLANE_COUNT_RGB 3
+#define AVIF_PLANE_COUNT_YUV 3
+
+enum avifPlanesFlags
+{
+ AVIF_PLANES_RGB = (1 << 0),
+ AVIF_PLANES_YUV = (1 << 1),
+ AVIF_PLANES_A = (1 << 2),
+
+ AVIF_PLANES_ALL = 0xff
+};
+
+enum avifChannelIndex
+{
+ // rgbPlanes
+ AVIF_CHAN_R = 0,
+ AVIF_CHAN_G = 1,
+ AVIF_CHAN_B = 2,
+
+ // yuvPlanes - These are always correct, even if UV is flipped when encoded (YV12)
+ AVIF_CHAN_Y = 0,
+ AVIF_CHAN_U = 1,
+ AVIF_CHAN_V = 2
+};
// ---------------------------------------------------------------------------
// Utils
@@ -46,7 +69,10 @@
{
AVIF_RESULT_OK = 0,
AVIF_RESULT_UNKNOWN_ERROR,
+ AVIF_RESULT_NO_CONTENT,
+ AVIF_RESULT_NO_YUV_FORMAT_SELECTED,
AVIF_RESULT_REFORMAT_FAILED,
+ AVIF_RESULT_UNSUPPORTED_DEPTH,
AVIF_RESULT_ENCODE_COLOR_FAILED,
AVIF_RESULT_ENCODE_ALPHA_FAILED,
AVIF_RESULT_BMFF_PARSE_FAILED,
@@ -87,16 +113,29 @@
// No pixels are present
AVIF_PIXEL_FORMAT_NONE = 0,
- // R:0, G:1, B:2, A:3
- AVIF_PIXEL_FORMAT_RGBA,
-
- // Y:0, U:1, V:2, A:3, full size chroma
AVIF_PIXEL_FORMAT_YUV444,
-
- // Y:0, U:1, V:2, A:3, half size chroma
- AVIF_PIXEL_FORMAT_YUV420
+ AVIF_PIXEL_FORMAT_YUV422,
+ AVIF_PIXEL_FORMAT_YUV420,
+ AVIF_PIXEL_FORMAT_YV12
} avifPixelFormat;
+typedef struct avifPixelFormatInfo
+{
+ int chromaShiftX;
+ int chromaShiftY;
+} avifPixelFormatInfo;
+
+void avifGetPixelFormatInfo(avifPixelFormat format, avifPixelFormatInfo * info);
+
+// ---------------------------------------------------------------------------
+// avifRange
+
+typedef enum avifRange
+{
+ AVIF_RANGE_LIMITED = 0,
+ AVIF_RANGE_FULL,
+} avifRange;
+
// ---------------------------------------------------------------------------
// avifProfileFormat
@@ -115,33 +154,40 @@
typedef struct avifImage
{
// Image information
- avifPixelFormat pixelFormat;
int width;
int height;
- int depth; // all planes must share this depth
- uint16_t * planes[AVIF_MAX_PLANES];
- uint32_t strides[AVIF_MAX_PLANES];
+ int depth; // all planes (RGB/YUV/A) must share this depth; if depth>8, all planes are uint16_t internally
+
+ uint8_t * rgbPlanes[AVIF_PLANE_COUNT_RGB];
+ uint32_t rgbRowBytes[AVIF_PLANE_COUNT_RGB];
+
+ avifPixelFormat yuvFormat;
+ avifRange yuvRange;
+ uint8_t * yuvPlanes[AVIF_PLANE_COUNT_YUV];
+ uint32_t yuvRowBytes[AVIF_PLANE_COUNT_YUV];
+
+ uint8_t * alphaPlane;
+ uint32_t alphaRowBytes;
// Profile information
avifProfileFormat profileFormat;
avifRawData icc;
-
-#if 0
- // Additional data from an encode/decode (useful for verbose logging)
- struct Stats
- {
- uint32_t colorPayloadSize;
- uint32_t alphaPayloadSize;
- } stats;
-#endif
} avifImage;
-avifImage * avifImageCreate(void);
+avifImage * avifImageCreate(int width, int height, int depth, avifPixelFormat yuvFormat);
+avifImage * avifImageCreateEmpty(void); // helper for making an image to decode into
void avifImageDestroy(avifImage * image);
-void avifImageClear(avifImage * image);
-void avifImageCreatePixels(avifImage * image, avifPixelFormat pixelFormat, int width, int height, int depth);
+
+void avifImageAllocatePlanes(avifImage * image, uint32_t planes); // Ignores any pre-existing planes
+void avifImageFreePlanes(avifImage * image, uint32_t planes); // Ignores already-freed planes
avifResult avifImageRead(avifImage * image, avifRawData * input);
avifResult avifImageWrite(avifImage * image, avifRawData * output, int quality); // if OK, output must be freed with avifRawDataFree()
-avifResult avifImageReformatPixels(avifImage * srcImage, avifImage * dstImage, avifPixelFormat dstPixelFormat, int dstDepth);
+
+// Used by avifImageRead/avifImageWrite
+avifResult avifImageRGBToYUV(avifImage * image);
+avifResult avifImageYUVToRGB(avifImage * image);
+
+// Helpers
+avifBool avifImageUsesU16(avifImage * image);
#endif // ifndef AVIF_AVIF_H
diff --git a/src/avif.c b/src/avif.c
index a552e29..96bcf71 100644
--- a/src/avif.c
+++ b/src/avif.c
@@ -5,14 +5,26 @@
#include <string.h>
-static void avifImageClearPlanes(avifImage * image)
+void avifGetPixelFormatInfo(avifPixelFormat format, avifPixelFormatInfo * info)
{
- for (int plane = 0; plane < AVIF_MAX_PLANES; ++plane) {
- if (image->planes[plane]) {
- avifFree(image->planes[plane]);
- image->planes[plane] = NULL;
- }
- image->strides[plane] = 0;
+ memset(info, 0, sizeof(avifPixelFormatInfo));
+
+ switch (format) {
+ case AVIF_PIXEL_FORMAT_YUV444:
+ info->chromaShiftX = 0;
+ info->chromaShiftY = 0;
+ break;
+
+ case AVIF_PIXEL_FORMAT_YUV422:
+ info->chromaShiftX = 1;
+ info->chromaShiftY = 0;
+ break;
+
+ case AVIF_PIXEL_FORMAT_YUV420:
+ case AVIF_PIXEL_FORMAT_YV12:
+ info->chromaShiftX = 1;
+ info->chromaShiftY = 1;
+ break;
}
}
@@ -20,64 +32,117 @@
static void avifImageSetDefaults(avifImage * image)
{
memset(image, 0, sizeof(avifImage));
+ image->yuvRange = AVIF_RANGE_FULL;
}
-avifImage * avifImageCreate()
+avifImage * avifImageCreate(int width, int height, int depth, avifPixelFormat yuvFormat)
{
avifImage * image = (avifImage *)avifAlloc(sizeof(avifImage));
avifImageSetDefaults(image);
+ image->width = width;
+ image->height = height;
+ image->depth = depth;
+ image->yuvFormat = yuvFormat;
return image;
}
+avifImage * avifImageCreateEmpty(void)
+{
+ return avifImageCreate(0, 0, 0, AVIF_PIXEL_FORMAT_NONE);
+}
+
void avifImageDestroy(avifImage * image)
{
- avifImageClear(image);
+ avifImageFreePlanes(image, AVIF_PLANES_ALL);
+ avifRawDataFree(&image->icc);
avifFree(image);
}
-void avifImageClear(avifImage * image)
+void avifImageAllocatePlanes(avifImage * image, uint32_t planes)
{
- avifImageClearPlanes(image);
- avifImageSetDefaults(image);
-}
-
-void avifImageCreatePixels(avifImage * image, avifPixelFormat pixelFormat, int width, int height, int depth)
-{
- avifImageClearPlanes(image);
-
- switch (pixelFormat) {
- case AVIF_PIXEL_FORMAT_NONE:
- image->width = 0;
- image->height = 0;
- image->depth = 0;
- break;
-
- case AVIF_PIXEL_FORMAT_RGBA:
- case AVIF_PIXEL_FORMAT_YUV444:
- image->width = width;
- image->height = height;
- image->depth = depth;
- image->strides[0] = width;
- image->strides[1] = width;
- image->strides[2] = width;
- image->strides[3] = width;
- break;
-
- case AVIF_PIXEL_FORMAT_YUV420:
- image->width = width;
- image->height = height;
- image->depth = depth;
- image->strides[0] = width;
- image->strides[1] = width >> 1;
- image->strides[2] = width >> 1;
- image->strides[3] = width;
- break;
- }
- image->pixelFormat = pixelFormat;
-
- for (int plane = 0; plane < AVIF_MAX_PLANES; ++plane) {
- if (image->strides[plane]) {
- image->planes[plane] = avifAlloc(sizeof(uint16_t) * image->strides[plane] * image->height);
+ int channelSize = avifImageUsesU16(image) ? 2 : 1;
+ int fullRowBytes = channelSize * image->width;
+ int fullSize = fullRowBytes * image->height;
+ if (planes & AVIF_PLANES_RGB) {
+ if (!image->rgbPlanes[AVIF_CHAN_R]) {
+ image->rgbRowBytes[AVIF_CHAN_R] = fullRowBytes;
+ image->rgbPlanes[AVIF_CHAN_R] = avifAlloc(fullSize);
+ memset(image->rgbPlanes[AVIF_CHAN_R], 0, fullSize);
+ }
+ if (!image->rgbPlanes[AVIF_CHAN_G]) {
+ image->rgbRowBytes[AVIF_CHAN_G] = fullRowBytes;
+ image->rgbPlanes[AVIF_CHAN_G] = avifAlloc(fullSize);
+ memset(image->rgbPlanes[AVIF_CHAN_G], 0, fullSize);
+ }
+ if (!image->rgbPlanes[AVIF_CHAN_B]) {
+ image->rgbRowBytes[AVIF_CHAN_B] = fullRowBytes;
+ image->rgbPlanes[AVIF_CHAN_B] = avifAlloc(fullSize);
+ memset(image->rgbPlanes[AVIF_CHAN_B], 0, fullSize);
}
}
+ if ((planes & AVIF_PLANES_YUV) && (image->yuvFormat != AVIF_PIXEL_FORMAT_NONE)) {
+ avifPixelFormatInfo info;
+ avifGetPixelFormatInfo(image->yuvFormat, &info);
+
+        int uvRowBytes = channelSize * ((image->width + info.chromaShiftX) >> info.chromaShiftX);
+        int uvSize = uvRowBytes * ((image->height + info.chromaShiftY) >> info.chromaShiftY);
+ if (!image->yuvPlanes[AVIF_CHAN_Y]) {
+ image->yuvRowBytes[AVIF_CHAN_Y] = fullRowBytes;
+ image->yuvPlanes[AVIF_CHAN_Y] = avifAlloc(fullSize);
+ memset(image->yuvPlanes[AVIF_CHAN_Y], 0, fullSize);
+ }
+ if (!image->yuvPlanes[AVIF_CHAN_U]) {
+ image->yuvRowBytes[AVIF_CHAN_U] = uvRowBytes;
+ image->yuvPlanes[AVIF_CHAN_U] = avifAlloc(uvSize);
+ memset(image->yuvPlanes[AVIF_CHAN_U], 0, uvSize);
+ }
+ if (!image->yuvPlanes[AVIF_CHAN_V]) {
+ image->yuvRowBytes[AVIF_CHAN_V] = uvRowBytes;
+ image->yuvPlanes[AVIF_CHAN_V] = avifAlloc(uvSize);
+ memset(image->yuvPlanes[AVIF_CHAN_V], 0, uvSize);
+ }
+ }
+ if (planes & AVIF_PLANES_A) {
+ if (!image->alphaPlane) {
+ image->alphaRowBytes = fullRowBytes;
+ image->alphaPlane = avifAlloc(fullRowBytes * image->height);
+ memset(image->alphaPlane, 0, fullRowBytes * image->height);
+ }
+ }
+}
+
+void avifImageFreePlanes(avifImage * image, uint32_t planes)
+{
+ if (planes & AVIF_PLANES_RGB) {
+ avifFree(image->rgbPlanes[AVIF_CHAN_R]);
+ image->rgbPlanes[AVIF_CHAN_R] = NULL;
+ image->rgbRowBytes[AVIF_CHAN_R] = 0;
+ avifFree(image->rgbPlanes[AVIF_CHAN_G]);
+ image->rgbPlanes[AVIF_CHAN_G] = NULL;
+ image->rgbRowBytes[AVIF_CHAN_G] = 0;
+        avifFree(image->rgbPlanes[AVIF_CHAN_B]);
+        image->rgbPlanes[AVIF_CHAN_B] = NULL;
+        image->rgbRowBytes[AVIF_CHAN_B] = 0;
+ }
+ if ((planes & AVIF_PLANES_YUV) && (image->yuvFormat != AVIF_PIXEL_FORMAT_NONE)) {
+ avifFree(image->yuvPlanes[AVIF_CHAN_Y]);
+ image->yuvPlanes[AVIF_CHAN_Y] = NULL;
+ image->yuvRowBytes[AVIF_CHAN_Y] = 0;
+ avifFree(image->yuvPlanes[AVIF_CHAN_U]);
+ image->yuvPlanes[AVIF_CHAN_U] = NULL;
+ image->yuvRowBytes[AVIF_CHAN_U] = 0;
+ avifFree(image->yuvPlanes[AVIF_CHAN_V]);
+ image->yuvPlanes[AVIF_CHAN_V] = NULL;
+ image->yuvRowBytes[AVIF_CHAN_V] = 0;
+ }
+ if (planes & AVIF_PLANES_A) {
+ avifFree(image->alphaPlane);
+ image->alphaPlane = NULL;
+ image->alphaRowBytes = 0;
+ }
+}
+
+avifBool avifImageUsesU16(avifImage * image)
+{
+ return (image->depth > 8) ? AVIF_TRUE : AVIF_FALSE;
}
diff --git a/src/read.c b/src/read.c
index c5ee4f6..0cf1cad 100644
--- a/src/read.c
+++ b/src/read.c
@@ -278,51 +278,68 @@
return AVIF_RESULT_ISPE_SIZE_MISMATCH;
}
- avifPixelFormat pixelFormat;
- int xShift = 0;
- int yShift = 0;
+ avifPixelFormat yuvFormat = AVIF_PIXEL_FORMAT_NONE;
switch (aomColorImage->fmt) {
+ case AOM_IMG_FMT_I420:
+ case AOM_IMG_FMT_AOMI420:
case AOM_IMG_FMT_I42016:
- pixelFormat = AVIF_PIXEL_FORMAT_YUV420;
- xShift = 1;
- yShift = 1;
+ yuvFormat = AVIF_PIXEL_FORMAT_YUV420;
break;
+ case AOM_IMG_FMT_I422:
+ case AOM_IMG_FMT_I42216:
+ yuvFormat = AVIF_PIXEL_FORMAT_YUV422;
+ break;
+ case AOM_IMG_FMT_I444:
case AOM_IMG_FMT_I44416:
- pixelFormat = AVIF_PIXEL_FORMAT_YUV444;
+ yuvFormat = AVIF_PIXEL_FORMAT_YUV444;
break;
+ case AOM_IMG_FMT_YV12:
+ case AOM_IMG_FMT_AOMYV12:
+ case AOM_IMG_FMT_YV1216:
+ yuvFormat = AVIF_PIXEL_FORMAT_YV12;
+ break;
+ case AOM_IMG_FMT_NONE:
default:
- aom_codec_destroy(&colorDecoder);
- if (hasAlpha) {
- aom_codec_destroy(&alphaDecoder);
- }
- return AVIF_UNSUPPORTED_PIXEL_FORMAT;
+ break;
}
- avifImageCreatePixels(image, pixelFormat, aomColorImage->d_w, aomColorImage->d_h, aomColorImage->bit_depth);
+ avifImageFreePlanes(image, AVIF_PLANES_ALL);
+ image->width = aomColorImage->d_w;
+ image->height = aomColorImage->d_h;
+ image->depth = aomColorImage->bit_depth;
+ image->yuvFormat = yuvFormat;
+ image->yuvRange = (aomColorImage->range == AOM_CR_STUDIO_RANGE) ? AVIF_RANGE_LIMITED : AVIF_RANGE_FULL;
- uint16_t maxChannel = (1 << image->depth) - 1;
+ avifPixelFormatInfo formatInfo;
+ avifGetPixelFormatInfo(yuvFormat, &formatInfo);
+
+ int uvHeight = image->height >> formatInfo.chromaShiftY;
+ avifImageAllocatePlanes(image, AVIF_PLANES_YUV);
for (int j = 0; j < image->height; ++j) {
- for (int i = 0; i < image->width; ++i) {
- uint16_t * planeChannel;
- int x = i >> xShift;
- int y = j >> yShift;
-
- planeChannel = (uint16_t *)&aomColorImage->planes[0][(j * aomColorImage->stride[0]) + (2 * i)];
- image->planes[0][i + (j * image->strides[0])] = *planeChannel;
- planeChannel = (uint16_t *)&aomColorImage->planes[1][(y * aomColorImage->stride[1]) + (2 * x)];
- image->planes[1][x + (y * image->strides[1])] = *planeChannel;
- planeChannel = (uint16_t *)&aomColorImage->planes[2][(y * aomColorImage->stride[2]) + (2 * x)];
- image->planes[2][x + (y * image->strides[2])] = *planeChannel;
-
- if (hasAlpha) {
- uint16_t * planeChannel = (uint16_t *)&aomAlphaImage->planes[0][(j * aomColorImage->stride[0]) + (2 * i)];
- image->planes[3][i + (j * image->strides[3])] = *planeChannel;
- } else {
- image->planes[3][i + (j * image->strides[3])] = maxChannel;
+ for (int yuvPlane = 0; yuvPlane < 3; ++yuvPlane) {
+ if ((yuvPlane > 0) && (j >= uvHeight)) {
+ // Bail out if we're on a half-height UV plane
+ break;
}
+
+ uint8_t * srcRow = &aomColorImage->planes[yuvPlane][j * aomColorImage->stride[yuvPlane]];
+ uint8_t * dstRow = &image->yuvPlanes[yuvPlane][j * image->yuvRowBytes[yuvPlane]];
+ memcpy(dstRow, srcRow, image->yuvRowBytes[yuvPlane]);
}
}
+ if (hasAlpha) {
+ avifImageAllocatePlanes(image, AVIF_PLANES_A);
+ for (int j = 0; j < image->height; ++j) {
+ uint8_t * srcAlphaRow = &aomAlphaImage->planes[0][j * aomAlphaImage->stride[0]];
+ uint8_t * dstAlphaRow = &image->alphaPlane[j * image->alphaRowBytes];
+ memcpy(dstAlphaRow, srcAlphaRow, image->alphaRowBytes);
+ }
+ }
+
+ // Make this optional?
+ avifImageYUVToRGB(image);
+
aom_codec_destroy(&colorDecoder);
if (hasAlpha) {
aom_codec_destroy(&alphaDecoder);
diff --git a/src/reformat.c b/src/reformat.c
index 66efae1..96fe1ed 100644
--- a/src/reformat.c
+++ b/src/reformat.c
@@ -5,23 +5,65 @@
#include <string.h>
-static avifResult reformatRGBAToYUV444(avifImage * srcImage, avifImage * dstImage, int dstDepth)
+typedef struct avifReformatState
{
+ // YUV coefficients
+ float kr;
+ float kg;
+ float kb;
+
+ avifPixelFormatInfo formatInfo;
+ avifBool usesU16;
+} avifReformatState;
+
+static avifBool avifPrepareReformatState(avifImage * image, avifReformatState * state)
+{
+ if (image->yuvFormat == AVIF_PIXEL_FORMAT_NONE) {
+ return AVIF_FALSE;
+ }
+ avifGetPixelFormatInfo(image->yuvFormat, &state->formatInfo);
+
// TODO: calculate coefficients
- const float kr = 0.2126f;
- const float kb = 0.0722f;
- const float kg = 1.0f - kr - kb;
+ state->kr = 0.2126f;
+ state->kb = 0.0722f;
+ state->kg = 1.0f - state->kr - state->kb;
+
+ state->usesU16 = avifImageUsesU16(image);
+ return AVIF_TRUE;
+}
+
+avifResult avifImageRGBToYUV(avifImage * image)
+{
+ if (!image->rgbPlanes[AVIF_CHAN_R] || !image->rgbPlanes[AVIF_CHAN_G] || !image->rgbPlanes[AVIF_CHAN_B]) {
+ return AVIF_RESULT_REFORMAT_FAILED;
+ }
+
+ avifReformatState state;
+ if (!avifPrepareReformatState(image, &state)) {
+ return AVIF_RESULT_REFORMAT_FAILED;
+ }
+
+ avifImageAllocatePlanes(image, AVIF_PLANES_YUV);
+
+ const float kr = state.kr;
+ const float kg = state.kg;
+ const float kb = state.kb;
float yuvPixel[3];
float rgbPixel[3];
- float srcMaxChannel = (float)((1 << srcImage->depth) - 1);
- float dstMaxChannel = (float)((1 << dstImage->depth) - 1);
- for (int j = 0; j < srcImage->height; ++j) {
- for (int i = 0; i < srcImage->width; ++i) {
+ float maxChannel = (float)((1 << image->depth) - 1);
+ for (int j = 0; j < image->height; ++j) {
+ for (int i = 0; i < image->width; ++i) {
// Unpack RGB into normalized float
- rgbPixel[0] = srcImage->planes[0][i + (j * srcImage->strides[0])] / srcMaxChannel;
- rgbPixel[1] = srcImage->planes[1][i + (j * srcImage->strides[1])] / srcMaxChannel;
- rgbPixel[2] = srcImage->planes[2][i + (j * srcImage->strides[2])] / srcMaxChannel;
+ if (state.usesU16) {
+ rgbPixel[0] = *((uint16_t *)(&image->rgbPlanes[AVIF_CHAN_R][(i * 2) + (j * image->rgbRowBytes[AVIF_CHAN_R])])) / maxChannel;
+ rgbPixel[1] = *((uint16_t *)(&image->rgbPlanes[AVIF_CHAN_G][(i * 2) + (j * image->rgbRowBytes[AVIF_CHAN_G])])) / maxChannel;
+ rgbPixel[2] = *((uint16_t *)(&image->rgbPlanes[AVIF_CHAN_B][(i * 2) + (j * image->rgbRowBytes[AVIF_CHAN_B])])) / maxChannel;
+ } else {
+ rgbPixel[0] = image->rgbPlanes[AVIF_CHAN_R][i + (j * image->rgbRowBytes[AVIF_CHAN_R])] / maxChannel;
+ rgbPixel[1] = image->rgbPlanes[AVIF_CHAN_G][i + (j * image->rgbRowBytes[AVIF_CHAN_G])] / maxChannel;
+ rgbPixel[2] = image->rgbPlanes[AVIF_CHAN_B][i + (j * image->rgbRowBytes[AVIF_CHAN_B])] / maxChannel;
+ }
// RGB -> YUV conversion
float Y = (kr * rgbPixel[0]) + (kg * rgbPixel[1]) + (kb * rgbPixel[2]);
@@ -35,36 +77,61 @@
yuvPixel[1] = AVIF_CLAMP(yuvPixel[1], 0.0f, 1.0f);
yuvPixel[2] += 0.5f;
yuvPixel[2] = AVIF_CLAMP(yuvPixel[2], 0.0f, 1.0f);
- for (int plane = 0; plane < 3; ++plane) {
- dstImage->planes[plane][i + (j * dstImage->strides[plane])] = (uint16_t)avifRoundf(yuvPixel[plane] * dstMaxChannel);
- }
- // reformat alpha
- float alpha = (float)srcImage->planes[3][i + (j * srcImage->strides[3])] / srcMaxChannel;
- dstImage->planes[3][i + (j * dstImage->strides[3])] = (uint16_t)avifRoundf(alpha * dstMaxChannel);
+ int uvI = i >> state.formatInfo.chromaShiftX;
+ int uvJ = j >> state.formatInfo.chromaShiftY;
+ if (state.usesU16) {
+ uint16_t * pY = (uint16_t *)&image->yuvPlanes[AVIF_CHAN_Y][(i * 2) + (j * image->yuvRowBytes[AVIF_CHAN_Y])];
+ *pY = (uint16_t)avifRoundf(yuvPixel[0] * maxChannel);
+ uint16_t * pU = (uint16_t *)&image->yuvPlanes[AVIF_CHAN_U][(uvI * 2) + (uvJ * image->yuvRowBytes[AVIF_CHAN_U])];
+ *pU = (uint16_t)avifRoundf(yuvPixel[1] * maxChannel);
+ uint16_t * pV = (uint16_t *)&image->yuvPlanes[AVIF_CHAN_V][(uvI * 2) + (uvJ * image->yuvRowBytes[AVIF_CHAN_V])];
+ *pV = (uint16_t)avifRoundf(yuvPixel[2] * maxChannel);
+ } else {
+ image->yuvPlanes[AVIF_CHAN_Y][i + (j * image->yuvRowBytes[AVIF_CHAN_Y])] = (uint8_t)avifRoundf(yuvPixel[0] * maxChannel);
+ image->yuvPlanes[AVIF_CHAN_U][uvI + (uvJ * image->yuvRowBytes[AVIF_CHAN_U])] = (uint8_t)avifRoundf(yuvPixel[1] * maxChannel);
+ image->yuvPlanes[AVIF_CHAN_V][uvI + (uvJ * image->yuvRowBytes[AVIF_CHAN_V])] = (uint8_t)avifRoundf(yuvPixel[2] * maxChannel);
+ }
}
}
return AVIF_RESULT_OK;
}
-static avifResult reformatYUV444ToRGBA(avifImage * srcImage, avifImage * dstImage, int dstDepth)
+avifResult avifImageYUVToRGB(avifImage * image)
{
- // TODO: calculate coefficients
- const float kr = 0.2126f;
- const float kb = 0.0722f;
- const float kg = 1.0f - kr - kb;
+ if (!image->yuvPlanes[AVIF_CHAN_Y] || !image->yuvPlanes[AVIF_CHAN_U] || !image->yuvPlanes[AVIF_CHAN_V]) {
+ return AVIF_RESULT_REFORMAT_FAILED;
+ }
+
+ avifReformatState state;
+ if (!avifPrepareReformatState(image, &state)) {
+ return AVIF_RESULT_REFORMAT_FAILED;
+ }
+
+ avifImageAllocatePlanes(image, AVIF_PLANES_RGB);
+
+ const float kr = state.kr;
+ const float kg = state.kg;
+ const float kb = state.kb;
float yuvPixel[3];
float rgbPixel[3];
- float srcMaxChannel = (float)((1 << srcImage->depth) - 1);
- float dstMaxChannel = (float)((1 << dstImage->depth) - 1);
- for (int j = 0; j < srcImage->height; ++j) {
- for (int i = 0; i < srcImage->width; ++i) {
+ float maxChannel = (float)((1 << image->depth) - 1);
+ for (int j = 0; j < image->height; ++j) {
+ for (int i = 0; i < image->width; ++i) {
// Unpack YUV into normalized float
- yuvPixel[0] = srcImage->planes[0][i + (j * srcImage->strides[0])] / srcMaxChannel;
- yuvPixel[1] = srcImage->planes[1][i + (j * srcImage->strides[1])] / srcMaxChannel;
- yuvPixel[2] = srcImage->planes[2][i + (j * srcImage->strides[2])] / srcMaxChannel;
+ int uvI = i >> state.formatInfo.chromaShiftX;
+ int uvJ = j >> state.formatInfo.chromaShiftY;
+ if (state.usesU16) {
+ yuvPixel[0] = *((uint16_t *)&image->yuvPlanes[AVIF_CHAN_Y][(i * 2) + (j * image->yuvRowBytes[AVIF_CHAN_Y])]) / maxChannel;
+ yuvPixel[1] = *((uint16_t *)&image->yuvPlanes[AVIF_CHAN_U][(uvI * 2) + (uvJ * image->yuvRowBytes[AVIF_CHAN_U])]) / maxChannel;
+ yuvPixel[2] = *((uint16_t *)&image->yuvPlanes[AVIF_CHAN_V][(uvI * 2) + (uvJ * image->yuvRowBytes[AVIF_CHAN_V])]) / maxChannel;
+ } else {
+ yuvPixel[0] = image->yuvPlanes[AVIF_CHAN_Y][i + (j * image->yuvRowBytes[AVIF_CHAN_Y])] / maxChannel;
+ yuvPixel[1] = image->yuvPlanes[AVIF_CHAN_U][uvI + (uvJ * image->yuvRowBytes[AVIF_CHAN_U])] / maxChannel;
+ yuvPixel[2] = image->yuvPlanes[AVIF_CHAN_V][uvI + (uvJ * image->yuvRowBytes[AVIF_CHAN_V])] / maxChannel;
+ }
yuvPixel[1] -= 0.5f;
yuvPixel[2] -= 0.5f;
@@ -82,136 +149,20 @@
rgbPixel[0] = AVIF_CLAMP(R, 0.0f, 1.0f);
rgbPixel[1] = AVIF_CLAMP(G, 0.0f, 1.0f);
rgbPixel[2] = AVIF_CLAMP(B, 0.0f, 1.0f);
- for (int plane = 0; plane < 3; ++plane) {
- dstImage->planes[plane][i + (j * dstImage->strides[plane])] = (uint16_t)avifRoundf(rgbPixel[plane] * dstMaxChannel);
- }
- // reformat alpha
- float alpha = (float)srcImage->planes[3][i + (j * srcImage->strides[3])] / srcMaxChannel;
- dstImage->planes[3][i + (j * dstImage->strides[3])] = (uint16_t)avifRoundf(alpha * dstMaxChannel);
+ if (state.usesU16) {
+ uint16_t * pR = (uint16_t *)&image->rgbPlanes[AVIF_CHAN_R][(i * 2) + (j * image->rgbRowBytes[AVIF_CHAN_R])];
+ *pR = (uint16_t)avifRoundf(rgbPixel[0] * maxChannel);
+ uint16_t * pG = (uint16_t *)&image->rgbPlanes[AVIF_CHAN_G][(i * 2) + (j * image->rgbRowBytes[AVIF_CHAN_G])];
+ *pG = (uint16_t)avifRoundf(rgbPixel[1] * maxChannel);
+ uint16_t * pB = (uint16_t *)&image->rgbPlanes[AVIF_CHAN_B][(i * 2) + (j * image->rgbRowBytes[AVIF_CHAN_B])];
+ *pB = (uint16_t)avifRoundf(rgbPixel[2] * maxChannel);
+ } else {
+ image->rgbPlanes[AVIF_CHAN_R][i + (j * image->rgbRowBytes[AVIF_CHAN_R])] = (uint8_t)avifRoundf(rgbPixel[0] * maxChannel);
+ image->rgbPlanes[AVIF_CHAN_G][i + (j * image->rgbRowBytes[AVIF_CHAN_G])] = (uint8_t)avifRoundf(rgbPixel[1] * maxChannel);
+ image->rgbPlanes[AVIF_CHAN_B][i + (j * image->rgbRowBytes[AVIF_CHAN_B])] = (uint8_t)avifRoundf(rgbPixel[2] * maxChannel);
+ }
}
}
-
return AVIF_RESULT_OK;
}
-
-static avifResult reformatRGBAToYUV420(avifImage * srcImage, avifImage * dstImage, int dstDepth)
-{
- // TODO: calculate coefficients
- const float kr = 0.2126f;
- const float kb = 0.0722f;
- const float kg = 1.0f - kr - kb;
-
- float yuvPixel[3];
- float rgbPixel[3];
- float srcMaxChannel = (float)((1 << srcImage->depth) - 1);
- float dstMaxChannel = (float)((1 << dstImage->depth) - 1);
- for (int j = 0; j < srcImage->height; ++j) {
- for (int i = 0; i < srcImage->width; ++i) {
- // Unpack RGB into normalized float
- rgbPixel[0] = srcImage->planes[0][i + (j * srcImage->strides[0])] / srcMaxChannel;
- rgbPixel[1] = srcImage->planes[1][i + (j * srcImage->strides[1])] / srcMaxChannel;
- rgbPixel[2] = srcImage->planes[2][i + (j * srcImage->strides[2])] / srcMaxChannel;
-
- // RGB -> YUV conversion
- float Y = (kr * rgbPixel[0]) + (kg * rgbPixel[1]) + (kb * rgbPixel[2]);
- yuvPixel[0] = Y;
- yuvPixel[1] = (rgbPixel[2] - Y) / (2 * (1 - kb));
- yuvPixel[2] = (rgbPixel[0] - Y) / (2 * (1 - kr));
-
- // Stuff YUV into unorm16 color layer
- yuvPixel[0] = AVIF_CLAMP(yuvPixel[0], 0.0f, 1.0f);
- yuvPixel[1] += 0.5f;
- yuvPixel[1] = AVIF_CLAMP(yuvPixel[1], 0.0f, 1.0f);
- yuvPixel[2] += 0.5f;
- yuvPixel[2] = AVIF_CLAMP(yuvPixel[2], 0.0f, 1.0f);
-
- dstImage->planes[0][i + (j * dstImage->strides[0])] = (uint16_t)avifRoundf(yuvPixel[0] * dstMaxChannel);
-
- int x = i >> 1;
- int y = j >> 1;
- dstImage->planes[1][x + (y * dstImage->strides[1])] = (uint16_t)avifRoundf(yuvPixel[1] * dstMaxChannel);
- dstImage->planes[2][x + (y * dstImage->strides[2])] = (uint16_t)avifRoundf(yuvPixel[2] * dstMaxChannel);
-
- // reformat alpha
- float alpha = (float)srcImage->planes[3][i + (j * srcImage->strides[3])] / srcMaxChannel;
- dstImage->planes[3][i + (j * dstImage->strides[3])] = (uint16_t)avifRoundf(alpha * dstMaxChannel);
- }
- }
-
- return AVIF_RESULT_OK;
-}
-
-static avifResult reformatYUV420ToRGBA(avifImage * srcImage, avifImage * dstImage, int dstDepth)
-{
- // TODO: calculate coefficients
- const float kr = 0.2126f;
- const float kb = 0.0722f;
- const float kg = 1.0f - kr - kb;
-
- float yuvPixel[3];
- float rgbPixel[3];
- float srcMaxChannel = (float)((1 << srcImage->depth) - 1);
- float dstMaxChannel = (float)((1 << dstImage->depth) - 1);
- for (int j = 0; j < srcImage->height; ++j) {
- for (int i = 0; i < srcImage->width; ++i) {
- // Unpack YUV into normalized float
- int x = i >> 1;
- int y = j >> 1;
- yuvPixel[0] = srcImage->planes[0][i + (j * srcImage->strides[0])] / srcMaxChannel;
- yuvPixel[1] = srcImage->planes[1][x + (y * srcImage->strides[1])] / srcMaxChannel;
- yuvPixel[2] = srcImage->planes[2][x + (y * srcImage->strides[2])] / srcMaxChannel;
- yuvPixel[1] -= 0.5f;
- yuvPixel[2] -= 0.5f;
-
- float Y = yuvPixel[0];
- float Cb = yuvPixel[1];
- float Cr = yuvPixel[2];
-
- float R = Y + (2 * (1 - kr)) * Cr;
- float B = Y + (2 * (1 - kb)) * Cb;
- float G = Y - (
- (2 * ((kr * (1 - kr) * Cr) + (kb * (1 - kb) * Cb)))
- /
- kg);
-
- rgbPixel[0] = AVIF_CLAMP(R, 0.0f, 1.0f);
- rgbPixel[1] = AVIF_CLAMP(G, 0.0f, 1.0f);
- rgbPixel[2] = AVIF_CLAMP(B, 0.0f, 1.0f);
- for (int plane = 0; plane < 3; ++plane) {
- dstImage->planes[plane][i + (j * dstImage->strides[plane])] = (uint16_t)avifRoundf(rgbPixel[plane] * dstMaxChannel);
- }
-
- // reformat alpha
- float alpha = (float)srcImage->planes[3][i + (j * srcImage->strides[3])] / srcMaxChannel;
- dstImage->planes[3][i + (j * dstImage->strides[3])] = (uint16_t)avifRoundf(alpha * dstMaxChannel);
- }
- }
-
- return AVIF_RESULT_OK;
-}
-
-avifResult avifImageReformatPixels(avifImage * srcImage, avifImage * dstImage, avifPixelFormat dstPixelFormat, int dstDepth)
-{
- avifImageCreatePixels(dstImage, dstPixelFormat, srcImage->width, srcImage->height, dstDepth);
- switch (srcImage->pixelFormat) {
- case AVIF_PIXEL_FORMAT_RGBA:
- switch (dstPixelFormat) {
- case AVIF_PIXEL_FORMAT_YUV444:
- return reformatRGBAToYUV444(srcImage, dstImage, dstDepth);
- case AVIF_PIXEL_FORMAT_YUV420:
- return reformatRGBAToYUV420(srcImage, dstImage, dstDepth);
- }
- case AVIF_PIXEL_FORMAT_YUV444:
- switch (dstPixelFormat) {
- case AVIF_PIXEL_FORMAT_RGBA:
- return reformatYUV444ToRGBA(srcImage, dstImage, dstDepth);
- }
- case AVIF_PIXEL_FORMAT_YUV420:
- switch (dstPixelFormat) {
- case AVIF_PIXEL_FORMAT_RGBA:
- return reformatYUV420ToRGBA(srcImage, dstImage, dstDepth);
- }
- }
- return AVIF_RESULT_REFORMAT_FAILED;
-}
diff --git a/src/write.c b/src/write.c
index 1b50eee..ce0981c 100644
--- a/src/write.c
+++ b/src/write.c
@@ -13,6 +13,10 @@
avifResult avifImageWrite(avifImage * image, avifRawData * output, int quality)
{
+ if ((image->depth != 8) && (image->depth != 10) && (image->depth != 12)) {
+ return AVIF_RESULT_UNSUPPORTED_DEPTH;
+ }
+
avifResult result = AVIF_RESULT_UNKNOWN_ERROR;
avifRawData colorOBU = AVIF_RAW_DATA_EMPTY;
avifRawData alphaOBU = AVIF_RAW_DATA_EMPTY;
@@ -23,45 +27,37 @@
// -----------------------------------------------------------------------
// Reformat pixels, if need be
- avifPixelFormat dstPixelFormat = image->pixelFormat;
- if (image->pixelFormat == AVIF_PIXEL_FORMAT_RGBA) {
- // AV1 doesn't support RGB, reformat
- dstPixelFormat = AVIF_PIXEL_FORMAT_YUV444;
+ if (!image->width || !image->height || !image->depth) {
+ result = AVIF_RESULT_NO_CONTENT;
+ goto writeCleanup;
}
-#if 0 // TODO: implement choice in depth
- int dstDepth = AVIF_CLAMP(image->depth, 8, 12);
- if ((dstDepth == 9) || (dstDepth == 11)) {
- ++dstDepth;
- }
-#else
- int dstDepth = 12;
-#endif
-
- avifImage * pixelImage = image;
- avifImage * reformattedImage = NULL;
- if ((image->pixelFormat != dstPixelFormat) || (image->depth != dstDepth)) {
- reformattedImage = avifImageCreate();
- avifResult reformatResult = avifImageReformatPixels(image, reformattedImage, dstPixelFormat, dstDepth);
- if (reformatResult != AVIF_RESULT_OK) {
- result = reformatResult;
+ if ((image->yuvFormat == AVIF_PIXEL_FORMAT_NONE) || !image->yuvPlanes[AVIF_CHAN_Y] || !image->yuvPlanes[AVIF_CHAN_U] || !image->yuvPlanes[AVIF_CHAN_V]) {
+ if (!image->rgbPlanes[AVIF_CHAN_R] || !image->rgbPlanes[AVIF_CHAN_G] || !image->rgbPlanes[AVIF_CHAN_B]) {
+ result = AVIF_RESULT_NO_CONTENT;
goto writeCleanup;
}
- pixelImage = reformattedImage;
+
+ avifImageFreePlanes(image, AVIF_PLANES_YUV);
+ if (image->yuvFormat == AVIF_PIXEL_FORMAT_NONE) {
+ result = AVIF_RESULT_NO_YUV_FORMAT_SELECTED;
+ goto writeCleanup;
+ }
+ avifImageRGBToYUV(image);
}
// -----------------------------------------------------------------------
// Encode AV1 OBUs
- if (!encodeOBU(pixelImage, AVIF_FALSE, &colorOBU, quality)) {
+ if (!encodeOBU(image, AVIF_FALSE, &colorOBU, quality)) {
result = AVIF_RESULT_ENCODE_COLOR_FAILED;
goto writeCleanup;
}
// Skip alpha creation on opaque images
avifBool hasAlpha = AVIF_FALSE;
- if (!avifImageIsOpaque(pixelImage)) {
- if (!encodeOBU(pixelImage, AVIF_TRUE, &alphaOBU, quality)) {
+ if (!avifImageIsOpaque(image)) {
+ if (!encodeOBU(image, AVIF_TRUE, &alphaOBU, quality)) {
result = AVIF_RESULT_ENCODE_ALPHA_FAILED;
goto writeCleanup;
}
@@ -189,28 +185,88 @@
// Cleanup
writeCleanup:
- if (reformattedImage) {
- avifImageDestroy(reformattedImage);
- }
avifRawDataFree(&colorOBU);
avifRawDataFree(&alphaOBU);
return result;
}
+static aom_img_fmt_t avifImageCalcAOMFmt(avifImage * image, avifBool alphaOnly, int * yShift)
+{
+ *yShift = 0;
+
+ aom_img_fmt_t fmt;
+ if (alphaOnly) {
+ // We're going monochrome, who cares about chroma quality
+ fmt = AOM_IMG_FMT_I420;
+ *yShift = 1;
+ } else {
+ switch (image->yuvFormat) {
+ case AVIF_PIXEL_FORMAT_YUV444:
+ fmt = AOM_IMG_FMT_I444;
+ break;
+ case AVIF_PIXEL_FORMAT_YUV422:
+ fmt = AOM_IMG_FMT_I422;
+ break;
+ case AVIF_PIXEL_FORMAT_YUV420:
+ fmt = AOM_IMG_FMT_I420;
+ *yShift = 1;
+ break;
+ case AVIF_PIXEL_FORMAT_YV12:
+ fmt = AOM_IMG_FMT_YV12;
+ *yShift = 1;
+ break;
+ default:
+ return AOM_IMG_FMT_NONE;
+ }
+ }
+
+ if (image->depth > 8) {
+ fmt |= AOM_IMG_FMT_HIGHBITDEPTH;
+ }
+
+ return fmt;
+}
+
static avifBool encodeOBU(avifImage * image, avifBool alphaOnly, avifRawData * outputOBU, int quality)
{
avifBool success = AVIF_FALSE;
aom_codec_iface_t * encoder_interface = aom_codec_av1_cx();
aom_codec_ctx_t encoder;
+ int yShift = 0;
+ aom_img_fmt_t aomFormat = avifImageCalcAOMFmt(image, alphaOnly, &yShift);
+ if (aomFormat == AOM_IMG_FMT_NONE) {
+ return AVIF_FALSE;
+ }
+
struct aom_codec_enc_cfg cfg;
aom_codec_enc_config_default(encoder_interface, &cfg, 0);
+ // Profile 0. 8-bit and 10-bit 4:2:0 and 4:0:0 only.
+ // Profile 1. 8-bit and 10-bit 4:4:4
// Profile 2. 8-bit and 10-bit 4:2:2
// 12-bit 4:0:0, 4:2:2 and 4:4:4
- cfg.g_profile = 2;
- cfg.g_bit_depth = AOM_BITS_12;
- cfg.g_input_bit_depth = 12;
+ if (image->depth == 12) {
+ // Only profile 2 can handle 12 bit
+ cfg.g_profile = 2;
+ } else {
+ // 8-bit or 10-bit
+
+ if (alphaOnly) {
+ // Assuming aomImage->monochrome makes it 4:0:0
+ cfg.g_profile = 0;
+ } else {
+ switch (image->yuvFormat) {
+ case AVIF_PIXEL_FORMAT_YUV444: cfg.g_profile = 1; break;
+ case AVIF_PIXEL_FORMAT_YUV422: cfg.g_profile = 2; break;
+ case AVIF_PIXEL_FORMAT_YUV420: cfg.g_profile = 0; break;
+ case AVIF_PIXEL_FORMAT_YV12: cfg.g_profile = 0; break;
+ }
+ }
+ }
+
+ cfg.g_bit_depth = image->depth;
+ cfg.g_input_bit_depth = image->depth;
cfg.g_w = image->width;
cfg.g_h = image->height;
// cfg.g_threads = ...;
@@ -223,38 +279,46 @@
cfg.rc_max_quantizer = quality;
}
- aom_codec_enc_init(&encoder, encoder_interface, &cfg, AOM_CODEC_USE_HIGHBITDEPTH);
- aom_codec_control(&encoder, AV1E_SET_COLOR_RANGE, AOM_CR_FULL_RANGE);
+ uint32_t encoderFlags = 0;
+ if (image->depth > 8) {
+ encoderFlags |= AOM_CODEC_USE_HIGHBITDEPTH;
+ }
+ aom_codec_enc_init(&encoder, encoder_interface, &cfg, encoderFlags);
if (lossless) {
aom_codec_control(&encoder, AV1E_SET_LOSSLESS, 1);
}
- aom_image_t * aomImage = aom_img_alloc(NULL, AOM_IMG_FMT_I44416, image->width, image->height, 16);
- aomImage->range = AOM_CR_FULL_RANGE; // always use full range
- if (alphaOnly) {
- }
+ int uvHeight = image->height >> yShift;
+ aom_image_t * aomImage = aom_img_alloc(NULL, aomFormat, image->width, image->height, 16);
if (alphaOnly) {
+ aomImage->range = AOM_CR_FULL_RANGE; // Alpha is always full range
+ aom_codec_control(&encoder, AV1E_SET_COLOR_RANGE, aomImage->range);
aomImage->monochrome = 1;
for (int j = 0; j < image->height; ++j) {
- for (int i = 0; i < image->width; ++i) {
- for (int plane = 0; plane < 3; ++plane) {
- uint16_t * planeChannel = (uint16_t *)&aomImage->planes[plane][(j * aomImage->stride[plane]) + (2 * i)];
- if (plane == 0) {
- *planeChannel = image->planes[3][i + (j * image->strides[plane])];
- } else {
- *planeChannel = 0;
- }
- }
- }
+ uint8_t * srcAlphaRow = &image->alphaPlane[j * image->alphaRowBytes];
+ uint8_t * dstAlphaRow = &aomImage->planes[0][j * aomImage->stride[0]];
+ memcpy(dstAlphaRow, srcAlphaRow, image->alphaRowBytes);
+ }
+
+ for (int j = 0; j < uvHeight; ++j) {
+ // Zero out U and V
+ memset(&aomImage->planes[1][j * aomImage->stride[1]], 0, aomImage->stride[1]);
+ memset(&aomImage->planes[2][j * aomImage->stride[2]], 0, aomImage->stride[2]);
}
} else {
+ aomImage->range = (image->yuvRange == AVIF_RANGE_FULL) ? AOM_CR_FULL_RANGE : AOM_CR_STUDIO_RANGE;
+ aom_codec_control(&encoder, AV1E_SET_COLOR_RANGE, aomImage->range);
for (int j = 0; j < image->height; ++j) {
- for (int i = 0; i < image->width; ++i) {
- for (int plane = 0; plane < 3; ++plane) {
- uint16_t * planeChannel = (uint16_t *)&aomImage->planes[plane][(j * aomImage->stride[plane]) + (2 * i)];
- *planeChannel = image->planes[plane][i + (j * image->strides[plane])];
+ for (int yuvPlane = 0; yuvPlane < 3; ++yuvPlane) {
+ if ((yuvPlane > 0) && (j >= uvHeight)) {
+ // Bail out if we're on a half-height UV plane
+ break;
}
+
+ uint8_t * srcRow = &image->yuvPlanes[yuvPlane][j * image->yuvRowBytes[yuvPlane]];
+ uint8_t * dstRow = &aomImage->planes[yuvPlane][j * aomImage->stride[yuvPlane]];
+ memcpy(dstRow, srcRow, image->yuvRowBytes[yuvPlane]);
}
}
}
@@ -281,11 +345,26 @@
static avifBool avifImageIsOpaque(avifImage * image)
{
+ if (!image->alphaPlane) {
+ return AVIF_TRUE;
+ }
+
int maxChannel = (1 << image->depth) - 1;
- for (int j = 0; j < image->height; ++j) {
- for (int i = 0; i < image->width; ++i) {
- if (image->planes[3][i + (j * image->strides[3])] != maxChannel) {
- return AVIF_FALSE;
+ if (avifImageUsesU16(image)) {
+ for (int j = 0; j < image->height; ++j) {
+ for (int i = 0; i < image->width; ++i) {
+ uint16_t * p = (uint16_t *)&image->alphaPlane[(i * 2) + (j * image->alphaRowBytes)];
+ if (*p != maxChannel) {
+ return AVIF_FALSE;
+ }
+ }
+ }
+ } else {
+ for (int j = 0; j < image->height; ++j) {
+ for (int i = 0; i < image->width; ++i) {
+ if (image->alphaPlane[i + (j * image->alphaRowBytes)] != maxChannel) {
+ return AVIF_FALSE;
+ }
}
}
}