Add sharp YUV (libsharpyuv) support to avifImageRGBToYUV.
Add --sharpyuv flag to avifenc.
diff --git a/apps/avifenc.c b/apps/avifenc.c
index 485b350..2e72a1c 100644
--- a/apps/avifenc.c
+++ b/apps/avifenc.c
@@ -58,6 +58,7 @@
printf(" -y,--yuv FORMAT : Output format [default=auto, 444, 422, 420, 400]. Ignored for y4m or stdin (y4m format is retained)\n");
printf(" For JPEG, auto honors the JPEG's internal format, if possible. For all other cases, auto defaults to 444\n");
printf(" -p,--premultiply : Premultiply color by the alpha channel and signal this in the AVIF\n");
+ printf(" --sharpyuv : Use sharp RGB to YUV420 conversion (if supported). Ignored for y4m or if output is not 420.\n");
printf(" --stdin : Read y4m frames from stdin instead of files; no input filenames allowed, must set before offering output filename\n");
printf(" --cicp,--nclx P/T/M : Set CICP values (nclx colr box) (3 raw numbers, use -r to set range flag)\n");
printf(" P = color primaries\n");
@@ -255,7 +256,11 @@
return (input->fileIndex < input->filesCount);
}
-static avifAppFileFormat avifInputReadImage(avifInput * input, avifImage * image, uint32_t * outDepth, avifAppSourceTiming * sourceTiming)
+static avifAppFileFormat avifInputReadImage(avifInput * input,
+ avifImage * image,
+ uint32_t * outDepth,
+ avifAppSourceTiming * sourceTiming,
+ avifRGBToYUVFlags flags)
{
if (sourceTiming) {
// A source timing of all 0s is a sentinel value hinting that the value is unset / should be
@@ -282,6 +287,7 @@
const avifAppFileFormat nextInputFormat = avifReadImage(input->files[input->fileIndex].filename,
input->requestedFormat,
input->requestedDepth,
+ flags,
image,
outDepth,
sourceTiming,
@@ -466,6 +472,7 @@
avifColorPrimaries colorPrimaries = AVIF_COLOR_PRIMARIES_UNSPECIFIED;
avifTransferCharacteristics transferCharacteristics = AVIF_TRANSFER_CHARACTERISTICS_UNSPECIFIED;
avifMatrixCoefficients matrixCoefficients = AVIF_MATRIX_COEFFICIENTS_BT601;
+ avifRGBToYUVFlags flags = AVIF_RGB_TO_YUV_DEFAULT;
int argIndex = 1;
while (argIndex < argc) {
@@ -760,6 +767,8 @@
matrixCoefficients = AVIF_MATRIX_COEFFICIENTS_IDENTITY; // this is key for lossless
} else if (!strcmp(arg, "-p") || !strcmp(arg, "--premultiply")) {
premultiplyAlpha = AVIF_TRUE;
+ } else if (!strcmp(arg, "--sharpyuv")) {
+ flags |= AVIF_CHROMA_DOWNSAMPLING_SHARP_YUV;
} else if (arg[0] == '-') {
fprintf(stderr, "ERROR: unrecognized option %s\n\n", arg);
syntax();
@@ -823,7 +832,7 @@
avifInputFile * firstFile = avifInputGetNextFile(&input);
uint32_t sourceDepth = 0;
avifAppSourceTiming firstSourceTiming;
- avifAppFileFormat inputFormat = avifInputReadImage(&input, image, &sourceDepth, &firstSourceTiming);
+ avifAppFileFormat inputFormat = avifInputReadImage(&input, image, &sourceDepth, &firstSourceTiming, flags);
if (inputFormat == AVIF_APP_FILE_FORMAT_UNKNOWN) {
fprintf(stderr, "Cannot determine input file format: %s\n", firstFile->filename);
returnCode = 1;
@@ -1031,7 +1040,7 @@
cellImage->alphaPremultiplied = image->alphaPremultiplied;
gridCells[gridCellIndex] = cellImage;
- avifAppFileFormat nextInputFormat = avifInputReadImage(&input, cellImage, NULL, NULL);
+ avifAppFileFormat nextInputFormat = avifInputReadImage(&input, cellImage, NULL, NULL, flags);
if (nextInputFormat == AVIF_APP_FILE_FORMAT_UNKNOWN) {
returnCode = 1;
goto cleanup;
@@ -1170,7 +1179,7 @@
nextImage->yuvRange = image->yuvRange;
nextImage->alphaPremultiplied = image->alphaPremultiplied;
- avifAppFileFormat nextInputFormat = avifInputReadImage(&input, nextImage, NULL, NULL);
+ avifAppFileFormat nextInputFormat = avifInputReadImage(&input, nextImage, NULL, NULL, flags);
if (nextInputFormat == AVIF_APP_FILE_FORMAT_UNKNOWN) {
returnCode = 1;
goto cleanup;
diff --git a/apps/shared/avifjpeg.c b/apps/shared/avifjpeg.c
index 3ff9a1e..fcaa4ce 100644
--- a/apps/shared/avifjpeg.c
+++ b/apps/shared/avifjpeg.c
@@ -239,7 +239,7 @@
// longjmp. But GCC's -Wclobbered warning may have trouble figuring that out, so
// we preemptively declare it as volatile.
-avifBool avifJPEGRead(const char * inputFilename, avifImage * avif, avifPixelFormat requestedFormat, uint32_t requestedDepth)
+avifBool avifJPEGRead(const char * inputFilename, avifImage * avif, avifPixelFormat requestedFormat, uint32_t requestedDepth, avifRGBToYUVFlags flags)
{
volatile avifBool ret = AVIF_FALSE;
uint8_t * volatile iccData = NULL;
@@ -314,7 +314,7 @@
memcpy(pixelRow, buffer[0], rgb.rowBytes);
++row;
}
- if (avifImageRGBToYUV(avif, &rgb, AVIF_RGB_TO_YUV_DEFAULT) != AVIF_RESULT_OK) {
+ if (avifImageRGBToYUV(avif, &rgb, flags) != AVIF_RESULT_OK) {
fprintf(stderr, "Conversion to YUV failed: %s\n", inputFilename);
goto cleanup;
}
diff --git a/apps/shared/avifjpeg.h b/apps/shared/avifjpeg.h
index a928fa4..5e2b981 100644
--- a/apps/shared/avifjpeg.h
+++ b/apps/shared/avifjpeg.h
@@ -10,7 +10,7 @@
extern "C" {
#endif
-avifBool avifJPEGRead(const char * inputFilename, avifImage * avif, avifPixelFormat requestedFormat, uint32_t requestedDepth);
+avifBool avifJPEGRead(const char * inputFilename, avifImage * avif, avifPixelFormat requestedFormat, uint32_t requestedDepth, avifRGBToYUVFlags flags);
avifBool avifJPEGWrite(const char * outputFilename, const avifImage * avif, int jpegQuality, avifYUVToRGBFlags conversionFlags);
#ifdef __cplusplus
diff --git a/apps/shared/avifpng.c b/apps/shared/avifpng.c
index 7f8ce45..14ce4c5 100644
--- a/apps/shared/avifpng.c
+++ b/apps/shared/avifpng.c
@@ -31,7 +31,12 @@
// modified between setjmp and longjmp. But GCC's -Wclobbered warning may have
// trouble figuring that out, so we preemptively declare them as volatile.
-avifBool avifPNGRead(const char * inputFilename, avifImage * avif, avifPixelFormat requestedFormat, uint32_t requestedDepth, uint32_t * outPNGDepth)
+avifBool avifPNGRead(const char * inputFilename,
+ avifImage * avif,
+ avifPixelFormat requestedFormat,
+ uint32_t requestedDepth,
+ avifRGBToYUVFlags flags,
+ uint32_t * outPNGDepth)
{
volatile avifBool readResult = AVIF_FALSE;
png_structp png = NULL;
@@ -153,7 +158,7 @@
rowPointers[y] = &rgb.pixels[y * rgb.rowBytes];
}
png_read_image(png, rowPointers);
- if (avifImageRGBToYUV(avif, &rgb, AVIF_RGB_TO_YUV_DEFAULT) != AVIF_RESULT_OK) {
+ if (avifImageRGBToYUV(avif, &rgb, flags) != AVIF_RESULT_OK) {
fprintf(stderr, "Conversion to YUV failed: %s\n", inputFilename);
goto cleanup;
}
diff --git a/apps/shared/avifpng.h b/apps/shared/avifpng.h
index 20ffd93..7e946e4 100644
--- a/apps/shared/avifpng.h
+++ b/apps/shared/avifpng.h
@@ -11,7 +11,12 @@
#endif
// if (requestedDepth == 0), do best-fit
-avifBool avifPNGRead(const char * inputFilename, avifImage * avif, avifPixelFormat requestedFormat, uint32_t requestedDepth, uint32_t * outPNGDepth);
+avifBool avifPNGRead(const char * inputFilename,
+ avifImage * avif,
+ avifPixelFormat requestedFormat,
+ uint32_t requestedDepth,
+ avifRGBToYUVFlags flags,
+ uint32_t * outPNGDepth);
avifBool avifPNGWrite(const char * outputFilename,
const avifImage * avif,
uint32_t requestedDepth,
diff --git a/apps/shared/avifutil.c b/apps/shared/avifutil.c
index 01c8ec9..b38082a 100644
--- a/apps/shared/avifutil.c
+++ b/apps/shared/avifutil.c
@@ -227,6 +227,7 @@
avifAppFileFormat avifReadImage(const char * filename,
avifPixelFormat requestedFormat,
int requestedDepth,
+ avifRGBToYUVFlags flags,
avifImage * image,
uint32_t * outDepth,
avifAppSourceTiming * sourceTiming,
@@ -241,14 +242,14 @@
*outDepth = image->depth;
}
} else if (format == AVIF_APP_FILE_FORMAT_JPEG) {
- if (!avifJPEGRead(filename, image, requestedFormat, requestedDepth)) {
+ if (!avifJPEGRead(filename, image, requestedFormat, requestedDepth, flags)) {
return AVIF_APP_FILE_FORMAT_UNKNOWN;
}
if (outDepth) {
*outDepth = 8;
}
} else if (format == AVIF_APP_FILE_FORMAT_PNG) {
- if (!avifPNGRead(filename, image, requestedFormat, requestedDepth, outDepth)) {
+ if (!avifPNGRead(filename, image, requestedFormat, requestedDepth, flags, outDepth)) {
return AVIF_APP_FILE_FORMAT_UNKNOWN;
}
} else {
diff --git a/apps/shared/avifutil.h b/apps/shared/avifutil.h
index 8e0b1b5..6543787 100644
--- a/apps/shared/avifutil.h
+++ b/apps/shared/avifutil.h
@@ -63,6 +63,7 @@
avifAppFileFormat avifReadImage(const char * filename,
avifPixelFormat requestedFormat,
int requestedDepth,
+ avifRGBToYUVFlags flags,
avifImage * image,
uint32_t * outDepth,
avifAppSourceTiming * sourceTiming,
diff --git a/doc/avifenc.1.md b/doc/avifenc.1.md
index a99cf1a..bf381fb 100644
--- a/doc/avifenc.1.md
+++ b/doc/avifenc.1.md
@@ -69,6 +69,10 @@
**-p**, **\--premultiply**
: Premultiply color by the alpha channel and signal this in the AVIF.
+**\--sharpyuv**
+: Use sharp RGB to YUV420 conversion (if supported). Ignored for y4m or if
+ output is not 420.
+
**\--stdin**
: Read y4m frames from stdin instead of files.
No input filenames allowed, must be set before specifying the output
diff --git a/include/avif/avif.h b/include/avif/avif.h
index 6a1ea87..5a99884 100644
--- a/include/avif/avif.h
+++ b/include/avif/avif.h
@@ -586,7 +586,7 @@
// Conversion options.
typedef enum avifRGBToYUVFlag
{
- AVIF_RGB_TO_YUV_DEFAULT = 0, // Uses the first available upsampling filter among:
+ AVIF_RGB_TO_YUV_DEFAULT = 0, // Uses the first available downsampling filter among:
// libyuv average, built-in average
// libyuv preference
@@ -596,12 +596,14 @@
// Conversion from RGB to YUV 4:2:2 or YUV 4:2:0 (ignored in all other cases)
// Chroma downsampling filter. Set at most one:
- AVIF_CHROMA_DOWNSAMPLING_AVERAGE = (1 << 10), // only use the averaging filter (libyuv or built-in)
+ AVIF_CHROMA_DOWNSAMPLING_AVERAGE = (1 << 10), // only use the averaging filter (libyuv or built-in)
+ AVIF_CHROMA_DOWNSAMPLING_SHARP_YUV = (1 << 11), // only use sharp yuv filter (libsharpyuv),
+ // available for 4:2:0 only, ignored for 4:2:2
} avifRGBToYUVFlag;
typedef uint32_t avifRGBToYUVFlags;
typedef enum avifYUVToRGBFlag
{
- AVIF_YUV_TO_RGB_DEFAULT = 0, // Uses the first available downsampling filter among:
+ AVIF_YUV_TO_RGB_DEFAULT = 0, // Uses the first available upsampling filter among:
// libyuv bilinear, libyuv nearest-neighbor, built-in bilinear
// libyuv preference
diff --git a/src/reformat.c b/src/reformat.c
index 099778a..d792a85 100644
--- a/src/reformat.c
+++ b/src/reformat.c
@@ -224,17 +224,28 @@
}
}
- avifBool convertedWithLibYUV = AVIF_FALSE;
- if (!(flags & AVIF_RGB_TO_YUV_AVOID_LIBYUV) && (alphaMode == AVIF_ALPHA_MULTIPLY_MODE_NO_OP)) {
+ avifBool converted = AVIF_FALSE;
+
+ // Try converting with libsharpyuv.
+ if ((flags & AVIF_CHROMA_DOWNSAMPLING_SHARP_YUV) && image->yuvFormat == AVIF_PIXEL_FORMAT_YUV420) {
+ const avifResult libSharpYUVResult = avifImageRGBToYUVLibSharpYUV(image, rgb, &state);
+ if (libSharpYUVResult != AVIF_RESULT_OK) {
+ // Return the error if sharpyuv was requested but failed for any reason, including libsharpyuv not being available.
+ return libSharpYUVResult;
+ }
+ converted = AVIF_TRUE;
+ }
+
+ if (!converted && !(flags & AVIF_RGB_TO_YUV_AVOID_LIBYUV) && (alphaMode == AVIF_ALPHA_MULTIPLY_MODE_NO_OP)) {
avifResult libyuvResult = avifImageRGBToYUVLibYUV(image, rgb);
if (libyuvResult == AVIF_RESULT_OK) {
- convertedWithLibYUV = AVIF_TRUE;
+ converted = AVIF_TRUE;
} else if (libyuvResult != AVIF_RESULT_NOT_IMPLEMENTED) {
return libyuvResult;
}
}
- if (!convertedWithLibYUV) {
+ if (!converted) {
const float kr = state.kr;
const float kg = state.kg;
const float kb = state.kb;
diff --git a/tests/gtest/are_images_equal.cc b/tests/gtest/are_images_equal.cc
index 70d02d1..1bfe65f 100644
--- a/tests/gtest/are_images_equal.cc
+++ b/tests/gtest/are_images_equal.cc
@@ -31,8 +31,8 @@
// Make sure no color conversion happens.
decoded[i]->matrixCoefficients = AVIF_MATRIX_COEFFICIENTS_IDENTITY;
if (avifReadImage(argv[i + 1], requestedFormat, kRequestedDepth,
- decoded[i].get(), &depth[i], nullptr,
- nullptr) == AVIF_APP_FILE_FORMAT_UNKNOWN) {
+ AVIF_RGB_TO_YUV_DEFAULT, decoded[i].get(), &depth[i],
+ nullptr, nullptr) == AVIF_APP_FILE_FORMAT_UNKNOWN) {
std::cerr << "Image " << argv[i + 1] << " cannot be read." << std::endl;
return 2;
}
diff --git a/tests/gtest/avifrgbtoyuvtest.cc b/tests/gtest/avifrgbtoyuvtest.cc
index b7bb0d2..08e417e 100644
--- a/tests/gtest/avifrgbtoyuvtest.cc
+++ b/tests/gtest/avifrgbtoyuvtest.cc
@@ -115,6 +115,7 @@
: public testing::TestWithParam<
std::tuple</*rgb_depth=*/int, /*yuv_depth=*/int, avifRGBFormat,
avifPixelFormat, avifRange, avifMatrixCoefficients,
+ /*avif_rgb_to_yuv_flags=*/avifRGBToYUVFlags,
/*add_noise=*/bool, /*rgb_step=*/uint32_t,
/*max_abs_average_diff=*/double, /*min_psnr=*/double>> {};
@@ -128,15 +129,16 @@
const avifPixelFormat yuv_format = std::get<3>(GetParam());
const avifRange yuv_range = std::get<4>(GetParam());
const avifMatrixCoefficients matrix_coefficients = std::get<5>(GetParam());
+ const avifRGBToYUVFlags rgb_to_yuv_flags = std::get<6>(GetParam());
// Whether to add noise to the input RGB samples. Should only impact
// subsampled chroma (4:2:2 and 4:2:0).
- const bool add_noise = std::get<6>(GetParam());
+ const bool add_noise = std::get<7>(GetParam());
// Testing each RGB combination would be more accurate but results are similar
// with faster settings.
- const uint32_t rgb_step = std::get<7>(GetParam());
+ const uint32_t rgb_step = std::get<8>(GetParam());
// Thresholds to pass.
- const double max_abs_average_diff = std::get<8>(GetParam());
- const double min_psnr = std::get<9>(GetParam());
+ const double max_abs_average_diff = std::get<9>(GetParam());
+ const double min_psnr = std::get<10>(GetParam());
// Deduced constants.
const bool is_monochrome =
(yuv_format == AVIF_PIXEL_FORMAT_YUV400); // If so, only test grey input.
@@ -182,7 +184,7 @@
ModifyImageChannel(&src_rgb, offsets.b, kBlueNoise);
}
- ASSERT_EQ(avifImageRGBToYUV(yuv.get(), &src_rgb, AVIF_RGB_TO_YUV_DEFAULT),
+ ASSERT_EQ(avifImageRGBToYUV(yuv.get(), &src_rgb, rgb_to_yuv_flags),
AVIF_RESULT_OK);
ASSERT_EQ(avifImageYUVToRGB(yuv.get(), &dst_rgb, AVIF_YUV_TO_RGB_DEFAULT),
AVIF_RESULT_OK);
@@ -316,6 +318,8 @@
// avifMatrixCoefficients-typed constants for testing::Values() to work on MSVC.
constexpr avifMatrixCoefficients kMatrixCoefficientsBT601 =
AVIF_MATRIX_COEFFICIENTS_BT601;
+constexpr avifMatrixCoefficients kMatrixCoefficientsBT709 =
+ AVIF_MATRIX_COEFFICIENTS_BT709;
constexpr avifMatrixCoefficients kMatrixCoefficientsIdentity =
AVIF_MATRIX_COEFFICIENTS_IDENTITY;
@@ -326,6 +330,7 @@
/*yuv_depth=*/Values(8), Values(AVIF_RGB_FORMAT_RGBA),
Values(AVIF_PIXEL_FORMAT_YUV420), Values(AVIF_RANGE_FULL),
Values(kMatrixCoefficientsBT601),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_RGB_TO_YUV_DEFAULT),
/*add_noise=*/Values(true),
/*rgb_step=*/Values(3),
/*max_abs_average_diff=*/Values(0.1), // The color drift is almost
@@ -335,39 +340,39 @@
// Keeping RGB samples in full range and same or higher bit depth should not
// bring any loss in the roundtrip.
-INSTANTIATE_TEST_SUITE_P(Identity8b, RGBToYUVTest,
- Combine(/*rgb_depth=*/Values(8),
- /*yuv_depth=*/Values(8, 10, 12),
- ValuesIn(kAllRgbFormats),
- Values(AVIF_PIXEL_FORMAT_YUV444),
- Values(AVIF_RANGE_FULL),
- Values(kMatrixCoefficientsIdentity),
- /*add_noise=*/Values(true),
- /*rgb_step=*/Values(31),
- /*max_abs_average_diff=*/Values(0.),
- /*min_psnr=*/Values(99.)));
-INSTANTIATE_TEST_SUITE_P(Identity10b, RGBToYUVTest,
- Combine(/*rgb_depth=*/Values(10),
- /*yuv_depth=*/Values(10, 12),
- ValuesIn(kAllRgbFormats),
- Values(AVIF_PIXEL_FORMAT_YUV444),
- Values(AVIF_RANGE_FULL),
- Values(kMatrixCoefficientsIdentity),
- /*add_noise=*/Values(true),
- /*rgb_step=*/Values(101),
- /*max_abs_average_diff=*/Values(0.),
- /*min_psnr=*/Values(99.)));
-INSTANTIATE_TEST_SUITE_P(Identity12b, RGBToYUVTest,
- Combine(/*rgb_depth=*/Values(12),
- /*yuv_depth=*/Values(12),
- ValuesIn(kAllRgbFormats),
- Values(AVIF_PIXEL_FORMAT_YUV444),
- Values(AVIF_RANGE_FULL),
- Values(kMatrixCoefficientsIdentity),
- /*add_noise=*/Values(true),
- /*rgb_step=*/Values(401),
- /*max_abs_average_diff=*/Values(0.),
- /*min_psnr=*/Values(99.)));
+INSTANTIATE_TEST_SUITE_P(
+ Identity8b, RGBToYUVTest,
+ Combine(/*rgb_depth=*/Values(8),
+ /*yuv_depth=*/Values(8, 10, 12), ValuesIn(kAllRgbFormats),
+ Values(AVIF_PIXEL_FORMAT_YUV444), Values(AVIF_RANGE_FULL),
+ Values(kMatrixCoefficientsIdentity),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_RGB_TO_YUV_DEFAULT),
+ /*add_noise=*/Values(true),
+ /*rgb_step=*/Values(31),
+ /*max_abs_average_diff=*/Values(0.),
+ /*min_psnr=*/Values(99.)));
+INSTANTIATE_TEST_SUITE_P(
+ Identity10b, RGBToYUVTest,
+ Combine(/*rgb_depth=*/Values(10),
+ /*yuv_depth=*/Values(10, 12), ValuesIn(kAllRgbFormats),
+ Values(AVIF_PIXEL_FORMAT_YUV444), Values(AVIF_RANGE_FULL),
+ Values(kMatrixCoefficientsIdentity),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_RGB_TO_YUV_DEFAULT),
+ /*add_noise=*/Values(true),
+ /*rgb_step=*/Values(101),
+ /*max_abs_average_diff=*/Values(0.),
+ /*min_psnr=*/Values(99.)));
+INSTANTIATE_TEST_SUITE_P(
+ Identity12b, RGBToYUVTest,
+ Combine(/*rgb_depth=*/Values(12),
+ /*yuv_depth=*/Values(12), ValuesIn(kAllRgbFormats),
+ Values(AVIF_PIXEL_FORMAT_YUV444), Values(AVIF_RANGE_FULL),
+ Values(kMatrixCoefficientsIdentity),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_RGB_TO_YUV_DEFAULT),
+ /*add_noise=*/Values(true),
+ /*rgb_step=*/Values(401),
+ /*max_abs_average_diff=*/Values(0.),
+ /*min_psnr=*/Values(99.)));
// 4:4:4 and chroma subsampling have similar distortions on plain color inputs.
INSTANTIATE_TEST_SUITE_P(
@@ -378,6 +383,7 @@
Values(AVIF_PIXEL_FORMAT_YUV444, AVIF_PIXEL_FORMAT_YUV422,
AVIF_PIXEL_FORMAT_YUV420),
Values(AVIF_RANGE_FULL), Values(kMatrixCoefficientsBT601),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_RGB_TO_YUV_DEFAULT),
/*add_noise=*/Values(false),
/*rgb_step=*/Values(17),
/*max_abs_average_diff=*/Values(0.02), // The color drift is centered.
@@ -387,39 +393,87 @@
// Converting grey RGB samples to full-range monochrome of same or greater bit
// depth should be lossless.
-INSTANTIATE_TEST_SUITE_P(MonochromeLossless8b, RGBToYUVTest,
- Combine(/*rgb_depth=*/Values(8),
- /*yuv_depth=*/Values(8, 10, 12),
- ValuesIn(kAllRgbFormats),
- Values(AVIF_PIXEL_FORMAT_YUV400),
- Values(AVIF_RANGE_FULL),
- Values(kMatrixCoefficientsBT601),
- /*add_noise=*/Values(false),
- /*rgb_step=*/Values(1),
- /*max_abs_average_diff=*/Values(0.),
- /*min_psnr=*/Values(99.)));
-INSTANTIATE_TEST_SUITE_P(MonochromeLossless10b, RGBToYUVTest,
- Combine(/*rgb_depth=*/Values(10),
- /*yuv_depth=*/Values(10, 12),
- ValuesIn(kAllRgbFormats),
- Values(AVIF_PIXEL_FORMAT_YUV400),
- Values(AVIF_RANGE_FULL),
- Values(kMatrixCoefficientsBT601),
- /*add_noise=*/Values(false),
- /*rgb_step=*/Values(1),
- /*max_abs_average_diff=*/Values(0.),
- /*min_psnr=*/Values(99.)));
-INSTANTIATE_TEST_SUITE_P(MonochromeLossless12b, RGBToYUVTest,
- Combine(/*rgb_depth=*/Values(12),
- /*yuv_depth=*/Values(12),
- ValuesIn(kAllRgbFormats),
- Values(AVIF_PIXEL_FORMAT_YUV400),
- Values(AVIF_RANGE_FULL),
- Values(kMatrixCoefficientsBT601),
- /*add_noise=*/Values(false),
- /*rgb_step=*/Values(1),
- /*max_abs_average_diff=*/Values(0.),
- /*min_psnr=*/Values(99.)));
+INSTANTIATE_TEST_SUITE_P(
+ MonochromeLossless8b, RGBToYUVTest,
+ Combine(/*rgb_depth=*/Values(8),
+ /*yuv_depth=*/Values(8, 10, 12), ValuesIn(kAllRgbFormats),
+ Values(AVIF_PIXEL_FORMAT_YUV400), Values(AVIF_RANGE_FULL),
+ Values(kMatrixCoefficientsBT601),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_RGB_TO_YUV_DEFAULT),
+ /*add_noise=*/Values(false),
+ /*rgb_step=*/Values(1),
+ /*max_abs_average_diff=*/Values(0.),
+ /*min_psnr=*/Values(99.)));
+INSTANTIATE_TEST_SUITE_P(
+ MonochromeLossless10b, RGBToYUVTest,
+ Combine(/*rgb_depth=*/Values(10),
+ /*yuv_depth=*/Values(10, 12), ValuesIn(kAllRgbFormats),
+ Values(AVIF_PIXEL_FORMAT_YUV400), Values(AVIF_RANGE_FULL),
+ Values(kMatrixCoefficientsBT601),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_RGB_TO_YUV_DEFAULT),
+ /*add_noise=*/Values(false),
+ /*rgb_step=*/Values(1),
+ /*max_abs_average_diff=*/Values(0.),
+ /*min_psnr=*/Values(99.)));
+INSTANTIATE_TEST_SUITE_P(
+ MonochromeLossless12b, RGBToYUVTest,
+ Combine(/*rgb_depth=*/Values(12),
+ /*yuv_depth=*/Values(12), ValuesIn(kAllRgbFormats),
+ Values(AVIF_PIXEL_FORMAT_YUV400), Values(AVIF_RANGE_FULL),
+ Values(kMatrixCoefficientsBT601),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_RGB_TO_YUV_DEFAULT),
+ /*add_noise=*/Values(false),
+ /*rgb_step=*/Values(1),
+ /*max_abs_average_diff=*/Values(0.),
+ /*min_psnr=*/Values(99.)));
+
+// Verify that sharp RGB to YUV 4:2:0 conversion (libsharpyuv) stays within
+// acceptable color drift and PSNR bounds.
+INSTANTIATE_TEST_SUITE_P(
+ SharpYuv8Bit, RGBToYUVTest,
+ Combine(
+ /*rgb_depth=*/Values(8),
+ /*yuv_depth=*/Values(8, 10, 12), ValuesIn(kAllRgbFormats),
+ Values(AVIF_PIXEL_FORMAT_YUV420),
+ Values(AVIF_RANGE_LIMITED, AVIF_RANGE_FULL),
+ Values(kMatrixCoefficientsBT601, kMatrixCoefficientsBT709),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_CHROMA_DOWNSAMPLING_SHARP_YUV),
+ /*add_noise=*/Values(true),
+ /*rgb_step=*/Values(17),
+ /*max_abs_average_diff=*/Values(1.2), // Sharp YUV introduces some
+ // color shift.
+ /*min_psnr=*/Values(34.) // SharpYuv distortion is acceptable.
+ ));
+INSTANTIATE_TEST_SUITE_P(
+ SharpYuv10Bit, RGBToYUVTest,
+ Combine(
+ /*rgb_depth=*/Values(10),
+ /*yuv_depth=*/Values(8, 10, 12), ValuesIn(kAllRgbFormats),
+ Values(AVIF_PIXEL_FORMAT_YUV420),
+ Values(AVIF_RANGE_LIMITED, AVIF_RANGE_FULL),
+ Values(kMatrixCoefficientsBT601),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_CHROMA_DOWNSAMPLING_SHARP_YUV),
+ /*add_noise=*/Values(true),
+ /*rgb_step=*/Values(211), // High or it would be too slow.
+ /*max_abs_average_diff=*/Values(1.2), // Sharp YUV introduces some
+ // color shift.
+ /*min_psnr=*/Values(34.) // SharpYuv distortion is acceptable.
+ ));
+INSTANTIATE_TEST_SUITE_P(
+ SharpYuv12Bit, RGBToYUVTest,
+ Combine(
+ /*rgb_depth=*/Values(12),
+ /*yuv_depth=*/Values(8, 10, 12), ValuesIn(kAllRgbFormats),
+ Values(AVIF_PIXEL_FORMAT_YUV420),
+ Values(AVIF_RANGE_LIMITED, AVIF_RANGE_FULL),
+ Values(kMatrixCoefficientsBT601),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_CHROMA_DOWNSAMPLING_SHARP_YUV),
+ /*add_noise=*/Values(true),
+ /*rgb_step=*/Values(840), // High or it would be too slow.
+ /*max_abs_average_diff=*/Values(1.2), // Sharp YUV introduces some
+ // color shift.
+ /*min_psnr=*/Values(34.) // SharpYuv distortion is acceptable.
+ ));
// Can be used to print the drift of all RGB to YUV conversion possibilities.
// Also used for coverage.
@@ -431,6 +485,7 @@
AVIF_PIXEL_FORMAT_YUV420),
Values(AVIF_RANGE_LIMITED, AVIF_RANGE_FULL),
Values(kMatrixCoefficientsBT601),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_RGB_TO_YUV_DEFAULT),
/*add_noise=*/Values(false, true),
/*rgb_step=*/Values(61), // High or it would be too slow.
/*max_abs_average_diff=*/Values(1.), // Not very accurate because
@@ -444,6 +499,7 @@
AVIF_PIXEL_FORMAT_YUV420),
Values(AVIF_RANGE_LIMITED, AVIF_RANGE_FULL),
Values(kMatrixCoefficientsBT601),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_RGB_TO_YUV_DEFAULT),
/*add_noise=*/Values(false, true),
/*rgb_step=*/Values(211), // High or it would be too slow.
/*max_abs_average_diff=*/Values(0.2), // Not very accurate because
@@ -457,6 +513,7 @@
AVIF_PIXEL_FORMAT_YUV420),
Values(AVIF_RANGE_LIMITED, AVIF_RANGE_FULL),
Values(kMatrixCoefficientsBT601),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_RGB_TO_YUV_DEFAULT),
/*add_noise=*/Values(false, true),
/*rgb_step=*/Values(809), // High or it would be too slow.
/*max_abs_average_diff=*/Values(0.3), // Not very accurate because
@@ -475,6 +532,7 @@
AVIF_PIXEL_FORMAT_YUV420, AVIF_PIXEL_FORMAT_YUV400),
Values(AVIF_RANGE_FULL, AVIF_RANGE_LIMITED),
Values(kMatrixCoefficientsBT601),
+ /*avif_rgb_to_yuv_flags=*/Values(AVIF_RGB_TO_YUV_DEFAULT),
/*add_noise=*/Values(false, true),
/*rgb_step=*/Values(3), // way faster and 99% similar to rgb_step=1
/*max_abs_average_diff=*/Values(10.),