Turn off CONFIG_LOWBITDEPTH by default

The CodecWG agreed to have this off for the default "C" model.

Change-Id: I624204a96426b51f7f8bb2c9c3a3ff78f8009435
diff --git a/build/cmake/aom_config_defaults.cmake b/build/cmake/aom_config_defaults.cmake
index da51551..8b918ed 100644
--- a/build/cmake/aom_config_defaults.cmake
+++ b/build/cmake/aom_config_defaults.cmake
@@ -82,7 +82,7 @@
 set(CONFIG_COEFFICIENT_RANGE_CHECKING 0 CACHE NUMBER "Coefficient range check.")
 set(CONFIG_INSPECTION 0 CACHE NUMBER "Enables bitstream inspection.")
 set(CONFIG_INTERNAL_STATS 0 CACHE NUMBER "Codec stats.")
-set(CONFIG_LOWBITDEPTH 1 CACHE NUMBER "Enables 8-bit optimized pipeline.")
+set(CONFIG_LOWBITDEPTH 0 CACHE NUMBER "Enables 8-bit optimized pipeline.")
 set(CONFIG_REALTIME_ONLY 0 CACHE NUMBER "Support only realtime encodes.")
 set(CONFIG_SIZE_LIMIT 0 CACHE NUMBER "Limit max decode width/height.")
 set(CONFIG_SPATIAL_RESAMPLING 1 CACHE NUMBER "Spatial resampling.")
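Usage note: since CONFIG_LOWBITDEPTH is declared with set(... CACHE NUMBER ...),
the new default of 0 can still be overridden on the CMake command line. A minimal
sketch of re-enabling the 8-bit optimized pipeline at configure time, assuming the
usual out-of-tree CMake workflow (the source and build directory paths below are
placeholders):

    # From an empty build directory; /path/to/aom is the libaom checkout.
    # -D<var>=<value> overrides a CACHE variable's default.
    cmake /path/to/aom -DCONFIG_LOWBITDEPTH=1
    make

Builds configured without the override now use the high-bit-depth pipeline by default.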