Update convex hull test script
Port a few recent updates of the convex hull test script from the
experimental branch to the research branch
Change-Id: I5f8993f23a799f34579f30a13cdb3602d8ef628d
diff --git a/tools/convexhull_framework/src/CalcBDRate.py b/tools/convexhull_framework/src/CalcBDRate.py
new file mode 100644
index 0000000..df5ff42
--- /dev/null
+++ b/tools/convexhull_framework/src/CalcBDRate.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+## Copyright (c) 2019, Alliance for Open Media. All rights reserved
+##
+## This source code is subject to the terms of the BSD 2 Clause License and
+## the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
+## was not distributed with this source code in the LICENSE file, you can
+## obtain it at www.aomedia.org/license/software. If the Alliance for Open
+## Media Patent License 1.0 was not distributed with this source code in the
+## PATENTS file, you can obtain it at www.aomedia.org/license/patent.
+##
+__author__ = "maggie.sun@intel.com, ryan.lei@intel.com"
+
+import numpy
+import math
+import scipy.interpolate
+import logging
+from Config import LoggerName
+
+subloggername = "CalcBDRate"
+loggername = LoggerName + '.' + '%s' % subloggername
+logger = logging.getLogger(loggername)
+
+# BJONTEGAARD Bjontegaard metric
+# Calculation is adapted from Google implementation
+# PCHIP method - Piecewise Cubic Hermite Interpolating Polynomial interpolation
+def BD_RATE(br1, qtyMtrc1, br2, qtyMtrc2):
+ brqtypairs1 = [(br1[i], qtyMtrc1[i]) for i in range(min(len(qtyMtrc1), len(br1)))]
+ brqtypairs2 = [(br2[i], qtyMtrc2[i]) for i in range(min(len(qtyMtrc2), len(br2)))]
+
+ if not brqtypairs1 or not brqtypairs2:
+ logger.info("one of input lists is empty!")
+ return 0.0
+
+ # sort the pair based on quality metric values increasing order
+ brqtypairs1.sort(key=lambda tup: tup[1])
+ brqtypairs2.sort(key=lambda tup: tup[1])
+
+ logbr1 = [math.log(x[0]) for x in brqtypairs1]
+ qmetrics1 = [100.0 if x[1] == float('inf') else x[1] for x in brqtypairs1]
+ logbr2 = [math.log(x[0]) for x in brqtypairs2]
+ qmetrics2 = [100.0 if x[1] == float('inf') else x[1] for x in brqtypairs2]
+
+ # remove duplicated quality metric value
+ dup_idx = [i for i in range(1, len(qmetrics1)) if qmetrics1[i - 1] == qmetrics1[i]]
+ for idx in sorted(dup_idx, reverse=True):
+ del qmetrics1[idx]
+ del logbr1[idx]
+ dup_idx = [i for i in range(1, len(qmetrics2)) if qmetrics2[i - 1] == qmetrics2[i]]
+ for idx in sorted(dup_idx, reverse=True):
+ del qmetrics2[idx]
+ del logbr2[idx]
+
+ # find max and min of quality metrics
+ min_int = max(min(qmetrics1), min(qmetrics2))
+ max_int = min(max(qmetrics1), max(qmetrics2))
+ if min_int >= max_int:
+ logger.info("no overlap from input 2 lists of quality metrics!")
+ return 0.0
+
+ # generate samples between max and min of quality metrics
+ lin = numpy.linspace(min_int, max_int, num=100, retstep=True)
+ interval = lin[1]
+ samples = lin[0]
+
+ # interpolation
+ v1 = scipy.interpolate.pchip_interpolate(qmetrics1, logbr1, samples)
+ v2 = scipy.interpolate.pchip_interpolate(qmetrics2, logbr2, samples)
+
+ # Calculate the integral using the trapezoid method on the samples.
+ int1 = numpy.trapz(v1, dx=interval)
+ int2 = numpy.trapz(v2, dx=interval)
+
+ # find avg diff
+ avg_exp_diff = (int2 - int1) / (max_int - min_int)
+ avg_diff = (math.exp(avg_exp_diff) - 1) * 100
+
+ return avg_diff
+
+'''
+if __name__ == "__main__":
+ brs1 = [64052.664, 6468.096, 4673.424, 3179.4, 2298.384, 1361.184]
+ qtys1 = [1, 1, 0.99999, 0.99998, 0.99996, 0.99992]
+ brs2 = [68461.896, 7554.96, 4827.432, 3294.024, 2380.128, 1401.744]
+ qtys2 = [1, 1, 0.99999, 0.99998, 0.99996, 0.99992]
+
+ bdrate = BD_RATE(brs1, qtys1, brs2, qtys2)
+ print("bdrate calculated is %3.3f%%" % bdrate)
+'''
diff --git a/tools/convexhull_framework/src/Config.py b/tools/convexhull_framework/src/Config.py
index 5a6ebb0..53281f3 100644
--- a/tools/convexhull_framework/src/Config.py
+++ b/tools/convexhull_framework/src/Config.py
@@ -19,7 +19,7 @@
BinPath = os.path.join(RootPath, 'bin')
WorkPath = os.path.join(RootPath, 'test')
SMOKE_TEST = False # override some parameters to do a quick smoke test
-FrameNum = 60
+FrameNum = 10
if SMOKE_TEST:
FrameNum = 2
@@ -34,8 +34,9 @@
# basename class width height framerate bitdepth fmt
#"CrowdRun": ["ClassB", 1920, 1080, 30, 8, "yuv420p"],
#"BasketballDrive": ["ClassB", 1920, 1080, 30, 8, "yuv420p"],
- "NetflixCrosswalk_1920x1080_60fps_8bit_420_60f": ["ClassB", 1920, 1080,
- 30, 8, "yuv420p"],
+ #"NetflixCrosswalk_1920x1080_60fps_8bit_420_60f": ["ClassB", 1920, 1080,
+ #30, 8, "yuv420p"],
+ "CrowdRun_1920x1080": ["ClassB", 1920, 1080, 30, 8, "yuv420p"],
}
'''
"aspen_1080p_60f": ["ClassB", 1920, 1080, 30, 8,
@@ -85,10 +86,10 @@
'''
############## Scaling settings ############################################
# down scaling ratio
-DnScaleRatio = [1.0, 1.5, 2.0, 3.0, 4.0] #, 6.0] # downscale ratio
+DnScaleRatio = [1.0, 1.5, 2.0, 2.5, 3.0, 4.0] #, 6.0] # downscale ratio
#down and up scaling algorithm, the 2 lists should be of same size
-DnScalingAlgos = ['bicubic', 'bilinear', 'gauss', 'lanczos', 'sinc']
-UpScalingAlgos = ['bicubic', 'bilinear', 'gauss', 'lanczos', 'sinc']
+DnScalingAlgos = ['lanczos'] #['bicubic', 'bilinear', 'gauss', 'lanczos', 'sinc']
+UpScalingAlgos = ['lanczos'] #['bicubic', 'bilinear', 'gauss', 'lanczos', 'sinc']
if SMOKE_TEST:
DnScalingAlgos = ['bicubic', 'lanczos', 'sinc']
@@ -115,6 +116,7 @@
'HDRTools']
VMAF = os.path.join(BinPath, 'vmafossexec.exe')
HDRTool = os.path.join(BinPath, 'HDRMetrics.exe')
+CalcBDRateInExcel = False
######################## config for exporting data to excel #################
#https://xlsxwriter.readthedocs.io/working_with_colors.html#colors
@@ -122,6 +124,10 @@
LineColors = ['blue', 'red', 'green', 'orange', 'pink', 'yellow']
ConvexHullColor = 'white'
+# find out QP/Resolution with specified qty metrics
+TargetQtyMetrics = {'VMAF_Y': [60, 70, 80, 90],
+ 'PSNR_Y': [30, 35, 38, 40, 41]}
+
# format for exported excel of convexhull test
# if to modify below 3 variables, need to modify function
# SaveConvexHullResultsToExcel accordingly
@@ -131,6 +137,12 @@
CvxH_WtRows = [CvxH_startRow + i for i in range(len(QPs))]
CvxH_WtLastCol = CvxH_WtCols[-1] + len(QualityList)
CvxH_WtLastRow = CvxH_WtRows[-1]
+
+# format for writing convexhull curve data
+CvxHDataStartRow = CvxH_WtRows[-1] + 2; CvxHDataStartCol = 0
+CvxHDataNum = 5 # qty, bitrate, qp, resolution, 1 empty row as internal
+CvxHDataRows = [CvxHDataStartRow + 1 + CvxHDataNum * i for i in range(len(QualityList))]
+
######################## post analysis #########################################
PostAnalysis_Path = os.path.join(RootPath, 'analysis')
Path_RDResults = os.path.join(PostAnalysis_Path, 'rdresult')
diff --git a/tools/convexhull_framework/src/ConvexHullBDRate.py b/tools/convexhull_framework/src/ConvexHullBDRate.py
index 26bc858..04fb1f9 100644
--- a/tools/convexhull_framework/src/ConvexHullBDRate.py
+++ b/tools/convexhull_framework/src/ConvexHullBDRate.py
@@ -14,19 +14,23 @@
import xlsxwriter
import xlrd
import argparse
-from Config import VbaBinFile, QualityList
+from Config import VbaBinFile, QualityList, CalcBDRateInExcel
+from CalcBDRate import BD_RATE
+
class ConvexHullData:
ContentName = ""
ContentClass = ""
NumRDPoints = 0
RDPoints = {}
+
def __init__(self, Name="", Class="", num=0):
self.ContentName = Name
self.ContentClass = Class
self.NumRDPoints = num
self.RDPoints = {}
+
def ParseArguments(raw_args):
parser = argparse.ArgumentParser(prog='ConvexHullBDRate.py',
usage='%(prog)s [options]', description='')
@@ -50,6 +54,7 @@
InputTarget = args.Input2
Output = args.Output
+
def read_cell_as_str(sht, row, col):
cell_val = sht.cell(row, col).value
if cell_val == '':
@@ -57,6 +62,7 @@
else:
return str(cell_val)
+
def read_cell_as_float(sht, row, col):
cell_val = sht.cell(row, col).value
if cell_val == '':
@@ -64,6 +70,7 @@
else:
return float(cell_val)
+
def read_cell_as_int(sht, row, col):
cell_val = sht.cell(row, col).value
if cell_val == '':
@@ -71,67 +78,73 @@
else:
return int(cell_val)
+
def ParseConvexHullRD(xls):
wb = xlrd.open_workbook(xls)
shts = wb.sheet_names() #list of sheet names
data = {} #dict of data, key is the sheet name
+ cols = [3 + i * 4 for i in range(len(QualityList))]
for sht_name in shts:
sht = wb.sheet_by_name(sht_name)
#skip the title row
rows = sht.nrows
- cols = sht.ncols
start_row = 1
while start_row < rows:
row = start_row
cls = read_cell_as_str(sht, row, 0)
name = read_cell_as_str(sht, row, 1)
num = read_cell_as_int(sht, row, 2)
- if (cls == '' or name == '' or num == ''):
+ if cls == '' or name == '' or num == '':
print("Error: read empty cells")
exit()
point = ConvexHullData(name, cls, num)
-
rd_data = {}
for row in range(num):
- col = 3
- for qty in QualityList:
- br = read_cell_as_float(sht, start_row+row, col)
- q = read_cell_as_float(sht, start_row+row, col + 1)
- if (br != '' and q != ''):
- if (qty in rd_data.keys()):
- rd_data[qty].append((br, q))
+ for qty, col in zip(QualityList, cols):
+ res = read_cell_as_str(sht, start_row+row, col) #Resolution
+ qp = read_cell_as_int(sht, start_row+row, col + 1) #QP
+ br = read_cell_as_float(sht, start_row+row, col + 2) #Bitrate
+ q = read_cell_as_float(sht, start_row+row, col + 3) #Quality
+ if br != '' and q != '':
+ if qty in rd_data:
+ rd_data[qty].append((res, qp, br, q))
else:
- rd_data.update({qty:[(br, q)]})
- col += 2
+ rd_data.update({qty: [(res, qp, br, q)]})
+
start_row += num
point.RDPoints = rd_data
- if (sht_name in data.keys()):
+ if sht_name in data:
data[sht_name].append(point)
else:
data.update({sht_name: [point]})
- #wb.close()
return shts, data
+
def WriteOutputHeaderRow(sht):
sht.write(0, 0, 'Content Class')
sht.write(0, 1, 'Content Name')
sht.write(0, 2, 'Num RD Points')
col = 3
for qty in QualityList:
- sht.write(0, col, 'Bitrate(kbps)')
- sht.write(0, col + 1, qty)
- col += 2
+ sht.write(0, col, 'Resolution')
+ sht.write(0, col + 1, 'QP')
+ sht.write(0, col + 2, 'Bitrate(kbps)')
+ sht.write(0, col + 3, qty)
+ col += 4
col += 1
for qty in QualityList:
- sht.write(0, col, 'Bitrate(kbps)')
- sht.write(0, col + 1, qty)
- col += 2
+ sht.write(0, col, 'Resolution')
+ sht.write(0, col + 1, 'QP')
+ sht.write(0, col + 2, 'Bitrate(kbps)')
+ sht.write(0, col + 3, qty)
+ col += 4
col += 1
for (idx, qty) in zip(range(len(QualityList)), QualityList):
- sht.write(0, col + idx, "BDRATE-%s"%qty)
+ sht.write(0, col + idx, "BDRATE-%s" % qty)
+
def WriteRDData(sht, rd_data, start_row, start_col, format):
col = start_col
@@ -140,12 +153,15 @@
row = start_row
for (line, point) in zip(range(len(rd_data.RDPoints[qty])),
rd_data.RDPoints[qty]):
- sht.write_number(row + line, col, point[0], format)
- sht.write_number(row + line, col + 1, point[1], format)
- col += 2
+ sht.write_string(row + line, col, point[0]) #Resolution
+ sht.write_number(row + line, col + 1, point[1]) #QP
+ sht.write_number(row + line, col + 2, point[2], format) #Bitrate
+ sht.write_number(row + line, col + 3, point[3], format) #Quality
+ col += 4
max_rows = max(max_rows, len(rd_data.RDPoints[qty]))
return max_rows
+
def WriteRDRecord(sht, base_data, target_data, start_row, bdrate_fmt, float_fmt):
sht.write(start_row, 0, base_data.ContentClass)
sht.write(start_row, 1, base_data.ContentName)
@@ -156,39 +172,48 @@
float_fmt)
#write target data
- target_start_col = base_start_col + 2 * len(QualityList) + 1
+ target_start_col = base_start_col + 4 * len(QualityList) + 1
target_max_rows = WriteRDData(sht, target_data, start_row, target_start_col,
float_fmt)
#write bdrate formula
- bdrate_start_col = target_start_col + 2 * len(QualityList) + 1
+ bdrate_start_col = target_start_col + 4 * len(QualityList) + 1
total_rows = max(base_max_rows, target_max_rows)
sht.write(start_row, 2, total_rows)
for (qty, col) in zip(QualityList, range(len(QualityList))):
- refbr_b = xlrd.cellnameabs(start_row, base_start_col + col * 2)
- refbr_e = xlrd.cellnameabs(start_row + total_rows - 1,
- base_start_col + col * 2)
- refq_b = xlrd.cellnameabs(start_row, base_start_col + col * 2 + 1)
- refq_e = xlrd.cellnameabs(start_row + total_rows - 1,
- base_start_col + col * 2 + 1)
+ if CalcBDRateInExcel:
+ refbr_b = xlrd.cellnameabs(start_row, base_start_col + col * 4 + 2)
+ refbr_e = xlrd.cellnameabs(start_row + total_rows - 1,
+ base_start_col + col * 4 + 2)
+ refq_b = xlrd.cellnameabs(start_row, base_start_col + col * 4 + 3)
+ refq_e = xlrd.cellnameabs(start_row + total_rows - 1,
+ base_start_col + col * 4 + 3)
- testbr_b = xlrd.cellnameabs(start_row, target_start_col + col * 2)
- testbr_e = xlrd.cellnameabs(start_row + total_rows - 1,
- target_start_col + col * 2)
- testq_b = xlrd.cellnameabs(start_row, target_start_col + col * 2 + 1)
- testq_e = xlrd.cellnameabs(start_row + total_rows - 1,
- target_start_col + col * 2 + 1)
+ testbr_b = xlrd.cellnameabs(start_row, target_start_col + col * 4 + 2)
+ testbr_e = xlrd.cellnameabs(start_row + total_rows - 1,
+ target_start_col + col * 4 + 2)
+ testq_b = xlrd.cellnameabs(start_row, target_start_col + col * 4 + 3)
+ testq_e = xlrd.cellnameabs(start_row + total_rows - 1,
+ target_start_col + col * 4 + 3)
- # formula = '=-bdrate(%s:%s,%s:%s,%s:%s,%s:%s)' % (
- # refbr_b, refbr_e, refq_b, refq_e, testbr_b, testbr_e, testq_b, testq_e)
- formula = '=bdRateExtend(%s:%s,%s:%s,%s:%s,%s:%s)'\
- % (refbr_b, refbr_e, refq_b, refq_e, testbr_b, testbr_e, testq_b, testq_e)
- sht.write_formula(start_row, bdrate_start_col + col, formula, bdrate_fmt)
+ # formula = '=-bdrate(%s:%s,%s:%s,%s:%s,%s:%s)' % (
+ # refbr_b, refbr_e, refq_b, refq_e, testbr_b, testbr_e, testq_b, testq_e)
+ formula = '=bdRateExtend(%s:%s,%s:%s,%s:%s,%s:%s)'\
+ % (refbr_b, refbr_e, refq_b, refq_e, testbr_b, testbr_e, testq_b, testq_e)
+ sht.write_formula(start_row, bdrate_start_col + col, formula, bdrate_fmt)
+ else:
+ refbrs = [base_data.RDPoints[qty][i][2] for i in range(len(base_data.RDPoints[qty]))]
+ refqtys = [base_data.RDPoints[qty][i][3] for i in range(len(base_data.RDPoints[qty]))]
+ testbrs = [target_data.RDPoints[qty][i][2] for i in range(len(target_data.RDPoints[qty]))]
+ testqtys = [target_data.RDPoints[qty][i][3] for i in range(len(target_data.RDPoints[qty]))]
+ bdrate = BD_RATE(refbrs, refqtys, testbrs, testqtys) / 100.0
+ sht.write_number(start_row, bdrate_start_col + col, bdrate, bdrate_fmt)
return total_rows
+
def FindContent(name, rd_data):
for data in rd_data:
- if (name == data.ContentName):
+ if name == data.ContentName:
return data
return ''
@@ -196,9 +221,9 @@
# main
######################################
if __name__ == "__main__":
- sys.argv = ["","-i1","ConvexHullRD_ScaleAlgosNum_5_ffmpeg_hevc_medium.xlsx",
- "-i2","ConvexHullRD_ScaleAlgosNum_5_ffmpeg_hevc_veryslow.xlsx",
- "-o","ConvexHullBDRate.xlsm"]
+ #sys.argv = ["","-i1","ConvexHullData_ScaleAlgosNum_6_aom_av1_1.xlsx",
+ #"-i2","ConvexHullData_ScaleAlgosNum_6_aom_av1_6.xlsx",
+ #"-o","ConvexHullBDRate.xlsm"]
ParseArguments(sys.argv)
base_shts, base_rd_data = ParseConvexHullRD(InputBase)
@@ -220,8 +245,9 @@
for base_data in base_rd_data[sht_name]:
ContentName = base_data.ContentName
target_data = FindContent(ContentName, target_rd_data[sht_name])
- if (target_data != ''):
+ if target_data != '':
total_rows = WriteRDRecord(sht, base_data, target_data,
start_row, bdrate_fmt, float_fmt)
start_row += total_rows
+
output_wb.close()
diff --git a/tools/convexhull_framework/src/ConvexHullTest.py b/tools/convexhull_framework/src/ConvexHullTest.py
index 327249b..3c3e4c3 100644
--- a/tools/convexhull_framework/src/ConvexHullTest.py
+++ b/tools/convexhull_framework/src/ConvexHullTest.py
@@ -21,14 +21,15 @@
AddSeriesToChart_Scatter, InsertChartsToSheet, CreateNewSubfolder,\
GetContents, SetupLogging, UpdateChart, AddSeriesToChart_Scatter_Rows,\
Cleanfolder
-from PostAnalysis_Summary import GenerateSummaryExcelFile,\
+from PostAnalysis_Summary import GenerateSummaryRDDataExcelFile,\
GenerateSummaryConvexHullExcelFile
from ScalingTest import Run_Scaling_Test, SaveScalingResultsToExcel
import Utils
from Config import LogLevels, FrameNum, DnScaleRatio, QPs, CvxH_WtCols,\
CvxH_WtRows, QualityList, LineColors, DnScalingAlgos, UpScalingAlgos,\
ContentPath, SummaryOutPath, WorkPath, Path_RDResults, Clips, \
- ConvexHullColor, EncodeMethods, CodecNames, LoggerName, LogCmdOnly
+ ConvexHullColor, EncodeMethods, CodecNames, LoggerName, LogCmdOnly, \
+ TargetQtyMetrics, CvxHDataRows, CvxHDataStartRow, CvxHDataStartCol, CvxHDataNum
###############################################################################
##### Helper Functions ########################################################
@@ -105,58 +106,79 @@
return lower, upper
-def AddConvexHullCurveToCharts(sht, startrow, startcol, charts, rdPoints,
- dnScaledRes, sum_sht, sum_start_row):
- shtname = sht.get_name()
- sht.write(startrow, startcol, "ConvexHull Data")
- numitems = 5 # qty, bitrate, qp, resolution, 1 empty row as internal
- rows = [startrow + 1 + numitems * i for i in range(len(QualityList))]
- hull = {}
+def LookUpQPAndResolutionInConvexHull(qtyvals, qtyhull, qtycvhQPs, qtycvhRes):
+ cvhqtys = [h[1] for h in qtyhull]
+ qtyQPs = []; qtyRes = []
+ for val in qtyvals:
+ closest_idx = min(range(len(cvhqtys)), key=lambda i: abs(cvhqtys[i] - val))
+ if (closest_idx == 0 and val > cvhqtys[0]) or (closest_idx == (len(qtyvals) - 1) and val < cvhqtys[-1]):
+ Utils.Logger.info("the give value of quality metric is out of range of convex hull test quality values.")
+
+ qtyQPs.append(qtycvhQPs[closest_idx])
+ qtyRes.append(qtycvhRes[closest_idx])
+
+ return qtyQPs, qtyRes
+
+
+def AddConvexHullCurveToCharts(sht, charts, rdPoints, dnScaledRes, tgtqmetrics):
+ shtname = sht.get_name()
+ sht.write(CvxHDataStartRow, CvxHDataStartCol, "ConvexHull Data")
+
+ hull = {}; cvh_QPs = {}; cvh_Res_txt = {}
max_len = 0
- for qty, idx, row in zip(QualityList, range(len(QualityList)), rows):
+ for qty, idx, row in zip(QualityList, range(len(QualityList)), CvxHDataRows):
lower, upper = convex_hull(rdPoints[idx])
hull[qty] = upper
max_len = max(max_len, len(upper))
- sht.write(row, startcol, qty)
- sht.write(row + 1, startcol, "Bitrate(kbps)")
- sht.write(row + 2, startcol, "QP")
- sht.write(row + 3, startcol, 'Resolution')
+ sht.write(row, CvxHDataStartCol, qty)
+ sht.write(row + 1, CvxHDataStartCol, "Bitrate(kbps)")
+ sht.write(row + 2, CvxHDataStartCol, "QP")
+ sht.write(row + 3, CvxHDataStartCol, 'Resolution')
brts = [h[0] for h in hull[qty]]
qtys = [h[1] for h in hull[qty]]
- sht.write_row(row, startcol + 1, qtys)
- sht.write_row(row + 1, startcol + 1, brts)
+ sht.write_row(row, CvxHDataStartCol + 1, qtys)
+ sht.write_row(row + 1, CvxHDataStartCol + 1, brts)
- rdpnts_qtys = [rd[1] for rd in rdPoints[idx]]
- cvh_qidxs = [rdpnts_qtys.index(qty) for qty in qtys]
- cvh_QPs = [QPs[i % len(QPs)] for i in cvh_qidxs]
- cvh_Res = [dnScaledRes[i // len(QPs)] for i in cvh_qidxs]
- cvh_Res_txt = ["%sx%s" % (x, y) for (x, y) in cvh_Res]
- sht.write_row(row + 2, startcol + 1, cvh_QPs)
- sht.write_row(row + 3, startcol + 1, cvh_Res_txt)
+ cvh_idxs = [rdPoints[idx].index((brt, qty)) for brt, qty in zip(brts, qtys)]
+ cvh_QPs[qty] = [QPs[i % len(QPs)] for i in cvh_idxs]
+ cvh_Res = [dnScaledRes[i // len(QPs)] for i in cvh_idxs]
+ cvh_Res_txt[qty] = ["%sx%s" % (x, y) for (x, y) in cvh_Res]
+ sht.write_row(row + 2, CvxHDataStartCol + 1, cvh_QPs[qty])
+ sht.write_row(row + 3, CvxHDataStartCol + 1, cvh_Res_txt[qty])
- cols = [startcol + 1 + i for i in range(len(hull[qty]))]
+ cols = [CvxHDataStartCol + 1 + i for i in range(len(hull[qty]))]
AddSeriesToChart_Scatter_Rows(shtname, cols, row, row + 1, charts[idx],
'ConvexHull', ConvexHullColor)
+ endrow = CvxHDataRows[-1] + CvxHDataNum
- endrow = rows[-1] + numitems
+ # find out QP/resolution for given qty metric and qty value
+ startrow_fdout = endrow + 1
+ sht.write(startrow_fdout, CvxHDataStartCol, " Find out QP/resolution for given quality metrics:")
+ numitem_fdout = 4 # qtymetric values, QP, resolution, one empty row
+ startrows_fdout = [startrow_fdout + 1 + i * numitem_fdout for i in range(len(tgtqmetrics))]
- #add convex hull data into summary sheet. the length of each convex hull
- #could be different
- sum_row = sum_start_row
- sum_sht.write(sum_row, 2, max_len)
- sum_col = 3
- for qty in QualityList:
- row = sum_row
- for point in hull[qty]:
- sum_sht.write(row, sum_col, point[0])
- sum_sht.write(row, sum_col + 1, point[1])
- row += 1
- sum_col += 2
+ for metric, idx in zip(tgtqmetrics, range(len(tgtqmetrics))):
+ if metric not in QualityList:
+ Utils.Logger.error("wrong qty metric name. should be one of the name in QualityList.")
+ return endrow
- return endrow, sum_start_row + max_len - 1
+ qtyvals = tgtqmetrics[metric]
+ qtyQPs, qtyRes = LookUpQPAndResolutionInConvexHull(qtyvals, hull[metric],
+ cvh_QPs[metric], cvh_Res_txt[metric])
+ # write the look up result into excel file
+ startrow = startrows_fdout[idx]
+ sht.write(startrow, CvxHDataStartCol, metric)
+ sht.write_row(startrow, 1, qtyvals)
+ sht.write(startrow + 1, CvxHDataStartCol, 'QP')
+ sht.write_row(startrow + 1, CvxHDataStartCol + 1, qtyQPs)
+ sht.write(startrow + 2, CvxHDataStartCol, 'Resolution')
+ sht.write_row(startrow + 2, CvxHDataStartCol + 1, qtyRes)
+ endrow = startrow + 3
+
+ return endrow
###############################################################################
######### Major Functions #####################################################
@@ -211,8 +233,7 @@
Utils.Logger.info("finish running encode test.")
-def SaveConvexHullResultsToExcel(content, dnScAlgos, upScAlgos, sum_wb,
- sum_start_row):
+def SaveConvexHullResultsToExcel(content, dnScAlgos, upScAlgos):
Utils.Logger.info("start saving RD results to excel file.......")
if not os.path.exists(Path_RDResults):
os.makedirs(Path_RDResults)
@@ -231,7 +252,6 @@
sht.write(1, 0, "QP")
sht.write_column(CvxH_WtRows[0], 0, QPs)
shtname = sht.get_name()
- sum_sht = sum_wb.get_worksheet_by_name(shtname)
charts = []; y_mins = {}; y_maxs = {}; RDPoints = {}
for qty, x in zip(QualityList, range(len(QualityList))):
@@ -279,16 +299,8 @@
RDPoints[x] = RDPoints[x] + rdpnts
# add convexhull curve to charts
- startrow = CvxH_WtRows[-1] + 2; startcol = 0
-
- sum_startrow = sum_start_row[shtname]
- sum_sht.write(sum_startrow, 0, cls)
- sum_sht.write(sum_startrow, 1, contentname)
- endrow, sum_end_row = AddConvexHullCurveToCharts(sht, startrow, startcol,
- charts, RDPoints,
- DnScaledRes, sum_sht,
- sum_startrow)
- sum_start_row[shtname] = sum_end_row + 1
+ endrow = AddConvexHullCurveToCharts(sht, charts, RDPoints, DnScaledRes,
+ TargetQtyMetrics)
#update RD chart with approprate y axis range
for qty, x in zip(QualityList, range(len(QualityList))):
@@ -304,6 +316,7 @@
wb.close()
Utils.Logger.info("finish export convex hull results to excel file.")
+
def ParseArguments(raw_args):
parser = argparse.ArgumentParser(prog='ConvexHullTest.py',
usage='%(prog)s [options]',
@@ -356,7 +369,11 @@
# main
######################################
if __name__ == "__main__":
- #sys.argv = ["","-f","convexhull","-c","hevc","-m","ffmpeg","-p","medium"]
+ #sys.argv = ["","-f","scaling"]
+ #sys.argv = ["", "-f", "sumscaling"]
+ #sys.argv = ["", "-f", "encode","-c","av1","-m","aom","-p","1"]
+ #sys.argv = ["", "-f", "convexhull","-c","av1","-m","aom","-p","6"]
+ #sys.argv = ["", "-f", "summary", "-c", "av1", "-m", "aom", "-p", "6"]
ParseArguments(sys.argv)
# preparation for executing functions
@@ -381,20 +398,21 @@
for dnScalAlgo, upScalAlgo in zip(DnScalingAlgos, UpScalingAlgos):
Run_ConvexHull_Test(content, dnScalAlgo, upScalAlgo)
elif Function == 'convexhull':
- sum_wb, sum_start_row = GenerateSummaryConvexHullExcelFile(EncodeMethod,
- CodecName,
- EncodePreset,
- SummaryOutPath,
- DnScalingAlgos,
- UpScalingAlgos)
for content in Contents:
- SaveConvexHullResultsToExcel(content, DnScalingAlgos, UpScalingAlgos,
- sum_wb, sum_start_row)
- sum_wb.close()
+ SaveConvexHullResultsToExcel(content, DnScalingAlgos, UpScalingAlgos)
+
elif Function == 'summary':
- smfile = GenerateSummaryExcelFile(EncodeMethod, CodecName, EncodePreset,
- SummaryOutPath, Path_RDResults,
- ContentPath, Clips)
- Utils.Logger.info("summary file generated: %s" % smfile)
+ RDResultFilesGenerated = []
+ for content in Contents:
+ RDResultFilesGenerated.append(GetRDResultExcelFile(content))
+
+ RDsmfile = GenerateSummaryRDDataExcelFile(EncodeMethod, CodecName, EncodePreset,
+ SummaryOutPath, RDResultFilesGenerated,
+ ContentPath, Clips)
+ Utils.Logger.info("RD data summary file generated: %s" % RDsmfile)
+
+ CvxHsmfile = GenerateSummaryConvexHullExcelFile(EncodeMethod, CodecName, EncodePreset,
+ SummaryOutPath, RDResultFilesGenerated)
+ Utils.Logger.info("Convel hull summary file generated: %s" % CvxHsmfile)
else:
Utils.Logger.error("invalid parameter value of Function")
diff --git a/tools/convexhull_framework/src/PostAnalysis_Summary.py b/tools/convexhull_framework/src/PostAnalysis_Summary.py
index 1d711e9..c2b78a9 100644
--- a/tools/convexhull_framework/src/PostAnalysis_Summary.py
+++ b/tools/convexhull_framework/src/PostAnalysis_Summary.py
@@ -13,10 +13,11 @@
import os
import xlsxwriter
import xlrd
+import re
from Config import QPs, DnScaleRatio, QualityList, VbaBinFile, CvxH_WtRows,\
- CvxH_WtLastCol, LoggerName
-from Utils import GetShortContentName, CalcRowsClassAndContentDict,\
- SweepScalingAlgosInOneResultFile
+ CvxH_WtLastCol, LoggerName, CalcBDRateInExcel, CvxH_WtCols, CvxHDataRows, CvxHDataStartCol
+from Utils import GetShortContentName, CalcRowsClassAndContentDict
+from CalcBDRate import BD_RATE
import logging
subloggername = "PostAnalysisSummary"
@@ -29,18 +30,40 @@
################################################################################
### Helper Functions ###########################################################
-def GetSummaryFileName(encMethod, codecName, preset, path):
- name = 'ConvexHullSummary_ScaleAlgosNum_%d_%s_%s_%s.xlsm'\
+def GetRDSummaryFileName(encMethod, codecName, preset, path):
+ filetype = 'xlsm' if CalcBDRateInExcel else 'xlsx'
+ name = 'ConvexHullRDSummary_ScaleAlgosNum_%d_%s_%s_%s.%s'\
+ % (len(DnScaleRatio), encMethod, codecName, preset, filetype)
+ return os.path.join(path, name)
+
+def GetConvexHullDataSummaryFileName(encMethod, codecName, preset, path):
+ name = 'ConvexHullData_ScaleAlgosNum_%d_%s_%s_%s.xlsx'\
% (len(DnScaleRatio), encMethod, codecName, preset)
return os.path.join(path, name)
-def GetConvexHullRDFileName(encMethod, codecName, preset, path):
- name = 'ConvexHullRD_ScaleAlgosNum_%d_%s_%s_%s.xlsx'\
- % (len(DnScaleRatio), encMethod, codecName, preset)
- return os.path.join(path, name)
+def SweepScalingAlgosInOneResultFile(resultfiles):
+ dnscls = []
+ upscls = []
-def CopyResultDataToSummaryFile_Onesheet(sht, wt_cols, contentsdict, rows_class,
- infile_path):
+ # here assume all result files includes same combinations of dn and up scaling algos
+ # that is, same number of sheet and sheet names
+ file = resultfiles[0]
+ if os.path.isfile(file):
+ rdwb = xlrd.open_workbook(file)
+ else:
+ return dnscls, upscls
+ if rdwb is not None:
+ shtnms = rdwb.sheet_names()
+ for shtname in shtnms:
+ item = re.findall(r"(.+)\-\-(.+)", shtname)
+ dnsl = item[0][0]
+ upsl = item[0][1]
+ dnscls.append(dnsl)
+ upscls.append(upsl)
+
+ return dnscls, upscls
+
+def CopyResultDataToSummaryFile_Onesheet(sht, wt_cols, resultfiles):
rdrows = CvxH_WtRows
rd_endcol = CvxH_WtLastCol
@@ -56,8 +79,7 @@
# copy the results data from each content's result file to corresponding
# location in summary excel file
- resultfiles = os.listdir(infile_path)
- for (cls, contents), row_class in zip(contentsdict.items(), rows_class):
+ for (cls, contents), row_class in zip(ContentsDict.items(), Rows_Class):
sht.write(row_class, 0, cls)
rows_content = [i * len(QPs) for i in range(len(contents))]
for content, row_cont in zip(contents, rows_content):
@@ -66,7 +88,7 @@
rdwb = None
for resfile in resultfiles:
if key in resfile:
- rdwb = xlrd.open_workbook(os.path.join(infile_path, resfile))
+ rdwb = xlrd.open_workbook(resfile)
rdsht = rdwb.sheet_by_name(shtname)
for i, rdrow in zip(range(len(QPs)), rdrows):
data = rdsht.row_values(rdrow, 0, rd_endcol + 1)
@@ -77,14 +99,14 @@
logger.warning("not find convex hull result file for content:%s"
% content)
-def CalBDRate_OneSheet(sht, cols, contentsdict, rows_class, cols_bdmtrs, cellformat):
+def CalBDRateWithExcel_OneSheet(sht, cols, cols_bdmtrs, cellformat):
row_refst = 0
- bdstep = 3
+ bdstep = len(QPs) - 1
for cols_bd, residx in zip(cols_bdmtrs, range(1, len(DnScaleRatio))):
sht.write(0, cols_bd, 'BD-Rate %.2f vs. %.2f' % (DnScaleRatio[residx],
DnScaleRatio[0]))
sht.write_row(1, cols_bd, QualityList)
- for (cls, contents), row_class in zip(contentsdict.items(), rows_class):
+ for (cls, contents), row_class in zip(ContentsDict.items(), Rows_Class):
rows_content = [i * len(QPs) for i in range(len(contents))]
for row_cont in rows_content:
for y in range(len(QualityList)):
@@ -97,16 +119,16 @@
refq_e = xlrd.cellnameabs(row_class + row_cont + row_refst
+ bdstep, cols[0] + 1 + y)
- testbr_b = xlrd.cellnameabs(row_class + row_cont,
+ testbr_b = xlrd.cellnameabs(row_class + row_cont + row_refst,
cols[residx])
- testbr_e = xlrd.cellnameabs(row_class + row_cont + bdstep,
- cols[residx])
- testq_b = xlrd.cellnameabs(row_class + row_cont,
+ testbr_e = xlrd.cellnameabs(row_class + row_cont + row_refst
+ + bdstep, cols[residx])
+ testq_b = xlrd.cellnameabs(row_class + row_cont + row_refst,
cols[residx] + 1 + y)
- testq_e = xlrd.cellnameabs(row_class + row_cont + bdstep,
- cols[residx] + 1 + y)
+ testq_e = xlrd.cellnameabs(row_class + row_cont + row_refst
+ + bdstep, cols[residx] + 1 + y)
- #formula = '=-bdrate(%s:%s,%s:%s,%s:%s,%s:%s)' % (
+ #formula = '=bdrate(%s:%s,%s:%s,%s:%s,%s:%s)' % (
#refbr_b, refbr_e, refq_b, refq_e, testbr_b, testbr_e,
# testq_b, testq_e)
formula = '=bdRateExtend(%s:%s,%s:%s,%s:%s,%s:%s)'\
@@ -115,6 +137,36 @@
sht.write_formula(row_class + row_cont, cols_bd + y, formula,
cellformat)
+
+def CalBDRateWithPython_OneSheet(sht, cols_bdmtrs, resultfiles, cellformat):
+ row_refst = 0
+ bdstep = len(QPs) - 1
+ assert row_refst + bdstep < len(CvxH_WtRows)
+
+ shtname = sht.get_name()
+ rdrows = CvxH_WtRows
+ rdcols = CvxH_WtCols
+ for cols_bd, residx in zip(cols_bdmtrs, range(1, len(DnScaleRatio))):
+ sht.write(0, cols_bd, 'BD-Rate %.2f vs. %.2f' % (DnScaleRatio[residx],
+ DnScaleRatio[0]))
+ sht.write_row(1, cols_bd, QualityList)
+ for (cls, contents), row_class in zip(ContentsDict.items(), Rows_Class):
+ rows_content = [i * len(QPs) for i in range(len(contents))]
+ for row_cont, content in zip(rows_content, contents):
+ key = GetShortContentName(content)
+ for resfile in resultfiles:
+ if key in resfile:
+ rdwb = xlrd.open_workbook(resfile)
+ rdsht = rdwb.sheet_by_name(shtname)
+ break
+ for y in range(len(QualityList)):
+ refbrs = rdsht.col_values(rdcols[0], rdrows[row_refst], rdrows[row_refst + bdstep] + 1)
+ refqtys = rdsht.col_values(rdcols[0] + 1 + y, rdrows[row_refst], rdrows[row_refst + bdstep] + 1)
+ testbrs = rdsht.col_values(rdcols[residx], rdrows[row_refst], rdrows[row_refst + bdstep] + 1)
+ testqtys = rdsht.col_values(rdcols[residx] + 1 + y, rdrows[row_refst], rdrows[row_refst + bdstep] + 1)
+ bdrate = BD_RATE(refbrs, refqtys, testbrs, testqtys) / 100.0
+ sht.write(row_class + row_cont, cols_bd + y, bdrate, cellformat)
+
def GenerateFormula_SumRows(shtname, rows, col):
cells = ''
for row in rows:
@@ -134,7 +186,7 @@
formula = '=SUM(%s)/%d' % (cells, num)
return formula
-def WriteBitrateQtyAverageSheet(wb, rdshts, contentsdict, rd_rows_class, rdcols):
+def WriteBitrateQtyAverageSheet(wb, rdshts, rdcols):
avg_sht = wb.add_worksheet('Average')
avg_sht.write(2, 0, 'Content Class')
avg_sht.write(2, 1, 'Content Number')
@@ -162,11 +214,11 @@
startrow = 3
step = len(QPs)
- rows_class_avg = [startrow + step * i for i in range(len(contentsdict))]
+ rows_class_avg = [startrow + step * i for i in range(len(ContentsDict))]
totalnum_content = 0
- for (cls, contents), row_class, rdclassrow in zip(contentsdict.items(),
+ for (cls, contents), row_class, rdclassrow in zip(ContentsDict.items(),
rows_class_avg,
- rd_rows_class):
+ Rows_Class):
avg_sht.write(row_class, 0, cls)
totalnum_content = totalnum_content + len(contents)
avg_sht.write(row_class, 1, len(contents))
@@ -226,8 +278,7 @@
if residx == 0:
break
-def WriteBDRateAverageSheet(wb, rdshts, contentsdict, rd_rows_class,
- rd_cols_bdmtrs, cellformat):
+def WriteBDRateAverageSheet(wb, rdshts, rd_cols_bdmtrs, cellformat):
# write bdrate average sheet
bdavg_sht = wb.add_worksheet('Average_BDRate')
bdavg_sht.write(2, 0, 'Content Class')
@@ -242,7 +293,7 @@
cols_upscl_bd = [step_upscl * i for i in range(len(upScalAlgos))]
step_res = len(upScalAlgos) * step_upscl + colintval_dnscalres
cols_res_bd = [step_res * i + startcol for i in range(len(DnScaleRatio) - 1)]
- rows_class_rdavg = [startrow + i for i in range(len(contentsdict))]
+ rows_class_rdavg = [startrow + i for i in range(len(ContentsDict))]
for residx, col_res_bd in zip(range(1, len(DnScaleRatio)), cols_res_bd):
bdavg_sht.write(0, col_res_bd, 'BD-Rate %.2f vs. %.2f'
@@ -252,9 +303,9 @@
bdavg_sht.write_row(2, col_res_bd + col_upscl_bd, QualityList)
totalnum_content = 0
- for (cls, contents), row_class, rdclassrow in zip(contentsdict.items(),
+ for (cls, contents), row_class, rdclassrow in zip(ContentsDict.items(),
rows_class_rdavg,
- rd_rows_class):
+ Rows_Class):
bdavg_sht.write(row_class, 0, cls)
totalnum_content = totalnum_content + len(contents)
bdavg_sht.write(row_class, 1, len(contents))
@@ -284,6 +335,7 @@
bdavg_sht.write_formula(last_row, col_res + col_upscl + j,
formula, cellformat)
+
#######################################################################
#######################################################################
# GenerateSummaryExcelFile is to
@@ -295,22 +347,20 @@
# Arguments description:
# content_paths is where test contents located, which used for generating convex
# hull results.
-# infile_path is where convex hull results excel files located.
-# classes is the content classes and here it requires contents are located
-# in corresponding subfolder named with its belonged class
+# resultfiles is a list of all convex hull RD result files generated by
+# running '-f convexhull'
# summary_outpath is the folder where output summary file will be
-# note: all results files under infile_path should have exactly same test items
-# before running this summary script
-def GenerateSummaryExcelFile(encMethod, codecName, preset, summary_outpath,
- infile_path, content_path, clips):
+def GenerateSummaryRDDataExcelFile(encMethod, codecName, preset, summary_outpath,
+ resultfiles, content_path, clips):
+
global dnScalAlgos, upScalAlgos
- # find all scaling algos tested in results file, expect they are the same
- # for every content
- dnScalAlgos, upScalAlgos = SweepScalingAlgosInOneResultFile(infile_path)
+ # find all scaling algos tested in the results file;
+ # IMPORTANT: the up- and down-scaling algos are expected to be the same for every content
+ dnScalAlgos, upScalAlgos = SweepScalingAlgosInOneResultFile(resultfiles)
if not os.path.exists(summary_outpath):
os.makedirs(summary_outpath)
- smfile = GetSummaryFileName(encMethod, codecName, preset, summary_outpath)
+ smfile = GetRDSummaryFileName(encMethod, codecName, preset, summary_outpath)
wb = xlsxwriter.Workbook(smfile)
# shts is for all scaling algorithms' convex hull test results
@@ -326,16 +376,17 @@
colstart = 3
colInterval = 2
rowstart = 2
-
+ # generate the starting row number of each class: Rows_Class
+ global ContentsDict, Rows_Class
+ ContentsDict, Rows_Class = CalcRowsClassAndContentDict(rowstart,
+ content_path,
+ clips, len(QPs))
# cols is column number of results files
step = colInterval + 1 + len(QualityList) # 1 is for bitrate
sum_wtcols = [step * i + colstart for i in range(len(DnScaleRatio))]
- # to generate rows number of starting of each class: rows_class
- contentsdict, rows_class = CalcRowsClassAndContentDict(rowstart,
- content_path,
- clips, len(QPs))
- wb.add_vba_project(VbaBinFile)
+ if CalcBDRateInExcel:
+ wb.add_vba_project(VbaBinFile)
cellformat = wb.add_format()
cellformat.set_num_format('0.00%')
#cols_bdmtrs is the column number to write the bdrate data
@@ -345,45 +396,98 @@
# -1 because first resolution is used as reference
for sht in shts:
- CopyResultDataToSummaryFile_Onesheet(sht, sum_wtcols, contentsdict,
- rows_class, infile_path)
+ CopyResultDataToSummaryFile_Onesheet(sht, sum_wtcols, resultfiles)
# calculate bd rate in each scaling sheet
- CalBDRate_OneSheet(sht, sum_wtcols, contentsdict, rows_class,
- cols_bdmtrs, cellformat)
+ if CalcBDRateInExcel:
+ CalBDRateWithExcel_OneSheet(sht, sum_wtcols, cols_bdmtrs, cellformat)
+ else:
+ CalBDRateWithPython_OneSheet(sht, cols_bdmtrs, resultfiles, cellformat)
# calculate average bitrate and quality metrics for each category and
# write to "average" sheet
- WriteBitrateQtyAverageSheet(wb, shts, contentsdict, rows_class, sum_wtcols)
+ WriteBitrateQtyAverageSheet(wb, shts, sum_wtcols)
# calculate average bd metrics and write to a new sheet
- WriteBDRateAverageSheet(wb, shts, contentsdict, rows_class, cols_bdmtrs,
- cellformat)
+ WriteBDRateAverageSheet(wb, shts, cols_bdmtrs, cellformat)
wb.close()
return smfile
def GenerateSummaryConvexHullExcelFile(encMethod, codecName, preset,
- summary_outpath, DnScalingAlgos,
- UpScalingAlgos):
+ summary_outpath, resultfiles):
if not os.path.exists(summary_outpath):
os.makedirs(summary_outpath)
- smfile = GetConvexHullRDFileName(encMethod, codecName, preset,
+ smfile = GetConvexHullDataSummaryFileName(encMethod, codecName, preset,
summary_outpath)
- sum_wb = xlsxwriter.Workbook(smfile)
+ wb = xlsxwriter.Workbook(smfile)
# shts is for all scaling algorithms' convex hull test results
- sum_start_row = {}
- #write the header in each sheet
- for dnsc, upsc in zip(DnScalingAlgos, UpScalingAlgos):
+ shts = []
+ cols = [3 + i * 4 for i in range(len(QualityList))]
+ for dnsc, upsc in zip(dnScalAlgos, upScalAlgos):
shtname = dnsc + '--' + upsc
- sht = sum_wb.add_worksheet(shtname)
+ sht = wb.add_worksheet(shtname)
+ shts.append(sht)
+ # write headers
sht.write(0, 0, 'Content Class')
sht.write(0, 1, 'Content Name')
sht.write(0, 2, 'Num RD Points')
- col = 3
- for qty in QualityList:
- sht.write(0, col, 'Bitrate(kbps)')
- sht.write(0, col + 1, qty)
- col += 2
- sum_start_row[shtname] = 1
- return sum_wb, sum_start_row
+
+ for qty, col in zip(QualityList, cols):
+ sht.write(0, col, 'Resolution')
+ sht.write(0, col + 1, 'QP')
+ sht.write(0, col + 2, 'Bitrate(kbps)')
+ sht.write(0, col + 3, qty)
+
+ # copy convexhull data from each content's result file to corresponding
+ # location in summary excel file
+ row = 1
+ rdcolstart = CvxHDataStartCol + 1
+ for (cls, contents) in ContentsDict.items():
+ sht.write(row, 0, cls)
+ for content in contents:
+ key = GetShortContentName(content)
+ sht.write(row, 1, key)
+ for resfile in resultfiles:
+ if key in resfile:
+ rdwb = xlrd.open_workbook(resfile)
+ rdsht = rdwb.sheet_by_name(shtname)
+ maxNumQty = 0
+ for rdrow, col in zip(CvxHDataRows, cols):
+ qtys = []; brs = []; qps = []; ress = []
+ numQty = 0
+ for qty in rdsht.row_values(rdrow)[rdcolstart:]:
+ if qty == '':
+ break
+ else:
+ qtys.append(qty)
+ numQty = numQty + 1
+ maxNumQty = max(maxNumQty, numQty)
+
+ for br in rdsht.row_values(rdrow + 1)[rdcolstart:]:
+ if br == '':
+ break
+ else:
+ brs.append(br)
+ for qp in rdsht.row_values(rdrow + 2)[rdcolstart:]:
+ if qp == '':
+ break
+ else:
+ qps.append(qp)
+ for res in rdsht.row_values(rdrow + 3)[rdcolstart:]:
+ if res == '':
+ break
+ else:
+ ress.append(res)
+
+ sht.write_column(row, col, ress)
+ sht.write_column(row, col + 1, qps)
+ sht.write_column(row, col + 2, brs)
+ sht.write_column(row, col + 3, qtys)
+
+ sht.write(row, 2, maxNumQty)
+ row = row + maxNumQty
+ break
+
+ wb.close()
+ return smfile
diff --git a/tools/convexhull_framework/src/ScalingTest.py b/tools/convexhull_framework/src/ScalingTest.py
index a8f4a7a..aa26d80 100644
--- a/tools/convexhull_framework/src/ScalingTest.py
+++ b/tools/convexhull_framework/src/ScalingTest.py
@@ -156,8 +156,7 @@
wb.close()
logger.info("finish export scaling quality results to excel file.")
-def GenerateSummarySheet(wb, dnScalAlgos, upScalAlgos, ratio, contentsdict,
- rows_class, path_log):
+def GenerateSummarySheet(wb, dnScalAlgos, upScalAlgos, ratio, path_log):
logger.info("start generate summary sheet for ratio %2.2f" % ratio)
shts = []
@@ -176,7 +175,7 @@
sht.write_row(1, col, QualityList)
content_infos = {}; totalnum_contents = 0
- for (clss, contents), row_clss in zip(contentsdict.items(), rows_class):
+ for (clss, contents), row_clss in zip(ContentsDict.items(), Rows_Class):
sht.write(row_clss, 0, clss)
totalnum_contents = totalnum_contents + len(contents)
for content, row_cont in zip(contents, range(len(contents))):
@@ -203,7 +202,7 @@
range(len(dnScalAlgos))):
qualities = []
seriname = dn_algo + '--' + up_algo
- for (clss, contents), row_clss in zip(contentsdict.items(), rows_class):
+ for (clss, contents), row_clss in zip(ContentsDict.items(), Rows_Class):
for content, row_cont in zip(contents, range(len(contents))):
key = GetShortContentName(content)
w = content_infos[key][0]
@@ -241,8 +240,7 @@
return sht
-def GenerateAverageSheet(wb, sumsht, dnScalAlgos, upScalAlgos, ratio,
- contentsdict, rows_class):
+def GenerateAverageSheet(wb, sumsht, dnScalAlgos, upScalAlgos, ratio):
logger.info("start generate average sheet for ratio %2.2f" % ratio)
rdsht = sumsht
@@ -262,7 +260,7 @@
sht.write(0, col, algos)
sht.write_row(1, col, StatsMetrics)
- step = len(contentsdict) + 1 # 1 extra row for total of each class
+ step = len(ContentsDict) + 1 # 1 extra row for total of each class
startrow = 2
rows_qtymtr = [startrow + step * i for i in range(len(QualityList))]
for qty, row_qm, y in zip(QualityList, rows_qtymtr, range(len(QualityList))):
@@ -274,9 +272,9 @@
#charts.append(chart)
totalnum_contents = 0
- for (cls, contents), idx, rdrow_cls in zip(contentsdict.items(),
- range(len(contentsdict)),
- rows_class):
+ for (cls, contents), idx, rdrow_cls in zip(ContentsDict.items(),
+ range(len(ContentsDict)),
+ Rows_Class):
sht.write(row_qm + idx, 1, cls)
num_content = len(contents)
totalnum_contents = totalnum_contents + num_content
@@ -304,7 +302,7 @@
sht.write(row_qm + idx, wtcol + 3, formula)
#write total contents statistics
- wtrow = row_qm + len(contentsdict)
+ wtrow = row_qm + len(ContentsDict)
sht.write(wtrow, 1, 'Total')
sht.write(wtrow, 2, totalnum_contents)
for rdcol, wtcol in zip(ScalSumQty_WtCols, cols_avg):
@@ -349,18 +347,17 @@
logger.info("start generating scaling quality summary excel file.......")
sumexcFile = GetScalingResultExcelFile(len(scaleRatios), len(DnScaleRatio))
wb = xlsxwriter.Workbook(sumexcFile)
- # to generate rows number of starting of each class: rows_class
- contentsdict, rows_class = CalcRowsClassAndContentDict(ScalQty_startRow,
+ # generate the starting row number of each class: Rows_Class
+ global ContentsDict, Rows_Class
+ ContentsDict, Rows_Class = CalcRowsClassAndContentDict(ScalQty_startRow,
ContentPath, Clips)
sumShts = []
for ratio in scaleRatios:
- sht = GenerateSummarySheet(wb, dnScalAlgos, upScalAlgos, ratio,
- contentsdict, rows_class, path_log)
+ sht = GenerateSummarySheet(wb, dnScalAlgos, upScalAlgos, ratio, path_log)
sumShts.append(sht)
for ratio, sumsht in zip(scaleRatios, sumShts):
- GenerateAverageSheet(wb, sumsht, dnScalAlgos, upScalAlgos, ratio,
- contentsdict, rows_class)
+ GenerateAverageSheet(wb, sumsht, dnScalAlgos, upScalAlgos, ratio)
wb.close()
logger.info("finish saving scaling quality results to excel files.......")
diff --git a/tools/convexhull_framework/src/Utils.py b/tools/convexhull_framework/src/Utils.py
index 632b1a9..ccac884 100644
--- a/tools/convexhull_framework/src/Utils.py
+++ b/tools/convexhull_framework/src/Utils.py
@@ -12,7 +12,6 @@
import os
import re
-import xlrd
import subprocess
import time
import logging
@@ -26,7 +25,7 @@
os.remove(file)
def CreateNewSubfolder(parent, name):
- if name == '' or name == None:
+ if name == '' or name is None:
return None
folder = os.path.join(parent, name)
if not os.path.exists(folder):
@@ -114,27 +113,6 @@
return contentsdict, rows_class
-def SweepScalingAlgosInOneResultFile(infile_path):
- dnscls = []
- upscls = []
- resultfiles = os.listdir(infile_path)
- # here assume all result files includes same combinations of dn and up
- # scaling algos
- file = os.path.join(infile_path, resultfiles[0])
- if os.path.isfile(file):
- rdwb = xlrd.open_workbook(os.path.join(infile_path, resultfiles[0]))
- else:
- return dnscls, upscls
- if rdwb is not None:
- shtnms = rdwb.sheet_names()
- for shtname in shtnms:
- item = re.findall(r"(.+)\-\-(.+)", shtname)
- dnsl = item[0][0]
- upsl = item[0][1]
- dnscls.append(dnsl)
- upscls.append(upsl)
-
- return dnscls, upscls
def CreateChart_Scatter(wb, title, xaxis_name, yaxis_name):
chart = wb.add_chart({'type': 'scatter', 'subtype': 'straight_with_markers'})
diff --git a/tools/convexhull_framework/src/VideoEncoder.py b/tools/convexhull_framework/src/VideoEncoder.py
index 8bbf32c..412e710 100644
--- a/tools/convexhull_framework/src/VideoEncoder.py
+++ b/tools/convexhull_framework/src/VideoEncoder.py
@@ -28,11 +28,14 @@
def EncodeWithAOM_AV1(infile, QP, framenum, framerate, width, height, outfile,
preset):
- args = " --codec=av1 -v --psnr --ivf --frame-parallel=0 --cpu-used=%s" \
- " --limit=%d --auto-alt-ref=1 --passes=2 " \
- "--threads=1 --lag-in-frames=25 --end-usage=q --cq-level=%d" \
- " -w %d -h %d -o %s %s"\
- % (preset, framenum, QP, width, height, outfile, infile)
+ args = " --verbose --codec=av1 -v --psnr --ivf --frame-parallel=0 --cpu-used=%s" \
+ " --limit=%d --auto-alt-ref=1 --passes=1 --end-usage=q --i420" \
+ " --min-gf-interval=16 --max-gf-interval=16 --gf-min-pyr-height=4 " \
+ " --gf-max-pyr-height=4 --threads=1 --lag-in-frames=19 --end-usage=q " \
+ " --kf-min-dist=65 --kf-max-dist=65 --use-fixed-qp-offsets=1 --deltaq-mode=0 " \
+ " --enable-tpl-model=0 --enable-keyframe-filtering=0 " \
+ " --fps=60/1 --input-bit-depth=8 --qp=%d -w %d -h %d -o %s %s"\
+ % (preset, framenum, 4*QP, width, height, outfile, infile)
cmd = AOMENC + args
ExecuteCmd(cmd, LogCmdOnly)
diff --git a/tools/convexhull_framework/src/VideoScaler.py b/tools/convexhull_framework/src/VideoScaler.py
index 2420d1b..51e85c0 100644
--- a/tools/convexhull_framework/src/VideoScaler.py
+++ b/tools/convexhull_framework/src/VideoScaler.py
@@ -31,7 +31,8 @@
#use ffmpeg to do image rescaling for yuv420 8bit
def RescaleWithFfmpeg(infile, inw, inh, outw, outh, algo, outfile, num, app_path):
args = " -y -s:v %dx%d -i %s -vf scale=%dx%d -c:v rawvideo -pix_fmt yuv420p" \
- " -sws_flags %s+accurate_rnd+full_chroma_int -sws_dither none" \
+ " -sws_flags %s+accurate_rnd+full_chroma_int+full_chroma_inp+bitexact"\
+ "+print_info -sws_dither none" \
% (inw, inh, infile, outw, outh, algo)
if (algo == 'lanczos'):
args += " -param0 5 "