-
Notifications
You must be signed in to change notification settings - Fork 27
/
gen_jvet.py
executable file
·221 lines (197 loc) · 8.26 KB
/
gen_jvet.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
#!/usr/bin/env python3
# Fluster - testing framework for decoders conformance
# Copyright (C) 2024, Fluendo, S.A.
# Author: Ruben Sanchez Sanchez <[email protected]>, Fluendo, S.A.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation, either version 3
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <https://www.gnu.org/licenses/>.
import argparse
from html.parser import HTMLParser
import os
import re
import sys
import urllib.request
import multiprocessing
from subprocess import CalledProcessError
# pylint: disable=wrong-import-position
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
from fluster import utils
from fluster.codec import Codec, OutputFormat
from fluster.test_suite import TestSuite, TestVector
# pylint: enable=wrong-import-position
# Root of the ITU server hosting the JVET conformance material.
BASE_URL = "https://www.itu.int/"
# Directory listing the H.266/VVC draft-conformance bitstreams.
H266_URL = BASE_URL + "wftp3/av-arch/jvet-site/bitstream_exchange/VVC/draft_conformance/"
# NOTE: keep the trailing comma when one of the tuples below has a single
# element; without it the parentheses are dropped and utils.find_by_ext()
# would iterate the bare string character by character.
BITSTREAM_EXTS = (".bit", )
MD5_EXTS = (".yuv.md5", )
class HREFParser(HTMLParser):
    """HTML parser that collects absolute links from anchor ``href`` attributes."""

    def __init__(self):
        # Absolute URLs gathered while the parser is fed HTML.
        self.links = []
        super().__init__()

    def error(self, message):
        # Report parser errors on stdout instead of raising.
        print(message)

    def handle_starttag(self, tag, attrs):
        """Record the href of every ``<a>`` tag, prefixed with BASE_URL."""
        if tag != "a":
            return
        for attr_name, attr_value in attrs:
            if attr_name != "href":
                continue
            # Drop a single trailing slash so joining does not produce "//".
            prefix = BASE_URL[:-1] if BASE_URL.endswith("/") else BASE_URL
            self.links.append(prefix + attr_value)
class JVETGenerator:
    """Generates a test suite from the JVET/VVC conformance bitstreams."""

    def __init__(
        self,
        name: str,
        suite_name: str,
        codec: Codec,
        description: str,
        site: str,
        use_ffprobe: bool = False
    ):
        # Remote directory name (e.g. "draft6") appended to `site`.
        self.name = name
        # Basename (without extension) of the generated JSON test-suite file.
        self.suite_name = suite_name
        self.codec = codec
        self.description = description
        # Base URL under which the bitstream listing lives.
        self.site = site
        # Whether to probe each bitstream with ffprobe to detect its pixel format.
        self.use_ffprobe = use_ffprobe

    def generate(self, download, jobs):
        """Generate the test suite and save it to a JSON file.

        :param download: whether to download the bitstreams before processing
        :param jobs: number of parallel download jobs
        """
        output_filepath = self.suite_name + ".json"
        test_suite = TestSuite(
            output_filepath,
            "resources",
            self.suite_name,
            self.codec,
            self.description,
            {},
        )
        hparser = HREFParser()
        print(f"Download list of bitstreams from {self.site + self.name}")
        with urllib.request.urlopen(self.site + self.name) as resp:
            data = str(resp.read())
            hparser.feed(data)
        # The first link points back to the parent directory; skip it.
        for url in hparser.links[1:]:
            file_url = os.path.basename(url)
            name = os.path.splitext(file_url)[0]
            file_input = f"{name}.bin"
            test_vector = TestVector(name, url, "__skip__", file_input, OutputFormat.YUV420P, "")
            test_suite.test_vectors[name] = test_vector

        if download:
            test_suite.download(
                jobs=jobs,
                out_dir=test_suite.resources_dir,
                verify=False,
                extract_all=True,
                keep_file=True,
            )

        for test_vector in test_suite.test_vectors.values():
            dest_dir = os.path.join(
                test_suite.resources_dir, test_suite.name, test_vector.name
            )
            dest_path = os.path.join(dest_dir, os.path.basename(test_vector.source))
            test_vector.input_file = utils.find_by_ext(dest_dir, BITSTREAM_EXTS)
            # Fail early with a clear message: find_by_ext() returns None when no
            # bitstream matches, and the string operations below would otherwise
            # raise a confusing AttributeError before this check was reached.
            if not test_vector.input_file:
                raise Exception(f"Bitstream file not found in {dest_dir}")
            absolute_input_path = test_vector.input_file
            # Store the input path relative to the test-vector directory.
            test_vector.input_file = test_vector.input_file.replace(
                dest_dir + os.sep,
                "",
            )
            test_vector.source_checksum = utils.file_checksum(dest_path)
            if self.use_ffprobe:
                try:
                    ffprobe = utils.normalize_binary_cmd('ffprobe')
                    command = [ffprobe, '-v', 'error', '-strict', '-2',
                               '-select_streams', 'v:0',
                               '-show_entries', 'stream=pix_fmt', '-of',
                               'default=nokey=1:noprint_wrappers=1',
                               absolute_input_path]
                    result = utils.run_command_with_output(command).splitlines()
                    pix_fmt = result[0]
                    test_vector.output_format = OutputFormat[pix_fmt.upper()]
                except KeyError as key_err:
                    # ffprobe reported a pixel format our OutputFormat enum
                    # does not know about.
                    exceptions = {
                        # All below test vectors need to be analysed with respect
                        # to output format, for now remains undetermined
                        "VPS_C_ERICSSON_1": OutputFormat.NONE
                    }
                    if test_vector.name in exceptions:
                        test_vector.output_format = exceptions[test_vector.name]
                    else:
                        raise key_err
                except CalledProcessError as proc_err:
                    exceptions = {
                        # All below test vectors cause ffprobe to crash
                        "MNUT_A_Nokia_3": OutputFormat.NONE,
                        "MNUT_B_Nokia_2": OutputFormat.NONE,
                        "SUBPIC_C_ERICSSON_1": OutputFormat.NONE,
                        "SUBPIC_D_ERICSSON_1": OutputFormat.NONE
                    }
                    if test_vector.name in exceptions:
                        test_vector.output_format = exceptions[test_vector.name]
                    else:
                        raise proc_err
            self._fill_checksum_h266(test_vector, dest_dir)

        test_suite.to_json_file(output_filepath)
        print("Generate new test suite: " + test_suite.name + ".json")

    @staticmethod
    def _fill_checksum_h266(test_vector, dest_dir):
        """Fill test_vector.result with the MD5 found in the .yuv.md5 file in dest_dir."""
        checksum_file = utils.find_by_ext(dest_dir, MD5_EXTS)
        if checksum_file is None:
            raise Exception("MD5 not found")
        with open(checksum_file, "r") as md5_fh:
            # A hex digest at the start of a line; {32,} (not {32}) so that an
            # over-long token is caught by the assert below instead of being
            # silently truncated.
            regex = re.compile(r"([a-fA-F0-9]{32,})")
            lines = md5_fh.readlines()
            # Filter out empty lines
            filtered_lines = [line.strip() for line in lines if line.strip()]
            # Take the first line that starts with an MD5-like token
            match = next((regex.match(line) for line in filtered_lines if regex.match(line)), None)
            if match:
                test_vector.result = match.group(1).lower()
                # Assert that we have extracted a valid MD5 from the file
                assert len(test_vector.result) == 32 and re.search(
                    r"^[a-fA-F0-9]{32}$",
                    test_vector.result) is not None, f"{test_vector.result} is not a valid MD5 hash"
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--skip-download",
        # Fixed help text: this flag skips downloading the bitstreams; the old
        # text ("skip extracting tarball") described the wrong action.
        help="skip downloading the conformance bitstreams",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "-j",
        "--jobs",
        help="number of parallel jobs to use. 2x logical cores by default",
        type=int,
        default=2 * multiprocessing.cpu_count(),
    )
    args = parser.parse_args()
    generator = JVETGenerator(
        'draft6',
        'JVET-VVC_draft6',
        Codec.H266,
        'JVET VVC draft6',
        H266_URL,
        True,
    )
    generator.generate(not args.skip_download, args.jobs)