ext_transform_feedback: document missing mode in usage
[piglit.git] / framework / backends / json.py
blobbb7d75efe267e09ee884ffa00c242b13459458c4
1 # coding=utf-8
2 # Copyright (c) 2014, 2016-2017, 2019-2020 Intel Corporation
4 # Permission is hereby granted, free of charge, to any person obtaining a copy
5 # of this software and associated documentation files (the "Software"), to deal
6 # in the Software without restriction, including without limitation the rights
7 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8 # copies of the Software, and to permit persons to whom the Software is
9 # furnished to do so, subject to the following conditions:
11 # The above copyright notice and this permission notice shall be included in
12 # all copies or substantial portions of the Software.
14 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
20 # SOFTWARE.
22 """ Module providing json backend for piglit """
24 import collections
25 import functools
26 import os
27 import shutil
28 import sys
30 import json
32 try:
33 import jsonstreams
34 _STREAMS = True
35 except ImportError:
36 _STREAMS = False
38 from framework import status, results, exceptions
39 from .abstract import FileBackend, write_compressed
40 from .register import Registry
41 from . import compression
# Public API of this module; the `]` closing this list was missing,
# which made the module unparseable.
__all__ = [
    'REGISTRY',
    'JSONBackend',
]

# The current version of the JSON results
CURRENT_JSON_VERSION = 10

# The minimum JSON format supported
MINIMUM_SUPPORTED_VERSION = 7

# The level to indent a final file
INDENT = 4
def piglit_encoder(obj):
    """JSON ``default`` hook that knows piglit's extra types.

    status.Status values are serialized as their string form, sets as
    lists, and any object exposing a to_json() method via that method.
    Anything else is handed back unchanged for the json module to deal
    with.
    """
    if isinstance(obj, status.Status):
        return str(obj)
    if isinstance(obj, set):
        return list(obj)
    # Duck-typed hook: objects that know how to serialize themselves.
    to_json = getattr(obj, 'to_json', None)
    if to_json is not None:
        return to_json()
    return obj
class JSONBackend(FileBackend):
    """ Piglit's native JSON backend

    This writes out to piglit's native json backend. This class uses the python
    json module.

    This class is atomic, writes either completely fail or completely succeed.
    To achieve this it writes individual files for each test and for the
    metadata, and composes them at the end into a single file and removes the
    intermediate files. When it tries to compose these files if it cannot read
    a file it just ignores it, making the result atomic.

    """
    # Per-test snippet files are written with this extension (used by the
    # FileBackend machinery).
    _file_extension = 'json'

    def initialize(self, metadata):
        """ Write boilerplate json code

        This writes all of the json except the actual tests.

        Arguments:
        metadata -- a dictionary of values to be written

        """
        # If metadata is None then this is a loaded result and there is no need
        # to initialize
        metadata['results_version'] = CURRENT_JSON_VERSION

        with open(os.path.join(self._dest, 'metadata.json'), 'w') as f:
            json.dump(metadata, f, default=piglit_encoder)

            # Flush the metadata to the disk, always
            f.flush()
            os.fsync(f.fileno())

        # make the directory for the tests
        try:
            os.mkdir(os.path.join(self._dest, 'tests'))
        except OSError:
            # Directory already exists (e.g. a resumed run); that is fine.
            pass

    def finalize(self, metadata=None):
        """ End json serialization and cleanup

        This method is called after all of tests are written, it closes any
        containers that are still open and closes the file

        """
        tests_dir = os.path.join(self._dest, 'tests')
        # Per-test snippets are named by an integer counter; sort numerically
        # so tests appear in run order in the final file.
        file_list = sorted(
            (f for f in os.listdir(tests_dir) if f.endswith('.json')),
            key=lambda p: int(os.path.splitext(p)[0]))

        # If jsonstreams is not present then build a complete tree of all of
        # the data and write it with json.dump
        if not _STREAMS:
            # Create a dictionary that is full of data to be written to a
            # single file
            data = collections.OrderedDict()

            # Load the metadata and put it into a dictionary
            with open(os.path.join(self._dest, 'metadata.json'), 'r') as f:
                data.update(json.load(f))

            # If there is more metadata add it the dictionary
            if metadata:
                data.update(metadata)

            # Add the tests to the dictionary
            data['tests'] = collections.OrderedDict()

            for test in file_list:
                test = os.path.join(tests_dir, test)
                if os.path.isfile(test):
                    # Try to open the json snippets. If we fail to open a test
                    # then throw the whole thing out. This gives us atomic
                    # writes, the writing worked and is valid or it didn't
                    # work.
                    try:
                        with open(test, 'r') as f:
                            data['tests'].update(json.load(f))
                    except ValueError:
                        pass

            if not data['tests']:
                raise exceptions.PiglitUserError(
                    'No tests were run, not writing a result file',
                    exitcode=2)

            data = results.TestrunResult.from_dict(data)

            # write out the combined file. Use the compression writer from the
            # FileBackend
            with self._write_final(os.path.join(self._dest, 'results.json')) as f:
                json.dump(data, f, default=piglit_encoder, indent=INDENT)

        # Otherwise use jsonstreams to write the final dictionary. This uses an
        # external library, but is slightly faster and uses considerably less
        # memory that building a complete tree.
        else:
            encoder = functools.partial(json.JSONEncoder, default=piglit_encoder)

            with self._write_final(os.path.join(self._dest, 'results.json')) as f:
                with jsonstreams.Stream(jsonstreams.Type.OBJECT, fd=f, indent=4,
                                        encoder=encoder, pretty=True) as s:
                    s.write('__type__', 'TestrunResult')
                    with open(os.path.join(self._dest, 'metadata.json'),
                              'r') as n:
                        s.iterwrite(json.load(n, object_pairs_hook=collections.OrderedDict).items())

                    if metadata:
                        s.iterwrite(metadata.items())

                    with s.subobject('tests') as t:
                        wrote = False
                        for test in file_list:
                            test = os.path.join(tests_dir, test)
                            if os.path.isfile(test):
                                # Skip unreadable snippets, same as the
                                # non-streaming path above.
                                try:
                                    with open(test, 'r') as f:
                                        a = json.load(f)
                                except ValueError:
                                    continue

                                t.iterwrite(a.items())
                                wrote = True

                    if not wrote:
                        raise exceptions.PiglitUserError(
                            'No tests were run.',
                            exitcode=2)

        # Delete the temporary files
        os.unlink(os.path.join(self._dest, 'metadata.json'))
        shutil.rmtree(os.path.join(self._dest, 'tests'))

    @staticmethod
    def _write(f, name, data):
        """Write one test result as a single-key json object to *f*."""
        json.dump({name: data}, f, default=piglit_encoder)
def load_results(filename, compression_):
    """ Loader function for TestrunResult class

    This function takes a single argument of a results file.

    It makes quite a few assumptions, first it assumes that it has been passed
    a folder, if that fails then it looks for a plain text json file called
    "main"

    """
    # This will load any file or file-like thing. That would include pipes and
    # file descriptors
    if not os.path.isdir(filename):
        # Not a directory: treat the argument as the results file itself.
        filepath = filename
    elif (os.path.exists(os.path.join(filename, 'metadata.json'))
            and not os.path.exists(os.path.join(
                filename, 'results.json.' + compression_))):
        # We want to hit this path only if there isn't a
        # results.json.<compressions>, since otherwise we'll continually
        # regenerate values that we don't need to.
        return _resume(filename)
    else:
        # Look for a compressed result first, then a bare result.
        candidates = ('results.json.{}'.format(compression_), 'results.json')
        filepath = next(
            (os.path.join(filename, name) for name in candidates
             if os.path.exists(os.path.join(filename, name))),
            None)
        if filepath is None:
            raise exceptions.PiglitFatalError(
                'No results found in "{}" (compression: {})'.format(
                    filename, compression_))

    assert compression_ in compression.COMPRESSORS, \
        'unsupported compression type'

    with compression.DECOMPRESSORS[compression_](filepath) as f:
        testrun = _load(f)

    return results.TestrunResult.from_dict(_update_results(testrun, filepath))
def set_meta(results):
    """Set json specific metadata on a TestrunResult."""
    # Stamp the run with the format version this backend writes.
    setattr(results, 'results_version', CURRENT_JSON_VERSION)
260 def _load(results_file):
261 """Load a json results instance and return a TestrunResult.
263 This function converts an existing, fully completed json run.
266 try:
267 result = json.load(results_file, object_pairs_hook=collections.OrderedDict)
268 except ValueError as e:
269 raise exceptions.PiglitFatalError(
270 'While loading json results file: "{}",\n'
271 'the following error occurred:\n{}'.format(results_file.name,
272 str(e)))
274 return result
def _resume(results_dir):
    """Reassemble a partially completed json results directory.

    Reads metadata.json plus every per-test snippet under tests/ (in
    numeric filename order, i.e. run order) and builds a TestrunResult
    from the combination.  Snippets that are not valid json (interrupted
    writes) are silently skipped.
    """
    # Pylint can't infer that the json being loaded is a dict
    # pylint: disable=maybe-no-member
    assert os.path.isdir(results_dir), \
        "TestrunResult.resume() requires a directory"

    # Pull in the run metadata first; resume only works for results
    # written by the current format version.
    with open(os.path.join(results_dir, 'metadata.json'), 'r') as f:
        meta = json.load(f)
    assert meta['results_version'] == CURRENT_JSON_VERSION, \
        "Old results version, resume impossible"

    meta['tests'] = collections.OrderedDict()

    # Merge each per-test snippet, ordered by its numeric filename.
    tests_dir = os.path.join(results_dir, 'tests')
    snippets = sorted(
        (name for name in os.listdir(tests_dir) if name.endswith('.json')),
        key=lambda name: int(os.path.splitext(name)[0]))
    for snippet in snippets:
        with open(os.path.join(tests_dir, snippet), 'r') as f:
            try:
                meta['tests'].update(json.load(f))
            except ValueError:
                continue

    return results.TestrunResult.from_dict(meta)
def _update_results(results, filepath):
    """ Update results to the latest version

    This function applies the incremental update_* converters one after
    another until the data reaches CURRENT_JSON_VERSION, then tries to
    persist the updated data (keeping the original as "<filepath>.old").

    Arguments:
    results -- a TestrunResults instance
    filepath -- the name of the file that the Testrunresults instance was
                created from

    """
    version = results['results_version']

    if version < MINIMUM_SUPPORTED_VERSION:
        raise exceptions.PiglitFatalError(
            'Unsupported version "{}", '
            'minimum supported version is "{}"'.format(
                version, MINIMUM_SUPPORTED_VERSION))

    # If the results version is the current version there is no need to
    # update, just return the results
    if version == CURRENT_JSON_VERSION:
        return results

    # Dispatch table mapping a version to the converter that lifts it by
    # one step; apply converters until the data is current.
    updates = {
        7: _update_seven_to_eight,
        8: _update_eight_to_nine,
        9: _update_nine_to_ten,
    }
    while results['results_version'] < CURRENT_JSON_VERSION:
        results = updates[results['results_version']](results)

    # Move the old results, and write the current results
    try:
        os.rename(filepath, filepath + '.old')
        write_results(results, filepath)
    except OSError:
        print("WARNING: Could not write updated results {}".format(filepath),
              file=sys.stderr)

    return results
def write_results(results, file_):
    """Dump *results* as indented json into *file_*.

    File handling (including compression) is delegated to the
    write_compressed context manager; always returns True.
    """
    with write_compressed(file_) as f:
        json.dump(results, f, default=piglit_encoder, indent=INDENT)

    return True
368 def _update_seven_to_eight(result):
369 """Update json results from version 7 to 8.
371 This update replaces the time attribute float with a TimeAttribute object,
372 which stores a start time and an end time, and provides methods for getting
373 total and delta.
375 This value is used for both TestResult.time and TestrunResult.time_elapsed.
378 for test in result['tests'].values():
379 test['time'] = {'start': 0.0, 'end': float(test['time']),
380 '__type__': 'TimeAttribute'}
382 result['time_elapsed'] = {'start': 0.0, 'end':
383 float(result['time_elapsed']),
384 '__type__': 'TimeAttribute'}
386 result['results_version'] = 8
388 return result
391 def _update_eight_to_nine(result):
392 """Update json results from version 8 to 9.
394 This changes the PID field of the TestResult object to a list of Integers
395 or null rather than a single integer or null.
398 for test in result['tests'].values():
399 if 'pid' in test:
400 test['pid'] = [test['pid']]
401 else:
402 test['pid'] = []
404 result['results_version'] = 9
406 return result
409 def _update_nine_to_ten(result):
410 result['info'] = {}
411 result['info']['system'] = {}
412 for e in ['glxinfo', 'wglinfo', 'clinfo', 'lspci', 'uname']:
413 r = result.pop(e)
414 if r:
415 result['info']['system'][e] = r
417 result['results_version'] = 10
419 return result
# Register this backend with piglit's backend framework.  The Registry
# call was never closed (missing ')'), which made the module
# unparseable.
REGISTRY = Registry(
    extensions=['.json'],
    backend=JSONBackend,
    load=load_results,
    meta=set_meta,
    write=write_results,
)