# coding=utf-8
# Copyright (c) 2014, 2016-2017, 2019-2020 Intel Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

""" Module providing json backend for piglit """

import collections
import functools
import os
import shutil
import sys

try:
    import simplejson as json
except ImportError:
    import json

try:
    import jsonstreams
    _STREAMS = True
except ImportError:
    _STREAMS = False

from framework import status, results, exceptions
from .abstract import FileBackend, write_compressed
from .register import Registry
from . import compression

__all__ = [
    'REGISTRY',
    'JSONBackend',
]

# The current version of the JSON results
CURRENT_JSON_VERSION = 10

# The minimum JSON results version supported
MINIMUM_SUPPORTED_VERSION = 7

# The indent level for the final combined file
INDENT = 4


def piglit_encoder(obj):
    """ Encoder for piglit that can transform additional classes into json

    Adds support for status.Status objects and for set() instances

    """
    if isinstance(obj, status.Status):
        return str(obj)
    elif isinstance(obj, set):
        return list(obj)
    elif hasattr(obj, 'to_json'):
        return obj.to_json()
    return obj
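
# A minimal usage sketch (assumed example, for illustration only): this
# function is meant to be passed as the ``default`` hook of json.dump /
# json.dumps, as the calls below in this module do, e.g.
#
#     json.dumps({'skips': {'a', 'b'}}, default=piglit_encoder)
#
# Objects the stock encoder cannot serialize are routed through
# piglit_encoder, so a set becomes a list and a status.Status becomes its
# string form.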


class JSONBackend(FileBackend):
    """ Piglit's native JSON backend

    This writes out piglit's native json results format, using either the
    stdlib json module or simplejson when it is available.

    This class is atomic: writes either completely fail or completely
    succeed. To achieve this it writes individual files for each test and for
    the metadata, and composes them at the end into a single file, removing
    the intermediate files. If a per-test file cannot be read while composing,
    it is simply ignored, which keeps the final result consistent.

    """
    _file_extension = 'json'
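
    # Rough sketch of the on-disk layout this backend works with while a run
    # is in progress (the numeric snippet names come from the FileBackend
    # counter and are shown here only as an assumed example):
    #
    #   <dest>/metadata.json     written by initialize()
    #   <dest>/tests/0.json      one snippet per finished test
    #   <dest>/tests/1.json
    #   ...
    #
    # finalize() composes these into <dest>/results.json and then removes
    # metadata.json and the tests/ directory.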

    def initialize(self, metadata):
        """ Write boilerplate json code

        This writes all of the json except the actual tests.

        Arguments:
        metadata -- a dictionary of values to be written

        """
        # If metadata is None then this is a loaded result and there is no
        # need to initialize
        metadata['results_version'] = CURRENT_JSON_VERSION

        with open(os.path.join(self._dest, 'metadata.json'), 'w') as f:
            json.dump(metadata, f, default=piglit_encoder)

            # Always flush the metadata to the disk
            f.flush()
            os.fsync(f.fileno())

        # make the directory for the tests
        try:
            os.mkdir(os.path.join(self._dest, 'tests'))
        except OSError:
            pass

    def finalize(self, metadata=None):
        """ End json serialization and cleanup

        This method is called after all of the tests are written; it closes
        any containers that are still open and writes the final file.

        """
        tests_dir = os.path.join(self._dest, 'tests')
        file_list = sorted(
            (f for f in os.listdir(tests_dir) if f.endswith('.json')),
            key=lambda p: int(os.path.splitext(p)[0]))

        # If jsonstreams is not present then build a complete tree of all of
        # the data and write it with json.dump
        if not _STREAMS:
            # Create a dictionary that is full of data to be written to a
            # single file
            data = collections.OrderedDict()

            # Load the metadata and put it into a dictionary
            with open(os.path.join(self._dest, 'metadata.json'), 'r') as f:
                data.update(json.load(f))

            # If there is more metadata add it to the dictionary
            if metadata:
                data.update(metadata)

            # Add the tests to the dictionary
            data['tests'] = collections.OrderedDict()

            for test in file_list:
                test = os.path.join(tests_dir, test)
                if os.path.isfile(test):
                    # Try to open the json snippets. If a snippet cannot be
                    # read, throw it out. This gives us atomic writes: either
                    # the write worked and is valid, or it didn't happen at
                    # all.
                    try:
                        with open(test, 'r') as f:
                            data['tests'].update(json.load(f))
                    except ValueError:
                        pass

            if not data['tests']:
                raise exceptions.PiglitUserError(
                    'No tests were run, not writing a result file',
                    exitcode=2)

            data = results.TestrunResult.from_dict(data)

            # Write out the combined file, using the compression writer from
            # the FileBackend
            with self._write_final(os.path.join(self._dest, 'results.json')) as f:
                json.dump(data, f, default=piglit_encoder, indent=INDENT)

        # Otherwise use jsonstreams to write the final dictionary. This uses
        # an external library, but is slightly faster and uses considerably
        # less memory than building a complete tree.
        else:
            encoder = functools.partial(json.JSONEncoder, default=piglit_encoder)

            with self._write_final(os.path.join(self._dest, 'results.json')) as f:
                with jsonstreams.Stream(jsonstreams.Type.OBJECT, fd=f, indent=4,
                                        encoder=encoder, pretty=True) as s:
                    s.write('__type__', 'TestrunResult')
                    with open(os.path.join(self._dest, 'metadata.json'),
                              'r') as n:
                        s.iterwrite(json.load(n, object_pairs_hook=collections.OrderedDict).items())

                    if metadata:
                        s.iterwrite(metadata.items())

                    with s.subobject('tests') as t:
                        wrote = False
                        for test in file_list:
                            test = os.path.join(tests_dir, test)
                            if os.path.isfile(test):
                                try:
                                    with open(test, 'r') as f:
                                        a = json.load(f)
                                except ValueError:
                                    continue

                                t.iterwrite(a.items())
                                wrote = True

                        if not wrote:
                            raise exceptions.PiglitUserError(
                                'No tests were run.',
                                exitcode=2)

        # Delete the temporary files
        os.unlink(os.path.join(self._dest, 'metadata.json'))
        shutil.rmtree(os.path.join(self._dest, 'tests'))

    @staticmethod
    def _write(f, name, data):
        json.dump({name: data}, f, default=piglit_encoder)
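
    # Each per-test snippet written by _write is a one-key JSON object mapping
    # the test name to its serialized result, e.g. (test name and fields are
    # assumed, purely illustrative):
    #
    #     {"spec/some-group/some-test": {"result": "pass", ...}}
    #
    # finalize() merges these one-key objects into the final 'tests' mapping.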


def load_results(filename, compression_):
    """ Loader function for TestrunResult class

    This function takes a results file or a results directory. If it is given
    a directory it either resumes from the intermediate metadata.json and
    per-test snippets, or loads the finished "results.json" (possibly
    compressed) found inside it.

    """
    # This will load any file or file-like thing. That would include pipes and
    # file descriptors
    if not os.path.isdir(filename):
        filepath = filename
    elif (os.path.exists(os.path.join(filename, 'metadata.json')) and
          not os.path.exists(os.path.join(
              filename, 'results.json.' + compression_))):
        # We want to hit this path only if there isn't a
        # results.json.<compression>, since otherwise we'll continually
        # regenerate values that we don't need to.
        return _resume(filename)
    else:
        # Look for a compressed result first, then a bare result.
        for name in ['results.json.{}'.format(compression_), 'results.json']:
            if os.path.exists(os.path.join(filename, name)):
                filepath = os.path.join(filename, name)
                break
        else:
            raise exceptions.PiglitFatalError(
                'No results found in "{}" (compression: {})'.format(
                    filename, compression_))

    assert compression_ in compression.COMPRESSORS, \
        'unsupported compression type'

    with compression.DECOMPRESSORS[compression_](filepath) as f:
        testrun = _load(f)

    return results.TestrunResult.from_dict(_update_results(testrun, filepath))
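
# Typical use (the path and compression value are assumed, for illustration
# only): loading a finished run directory with compressed results,
#
#     testrun = load_results('results/my-run', 'bz2')
#
# returns a results.TestrunResult built from the (possibly updated) data.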


def set_meta(results):
    """Set json specific metadata on a TestrunResult."""
    results.results_version = CURRENT_JSON_VERSION


def _load(results_file):
    """Load a json results instance and return a TestrunResult.

    This function converts an existing, fully completed json run.

    """
    try:
        result = json.load(results_file, object_pairs_hook=collections.OrderedDict)
    except ValueError as e:
        raise exceptions.PiglitFatalError(
            'While loading json results file: "{}",\n'
            'the following error occurred:\n{}'.format(results_file.name,
                                                       str(e)))

    return result


def _resume(results_dir):
    """Loads a partially completed json results directory."""
    # Pylint can't infer that the json being loaded is a dict
    # pylint: disable=maybe-no-member
    assert os.path.isdir(results_dir), \
        "TestrunResult.resume() requires a directory"

    # Load the metadata
    with open(os.path.join(results_dir, 'metadata.json'), 'r') as f:
        meta = json.load(f)
    assert meta['results_version'] == CURRENT_JSON_VERSION, \
        "Old results version, resume impossible"

    meta['tests'] = collections.OrderedDict()

    # Load all of the test names and add them to the test list
    tests_dir = os.path.join(results_dir, 'tests')
    file_list = sorted(
        (l for l in os.listdir(tests_dir) if l.endswith('.json')),
        key=lambda p: int(os.path.splitext(p)[0]))

    for file_ in file_list:
        with open(os.path.join(tests_dir, file_), 'r') as f:
            try:
                meta['tests'].update(json.load(f))
            except ValueError:
                continue

    return results.TestrunResult.from_dict(meta)


def _update_results(results, filepath):
    """ Update results to the latest version

    This function is a wrapper for other update_* functions, providing
    incremental updates from one version to another.

    Arguments:
    results -- a TestrunResult instance
    filepath -- the name of the file that the TestrunResult instance was
                created from

    """

    def loop_updates(results):
        """ Helper to select the proper update sequence """
        # Python lacks a switch statement, the workaround is to use a
        # dictionary
        updates = {
            7: _update_seven_to_eight,
            8: _update_eight_to_nine,
            9: _update_nine_to_ten,
        }

        while results['results_version'] < CURRENT_JSON_VERSION:
            results = updates[results['results_version']](results)

        return results

    if results['results_version'] < MINIMUM_SUPPORTED_VERSION:
        raise exceptions.PiglitFatalError(
            'Unsupported version "{}", '
            'minimum supported version is "{}"'.format(
                results['results_version'], MINIMUM_SUPPORTED_VERSION))

    # If the results version is the current version there is no need to
    # update, just return the results
    if results['results_version'] == CURRENT_JSON_VERSION:
        return results

    results = loop_updates(results)

    # Move the old results aside, and write the current results
    try:
        os.rename(filepath, filepath + '.old')
        write_results(results, filepath)
    except OSError:
        print("WARNING: Could not write updated results {}".format(filepath),
              file=sys.stderr)

    return results
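
# A worked example of the update chain (version numbers only, illustrative):
# a version 7 result dict is passed through _update_seven_to_eight, then
# _update_eight_to_nine, then _update_nine_to_ten, at which point
# results['results_version'] == CURRENT_JSON_VERSION (10) and the loop stops.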


def write_results(results, file_):
    """Write the values of the results out to a file."""
    with write_compressed(file_) as f:
        json.dump(results, f, default=piglit_encoder, indent=INDENT)

    return True


def _update_seven_to_eight(result):
    """Update json results from version 7 to 8.

    This update replaces the time attribute float with a TimeAttribute object,
    which stores a start time and an end time, and provides methods for
    getting total and delta.

    This value is used for both TestResult.time and TestrunResult.time_elapsed.

    """
    for test in result['tests'].values():
        test['time'] = {'start': 0.0, 'end': float(test['time']),
                        '__type__': 'TimeAttribute'}

    result['time_elapsed'] = {'start': 0.0,
                              'end': float(result['time_elapsed']),
                              '__type__': 'TimeAttribute'}

    result['results_version'] = 8

    return result
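
# Illustrative before/after for this update (the field value is assumed):
#
#     before: {"time": 1.5}
#     after:  {"time": {"start": 0.0, "end": 1.5,
#                       "__type__": "TimeAttribute"}}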


def _update_eight_to_nine(result):
    """Update json results from version 8 to 9.

    This changes the PID field of the TestResult object to a list of integers
    or null rather than a single integer or null.

    """
    for test in result['tests'].values():
        if 'pid' in test:
            test['pid'] = [test['pid']]
        else:
            test['pid'] = []

    result['results_version'] = 9

    return result
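
# Illustrative before/after for this update (the pid value is assumed):
#
#     before: {"pid": 1234}          after: {"pid": [1234]}
#     before: {}  (no pid field)     after: {"pid": []}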


def _update_nine_to_ten(result):
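    """Update json results from version 9 to 10.

    Moves the top-level system information fields (glxinfo, wglinfo, clinfo,
    lspci, uname) under a new 'info' -> 'system' mapping, dropping any that
    are empty.
    """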
    result['info'] = {}
    result['info']['system'] = {}
    for e in ['glxinfo', 'wglinfo', 'clinfo', 'lspci', 'uname']:
        r = result.pop(e)
        if r:
            result['info']['system'][e] = r

    result['results_version'] = 10

    return result


REGISTRY = Registry(
    extensions=['.json'],
    backend=JSONBackend,
    load=load_results,
    meta=set_meta,
    write=write_results,
)