# coding=utf-8

# Copyright (c) 2015-2016, 2019 Intel Corporation
# Copyright © 2020 Valve Corporation.

# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:

# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.

# SPDX-License-Identifier: MIT
26 """Stores global replay options.
28 This is as close to a true global function as python gets.
30 """
32 import sys
33 from urllib.parse import urlparse
35 __all__ = ['OPTIONS']
37 # pylint: disable=too-few-public-methods


def _safe_urlparse(url):
    if url:
        try:
            parsed_url = urlparse(url)
            return parsed_url
        except Exception as e:
            print(e, file=sys.stderr)

    return None
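
# Illustration of what _safe_urlparse hands back (this simply mirrors the
# documented behavior of urllib.parse.urlparse; the URL below is a made-up
# example, not a real piglit endpoint):
#
#     _safe_urlparse('https://traces.example.net/db')
#     -> ParseResult(scheme='https', netloc='traces.example.net',
#                    path='/db', params='', query='', fragment='')
#     _safe_urlparse(None)
#     -> None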


class _Options(object):  # pylint: disable=too-many-instance-attributes
    """Contains all options for a replay run.

    This is used as a sort of global state object.

    Options are as follows:
    device_name -- The device against which we are replaying and checking.
    keep_image -- Whether to always keep the dumped images or not.
    db_path -- The path to the objects db or where it will be created.
    results_path -- The path in which to place the results.
    download.url -- The URL from which to download the files.
    download.caching_proxy_url -- The URL of the caching proxy acting as
                                  a prefix for download.url
    download.force -- Forces downloading even if the destination file already
                      exists.
    download.minio_host -- Name of the MinIO server from which to download traces
    download.minio_bucket -- Name of the bucket in the MinIO server containing the traces
    download.role_session_name -- Role session name for authentication with MinIO
    download.jwt -- JWT token for authentication with MinIO
    """

    def __init__(self):
        self.device_name = None
        self.keep_image = False
        self.db_path = None
        self.results_path = None
        self.download = {'url': None,
                         'caching_proxy_url': None,
                         'force': False,
                         'minio_host': '',
                         'minio_bucket': '',
                         'role_session_name': '',
                         'jwt': ''}

    def clear(self):
        """Reinitialize all values to defaults."""
        self.__init__()

    def set_download_url(self, url):
        """Safely set the parsed download url."""
        self.download['url'] = _safe_urlparse(url)

    def set_download_caching_proxy_url(self, url):
        """Safely set the parsed download caching proxy url."""
        self.download['caching_proxy_url'] = _safe_urlparse(url)

    def __iter__(self):
        for key, values in self.__dict__.items():
            if not key.startswith('_'):
                yield key, values


OPTIONS = _Options()
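
# Minimal usage sketch (illustrative only; the device name and URL below are
# made-up placeholders, not values shipped with piglit). Callers import the
# shared OPTIONS instance and mutate it before starting a replay run:
#
#     from framework.replay.options import OPTIONS
#
#     OPTIONS.device_name = 'some-device'        # hypothetical device name
#     OPTIONS.set_download_url('https://traces.example.com/')
#     OPTIONS.download['force'] = True
#
#     for key, value in OPTIONS:                 # __iter__ skips private attrs
#         print(key, value)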