Work around missing seriesIndex API by using keyword=index
[python-iview.git] / iview / utils.py
blobeb172d969af489c5af076631e37a68f1acb2ceac
1 import zlib
2 from io import BufferedIOBase
3 from io import SEEK_CUR, SEEK_END
4 import urllib.request
5 import http.client
6 from errno import EPIPE, ESHUTDOWN, ENOTCONN, ECONNRESET
7 import builtins
8 from urllib.parse import urlsplit
# Compatibility shim: these exception classes were added in Python 3.3.
# On older interpreters, install placeholder classes into this module's
# namespace so that "except" clauses naming them still work.  The
# placeholders are never raised by the standard library, so those clauses
# simply never match; separate errno checks (below) handle the pre-3.3 case.
py3p3_exceptions = ("ConnectionError", "ConnectionRefusedError",
    "ConnectionAbortedError")
for name in py3p3_exceptions:
    if not hasattr(builtins, name):  # Python < 3.3
        class DummyException(EnvironmentError):
            pass
        globals()[name] = DummyException

# "errno" values that indicate the remote end of a connection has gone away
DISCONNECTION_ERRNOS = {EPIPE, ESHUTDOWN, ENOTCONN, ECONNRESET}
def xml_text_elements(parent, namespace=""):
    """Extract the text of child elements into a dict()
    
    Each key is the tag name of a child of the given parent element, and
    the value is that child's text (the empty string when the child has
    no text).  Only children without attributes are included.  If the
    "namespace" parameter is given, it should be an XML namespace wrapped
    in curly brackets {. . .}, and only tags in that namespace are
    included; the namespace prefix is stripped from the keys."""
    result = dict()
    for element in parent:
        # Skip elements carrying attributes and those outside the namespace
        if not element.keys() and element.tag.startswith(namespace):
            key = element.tag[len(namespace):]
            result[key] = element.text or ""
    return result
def read_int(stream, size):
    """Read exactly "size" bytes and decode them as a big-endian integer
    
    Raises EOFError (via read_strict) if the stream ends early."""
    return int.from_bytes(read_strict(stream, size), "big")
def read_string(stream):
    """Read a NUL-terminated string from "stream"
    
    Returns the bytes before the terminator as a bytearray, with the
    NUL itself consumed but not included.  Raises EOFError (via
    read_strict) if the stream ends before a NUL is seen."""
    result = bytearray()
    while True:
        byte = read_strict(stream, 1)
        if byte == b"\x00":
            return result
        result += byte
def read_strict(stream, size):
    """Read exactly "size" bytes from "stream", or raise EOFError"""
    chunk = stream.read(size)
    if len(chunk) != size:
        raise EOFError()
    return chunk
class CounterWriter(BufferedIOBase):
    """Wrapper around a writable stream that counts the bytes written
    
    The running total is available both as the "length" attribute and
    via tell()."""
    
    def __init__(self, output):
        self.length = 0  # total bytes passed to write() so far
        self.output = output
    
    def write(self, b):
        count = len(b)
        self.length = self.length + count
        return self.output.write(b)
    
    def tell(self):
        return self.length
class ZlibDecompressorWriter(BufferedIOBase):
    """Writable stream that zlib-decompresses data into another stream
    
    Extra positional and keyword arguments are passed on to
    zlib.decompressobj().  "buffer_size" limits how much decompressed
    data is produced per decompress() call, bounding memory use."""
    
    def __init__(self, output, *pos, buffer_size=0x10000, **kw):
        self.output = output
        self.buffer_size = buffer_size
        self.decompressor = zlib.decompressobj(*pos, **kw)
    
    def write(self, b):
        # Decompress in bounded chunks; unconsumed_tail holds input not
        # yet processed because of the max_length limit
        while b:
            data = self.decompressor.decompress(b, self.buffer_size)
            self.output.write(data)
            b = self.decompressor.unconsumed_tail
    
    def close(self):
        # Bug fix: flush() returns any remaining buffered decompressed
        # data; the previous version discarded its return value, which
        # could silently truncate the output
        data = self.decompressor.flush()
        if data:
            self.output.write(data)
class TeeWriter(BufferedIOBase):
    """Writable stream that duplicates each write to every given output"""
    
    def __init__(self, *outputs):
        self.outputs = outputs
    
    def write(self, b):
        for sink in self.outputs:
            sink.write(b)
def streamcopy(input, output, length):
    """Copy exactly "length" bytes from "input" to "output"
    
    Data is moved in chunks of at most 64 KiB.  Raises EOFError (via
    read_strict) if "input" runs out early."""
    assert length >= 0
    remaining = length
    while remaining:
        chunk = read_strict(input, min(remaining, 0x10000))
        output.write(chunk)
        remaining -= len(chunk)
def fastforward(stream, offset):
    """Advance "stream" by "offset" bytes
    
    Seeks when the stream supports it (raising EOFError if the target is
    past the end); otherwise reads and discards the data, where a short
    read raises EOFError via read_strict."""
    assert offset >= 0
    if not stream.seekable():
        remaining = offset
        while remaining:
            discarded = read_strict(stream, min(remaining, 0x10000))
            remaining -= len(discarded)
        return
    target = stream.seek(offset, SEEK_CUR)
    end = stream.seek(0, SEEK_END)
    if target > end:
        raise EOFError()
    stream.seek(target)
class WritingReader(BufferedIOBase):
    """Filter for a reader stream that copies everything read to a writer"""
    
    def __init__(self, reader, writer):
        self.reader = reader
        self.writer = writer
    
    def read(self, n):
        chunk = self.reader.read(n)
        self.writer.write(chunk)
        return chunk
def setitem(dict, key):
    """Decorator that registers the decorated object in "dict" under "key"
    
    The object itself is returned unchanged, so the decorator is
    otherwise transparent."""
    def register(obj):
        dict[key] = obj
        return obj
    return register
class PersistentConnectionHandler(urllib.request.BaseHandler):
    """URL handler for HTTP persistent connections
    
    connection = PersistentConnectionHandler()
    session = urllib.request.build_opener(connection)
    
    # First request opens connection
    with session.open("http://localhost/one") as response:
        response.read()
    
    # Subsequent requests reuse the existing connection, unless it got closed
    with session.open("http://localhost/two") as response:
        response.read()
    
    # Closes old connection when new host specified
    with session.open("http://example/three") as response:
        response.read()
    
    connection.close()  # Frees socket
    
    Currently does not reuse an existing connection if
    two host names happen to resolve to the same Internet address.
    """
    
    # Maps URL scheme to the http.client connection class used for it
    conn_classes = {
        "http": http.client.HTTPConnection,
        "https": http.client.HTTPSConnection,
    }
    
    def __init__(self, *pos, **kw):
        # Extra arguments are saved and forwarded to the connection class
        # each time a new connection is opened
        self._type = None  # scheme of the current connection, or None
        self._host = None  # host of the current connection, or None
        self._pos = pos
        self._kw = kw
        self._connection = None
    
    def default_open(self, req):
        """Handle "req", reusing the open connection where possible
        
        Returns None (declining the request) for schemes other than
        HTTP(S).  Retries the request once on a new connection when the
        server apparently dropped the persistent connection, provided
        the request method indicates it is idempotent."""
        if req.type not in self.conn_classes:
            return None
        
        # A different scheme or host cannot reuse the current connection
        if req.type != self._type or req.host != self._host:
            if self._connection:
                self._connection.close()
            conn_class = self.conn_classes[req.type]
            self._connection = conn_class(req.host, *self._pos, **self._kw)
            self._type = req.type
            self._host = req.host
        
        headers = dict(req.header_items())
        self._attempt_request(req, headers)
        try:
            try:
                response = self._connection.getresponse()
            except EnvironmentError as err:  # Python < 3.3 compatibility
                # Translate disconnection errnos into the same exception
                # the Python >= 3.3 path produces, so one handler covers both
                if err.errno not in DISCONNECTION_ERRNOS:
                    raise
                raise http.client.BadStatusLine(err) from err
        except (ConnectionError, http.client.BadStatusLine):
            idempotents = {
                "GET", "HEAD", "PUT", "DELETE", "TRACE", "OPTIONS"}
            if req.get_method() not in idempotents:
                raise
            # Retry requests whose method indicates they are idempotent
            self._connection.close()
            response = None
        else:
            if response.status == http.client.REQUEST_TIMEOUT:
                # Server indicated it did not handle request
                response = None
        if not response:
            # Retry request
            self._attempt_request(req, headers)
            response = self._connection.getresponse()
        
        # Odd impedance mismatch between "http.client" and "urllib.request"
        response.msg = response.reason
        # HTTPResponse secretly already has a geturl() method, but needs a
        # "url" attribute to be set
        response.url = "{}://{}{}".format(req.type, req.host, req.selector)
        return response
    
    def _attempt_request(self, req, headers):
        """Send HTTP request, ignoring broken pipe and similar errors"""
        try:
            self._connection.request(req.get_method(), req.selector,
                req.data, headers)
        except (ConnectionRefusedError, ConnectionAbortedError):
            raise  # Assume connection was not established
        except ConnectionError:
            pass  # Continue and read server response if available
        except EnvironmentError as err:  # Python < 3.3 compatibility
            if err.errno not in DISCONNECTION_ERRNOS:
                raise
    
    def close(self):
        # Close any open connection; the handler itself stays usable
        if self._connection:
            self._connection.close()
    
    def __enter__(self):
        return self
    
    def __exit__(self, *exc):
        self.close()
def http_get(session, url, types=None, *, headers=dict(), **kw):
    """Open "url" via "session", optionally checking the content type
    
    If "types" is given, it is sent as the Accept header and the
    response's content type must be one of them, otherwise TypeError is
    raised.  The response is closed if any exception escapes; on success
    the caller takes ownership of it."""
    headers = dict(headers)
    if types is not None:
        headers["Accept"] = ", ".join(types)
    request = urllib.request.Request(url, headers=headers, **kw)
    response = session.open(request)
    try:
        # Content negotiation does not make sense with local files
        if urlsplit(response.geturl()).scheme != "file":
            info = response.info()
            info.set_default_type(None)
            content_type = info.get_content_type()
            if types is not None and content_type not in types:
                raise TypeError(
                    "Unexpected content type {}".format(content_type))
        return response
    except:
        response.close()
        raise
def encodeerrors(text, textio, errors="replace"):
    """Prepare a string with a fallback encoding error handler
    
    If the string is not encodable to the output stream,
    the string is passed through a codec error handler."""
    encoding = getattr(textio, "encoding", None)
    # TextIOBase subclasses such as StringIO expose an "encoding"
    # attribute of None despite not doing any encoding
    if encoding is None:
        return text
    try:
        text.encode(encoding, textio.errors or "strict")
    except UnicodeEncodeError:
        return text.encode(encoding, errors).decode(encoding)
    return text