pkgs/by-name/xa/xar/patches/0001-Update-tests-for-Python-3-and-Nix-sandbox.patch
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Ivan Trubach <mr.trubach@icloud.com>
Date: Sat, 27 Jul 2024 12:53:54 +0300
Subject: [PATCH 01/19] Update tests for Python 3 and Nix sandbox

This change updates integration tests for Python 3 and fixes some
assumptions to work under Nix sandbox (in particular, extended
attributes are not allowed).

Also updates xar/test/validate.c for modern OpenSSL versions.
---
 xar/test/attr.py        | 54 +++++++++++++++++++-------
 xar/test/buffer.c       |  3 +-
 xar/test/checksums.py   | 75 +++++++++++++++++++-----------------
 xar/test/compression.py | 27 ++++++++-----
 xar/test/data.py        | 19 +++++----
 xar/test/hardlink.py    | 12 ++++--
 xar/test/heap.py        | 27 +++++++------
 xar/test/integrity.py   | 45 ++++++++++++----------
 xar/test/run-all.py     | 25 ++++++++++++
 xar/test/util.py        | 85 ++++++++++++++++++++++++++++++++++++-----
 xar/test/validate.c     | 32 +++++++++-------
 11 files changed, 282 insertions(+), 122 deletions(-)
 create mode 100755 xar/test/run-all.py

diff --git a/xar/test/attr.py b/xar/test/attr.py
index adc2c56..c28a4e6 100755
--- a/xar/test/attr.py
+++ b/xar/test/attr.py
@@ -6,6 +6,7 @@ import os
 import os.path
 import shutil
 import subprocess
+import sys
 import xattr

 import util
@@ -26,20 +27,27 @@ import util
 class MissingExtendedAttributeError(AssertionError):
     pass

-def _random_big_data(bytes=65536, path="/dev/random"):
+def _random_big_data(bytes=65536):
     """
     Returns a random string with the number of bytes requested. Due to xar
     implementation details, this should be greater than 4096 (32768 for
     compressed heap testing).

     """
-    with open(path, "r") as f:
-        return f.read(bytes)
+    return os.urandom(bytes)

+def _to_bytes(s):
+    if isinstance(s, str):
+        return s.encode("utf-8")
+    return s

 def _test_xattr_on_file_with_contents(filename, file_contents, xattrs=[], xar_create_flags=[], xar_extract_flags=[]):
+    file_contents = _to_bytes(file_contents)
+    xattr_prefix = "user." if sys.platform != "darwin" else ""
+    xattrs = [(xattr_prefix + k, _to_bytes(v)) for k, v in xattrs]
     try:
         # Write file out
-        with open(filename, "w") as f:
+        with open(filename, "wb") as f:
             f.write(file_contents)
             for (key, value) in xattrs:
                 xattr.setxattr(f, key, value)
@@ -51,14 +59,16 @@ def _test_xattr_on_file_with_contents(filename, file_contents, xattrs=[], xar_cr
         with util.directory_created("extracted") as directory:
             # Validate resulting xattrs
             subprocess.check_call(["xar", "-x", "-C", directory, "-f", path] + xar_extract_flags)
+            extracted_filename = os.path.join(directory, filename)
+            expected_set = {key for key, _ in xattrs}
+            actual_set = set(xattr.listxattr(os.path.join(directory, filename)))
+            for key in expected_set - actual_set:
+                raise MissingExtendedAttributeError("extended attribute \"{n}\" missing after extraction".format(n=key))
             for (key, value) in xattrs:
-                try:
-                    assert xattr.getxattr(os.path.join(directory, filename), key) == value, "extended attribute \"{n}\" has incorrect contents after extraction".format(n=key)
-                except KeyError:
-                    raise MissingExtendedAttributeError("extended attribute \"{n}\" missing after extraction".format(n=key))
+                assert xattr.getxattr(extracted_filename, key) == value, "extended attribute \"{n}\" has incorrect contents after extraction".format(n=key)

             # Validate file contents
-            with open(os.path.join(directory, filename), "r") as f:
+            with open(os.path.join(directory, filename), "rb") as f:
                 if f.read() != file_contents:
                     raise MissingExtendedAttributeError("archived file \"{f}\" has has incorrect contents after extraction".format(f=filename))
     finally:
@@ -73,36 +83,47 @@ def _test_xattr_on_file_with_contents(filename, file_contents, xattrs=[], xar_cr
 # tests are commented out awaiting a day when this might be different.

 # def empty_xattr_empty_file(filename):
+#     util.skip_if_no_xattrs_support()
 #     _test_xattr_on_file_with_contents(filename, "", xattrs=[("foo", "")])

 def small_xattr_empty_file(filename):
+    util.skip_if_no_xattrs_support()
     _test_xattr_on_file_with_contents(filename, "", xattrs=[("foo", "1234")])

 def large_xattr_empty_file(filename):
+    util.skip_if_no_xattrs_support()
     _test_xattr_on_file_with_contents(filename, "", xattrs=[("foo", _random_big_data(5000))])

 # def empty_xattr_small_file(filename):
+#     util.skip_if_no_xattrs_support()
 #     _test_xattr_on_file_with_contents(filename, "small.file.contents", xattrs=[("foo", "")])

 def small_xattr_small_file(filename):
+    util.skip_if_no_xattrs_support()
     _test_xattr_on_file_with_contents(filename, "small.file.contents", xattrs=[("foo", "1234")])

 def large_xattr_small_file(filename):
+    util.skip_if_no_xattrs_support()
     _test_xattr_on_file_with_contents(filename, "small.file.contents", xattrs=[("foo", _random_big_data(4567))])

 # def empty_xattr_large_file(filename):
+#     util.skip_if_no_xattrs_support()
 #     _test_xattr_on_file_with_contents(filename, _random_big_data(10000000), xattrs=[("foo", "")])

 def small_xattr_large_file(filename):
+    util.skip_if_no_xattrs_support()
     _test_xattr_on_file_with_contents(filename, _random_big_data(5000000), xattrs=[("foo", "1234")])

 def large_xattr_large_file(filename):
+    util.skip_if_no_xattrs_support()
     _test_xattr_on_file_with_contents(filename, _random_big_data(9876543), xattrs=[("foo", _random_big_data(6543))])

 def multiple_xattrs(filename):
+    util.skip_if_no_xattrs_support()
     _test_xattr_on_file_with_contents(filename, "", xattrs=[("foo", "bar"), ("baz", "1234"), ("quux", "more")]) # ("empty", "")

 def distribution_create(filename):
+    util.skip_if_no_xattrs_support()
     try:
         _test_xattr_on_file_with_contents(filename, "dummy", xattrs=[("foo", "bar")], xar_create_flags=["--distribution"])
     except MissingExtendedAttributeError:
@@ -114,6 +135,7 @@ def distribution_create(filename):
 # when it can.

 # def distribution_extract(filename):
+#     util.skip_if_no_xattrs_support()
 #     try:
 #         _test_xattr_on_file_with_contents(filename, "dummy", xattrs=[("foo", "bar")], xar_extract_flags=["--distribution"])
 #     except MissingExtendedAttributeError:
@@ -128,12 +150,18 @@ TEST_CASES = (small_xattr_empty_file, large_xattr_empty_file,
               multiple_xattrs, distribution_create)

 if __name__ == "__main__":
+    failed = False
     for case in TEST_CASES:
+        func_name = case.__name__
         try:
-            case(case.func_name)
-            print("PASSED: {f}".format(f=case.func_name))
+            case(func_name)
+            print("PASSED: {f}".format(f=func_name))
         except (AssertionError, IOError, subprocess.CalledProcessError):
-            import sys, os
-            print("FAILED: {f}".format(f=case.func_name))
+            failed = True
+            print("FAILED: {f}".format(f=func_name))
             sys.excepthook(*sys.exc_info())
             print("")
+        except util.TestCaseSkipError as e:
+            print("SKIPPED: {f}: {m}".format(f=func_name, m=e))
+    if failed:
+        sys.exit(1)
diff --git a/xar/test/buffer.c b/xar/test/buffer.c
index a353cef..e4c5639 100644
--- a/xar/test/buffer.c
+++ b/xar/test/buffer.c
@@ -1,5 +1,6 @@
 #include <stdlib.h>
 #include <stdio.h>
+#include <unistd.h>
 #include <sys/stat.h>
 #include <sys/fcntl.h>
 #include <string.h>
@@ -50,7 +51,7 @@ int main(int argc, char *argv[])
     if( red < sb.st_size )
         fprintf(stderr, "Incomplete read\n");

-    x = xar_open("/tmp/test.xar", WRITE);
+    x = xar_open("test.xar", WRITE);
     if( x == NULL ) {
         fprintf(stderr, "Error creating xarchive\n");
         exit(6);
diff --git a/xar/test/checksums.py b/xar/test/checksums.py
index 7080d7c..0f39e63 100755
--- a/xar/test/checksums.py
+++ b/xar/test/checksums.py
@@ -2,6 +2,7 @@

 from __future__ import print_function

+import contextlib
 import hashlib
 import os
 import os.path
@@ -9,6 +10,7 @@ import re
 import shutil
 import struct
 import subprocess
+import sys

 import util

@@ -17,15 +19,21 @@ import util
 # Utility Functions


+@contextlib.contextmanager
+def _test_archive_created(filename, directory, *args, **kwargs):
+    with util.test_directory_created(directory) as test_directory:
+        with util.archive_created(filename, test_directory, *args, **kwargs) as path:
+            yield path
+
 def _get_numeric_value_from_header(archive_name, key):
     """
     Dumps the header of the specified xar archive and extracts the header
     size from the output, in bytes.
     """

-    header = subprocess.check_output(["xar", "--dump-header", "-f", archive_name])
+    header = subprocess.check_output(["xar", "--dump-header", "-f", archive_name], text=True)
     for line in header.splitlines():
-        matchdata = re.match("^(.+):\s+(.+)$", line) # magic: 0x78617221 (OK)
+        matchdata = re.match(r"^(.+):\s+(.+)$", line) # magic: 0x78617221 (OK)
         assert matchdata, "unexpected output from `xar --dump-header`:\n{h}".format(h=header)
         if matchdata.groups()[0] == key:
             return int(matchdata.groups()[1])
@@ -38,17 +46,14 @@ def _get_toc_size(archive_name):
     return _get_numeric_value_from_header(archive_name, "Compressed TOC length")

 def _clobber_bytes_at(clobber_range, path):
-    with open(path, "r+") as f:
+    with open(path, "rb+") as f:
         f.seek(clobber_range[0])
-        with open("/dev/random", "r") as r:
-            random_bytes = r.read(len(clobber_range))
-            f.write(random_bytes)
+        f.write(os.urandom(len(clobber_range)))

 def _verify_extraction_failed(filename):
     with util.directory_created("extracted") as directory:
         try:
-            with open("/dev/null", "w") as n:
-                returncode = subprocess.call(["xar", "-x", "-C", directory, "-f", filename], stdout=n, stderr=n)
+            returncode = subprocess.call(["xar", "-x", "-C", directory, "-f", filename], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
             assert returncode != 0, "xar reported success extracting an archive with a broken TOC"
         finally:
             if os.path.exists(directory):
@@ -63,7 +68,7 @@ def _verify_header_checksum(filename, algorithm):
     header_size = _get_header_size(filename)
     toc_length = _get_toc_size(filename)

-    with open(filename, "r") as f:
+    with open(filename, "rb") as f:
         f.seek(header_size)
         h = hashlib.new(algorithm, f.read(toc_length))
         computed_digest = h.digest()
@@ -76,23 +81,23 @@ def _verify_header_checksum(filename, algorithm):


 def default_toc_checksum_validity(filename):
-    with util.archive_created(filename, "/bin") as path:
+    with _test_archive_created(filename, "testdir") as path:
         _verify_header_checksum(path, "sha1")

 def sha1_toc_checksum_validity(filename):
-    with util.archive_created(filename, "/bin", "--toc-cksum", "sha1") as path:
+    with _test_archive_created(filename, "testdir", "--toc-cksum", "sha1") as path:
         _verify_header_checksum(path, "sha1")

 def sha256_toc_checksum_validity(filename):
-    with util.archive_created(filename, "/bin", "--toc-cksum", "sha256") as path:
+    with _test_archive_created(filename, "testdir", "--toc-cksum", "sha256") as path:
         _verify_header_checksum(path, "sha256")

 def sha512_toc_checksum_validity(filename):
-    with util.archive_created(filename, "/bin", "--toc-cksum", "sha512") as path:
+    with _test_archive_created(filename, "testdir", "--toc-cksum", "sha512") as path:
         _verify_header_checksum(path, "sha512")

 def broken_toc_default_checksum(filename):
-    with util.archive_created(filename, "/bin") as path:
+    with _test_archive_created(filename, "testdir") as path:
         # Mess up the archive
         toc_start = _get_header_size(path)
         _clobber_bytes_at(range(toc_start + 4, toc_start + 4 + 100), path) # Why did the original test specify 4? No idea.
@@ -101,7 +106,7 @@ def broken_toc_default_checksum(filename):
         _verify_extraction_failed(filename)

 def broken_toc_sha1_checksum(filename):
-    with util.archive_created(filename, "/bin", "--toc-cksum", "sha1") as path:
+    with _test_archive_created(filename, "testdir", "--toc-cksum", "sha1") as path:
         # Mess up the archive
         toc_start = _get_header_size(path)
         _clobber_bytes_at(range(toc_start + 4, toc_start + 4 + 100), path) # Why did the original test specify 4? No idea.
@@ -110,7 +115,7 @@ def broken_toc_sha1_checksum(filename):
         _verify_extraction_failed(filename)

 def broken_toc_sha256_checksum(filename):
-    with util.archive_created(filename, "/bin", "--toc-cksum", "sha256") as path:
+    with _test_archive_created(filename, "testdir", "--toc-cksum", "sha256") as path:
         # Mess up the archive
         toc_start = _get_header_size(path)
         _clobber_bytes_at(range(toc_start + 4, toc_start + 4 + 100), path) # Why did the original test specify 4? No idea.
@@ -119,7 +124,7 @@ def broken_toc_sha256_checksum(filename):
         _verify_extraction_failed(filename)

 def broken_toc_sha512_checksum(filename):
-    with util.archive_created(filename, "/bin", "--toc-cksum", "sha512") as path:
+    with _test_archive_created(filename, "testdir", "--toc-cksum", "sha512") as path:
         # Mess up the archive
         toc_start = _get_header_size(path)
         _clobber_bytes_at(range(toc_start + 4, toc_start + 4 + 100), path) # Why did the original test specify 4? No idea.
@@ -128,7 +133,7 @@ def broken_toc_sha512_checksum(filename):
         _verify_extraction_failed(filename)

 def broken_heap_default_checksum(filename):
-    with util.archive_created(filename, "/bin") as path:
+    with _test_archive_created(filename, "testdir") as path:
         # Mess up the archive
         toc_start = _get_header_size(path)
         toc_size = _get_toc_size(path)
@@ -139,11 +144,11 @@ def broken_heap_default_checksum(filename):
         _verify_extraction_failed(filename)

 def default_checksum_algorithm(filename):
-    with util.archive_created(filename, "/bin") as path:
-        header = subprocess.check_output(["xar", "--dump-header", "-f", path])
+    with _test_archive_created(filename, "testdir") as path:
+        header = subprocess.check_output(["xar", "--dump-header", "-f", path], text=True)
         found = False
         for line in header.splitlines():
-            matchdata = re.match("^Checksum algorithm:\s+(\d+)\s+\\((\w+)\\)$", line)
+            matchdata = re.match(r"^Checksum algorithm:\s+(\d+)\s+\((\w+)\)$", line)
             if not matchdata:
                 continue
             found = True
@@ -156,7 +161,7 @@ def default_checksum_algorithm(filename):

 # def invalid_checksum_algorithm(filename):
 #     try:
-#         with util.archive_created(filename, "/bin", "--toc-cksum", "invalid-algorithm") as path:
+#         with _test_archive_created(filename, "testdir", "--toc-cksum", "invalid-algorithm") as path:
 #             raise AssertionError("xar succeeded when it should have failed")
 #     except subprocess.CalledProcessError:
 #         pass
@@ -164,17 +169,15 @@ def default_checksum_algorithm(filename):
 # It does fail for md5 explicitly, however
 def md5_toc_checksum_failure(filename):
     try:
-        with open("/dev/null", "a") as devnull:
-            with util.archive_created(filename, "/bin", "--toc-cksum", "md5", stderr=devnull) as path:
-                raise AssertionError("xar succeeded when it should have failed")
+        with _test_archive_created(filename, "testdir", "--toc-cksum", "md5", stderr=subprocess.DEVNULL) as path:
+            raise AssertionError("xar succeeded when it should have failed")
     except subprocess.CalledProcessError:
         pass

 def md5_file_checksum_failure(filename):
     try:
-        with open("/dev/null", "a") as devnull:
-            with util.archive_created(filename, "/bin", "--file-cksum", "md5", stderr=devnull) as path:
-                raise AssertionError("xar succeeded when it should have failed")
+        with _test_archive_created(filename, "testdir", "--file-cksum", "md5", stderr=subprocess.DEVNULL) as path:
+            raise AssertionError("xar succeeded when it should have failed")
     except subprocess.CalledProcessError:
         pass

@@ -185,8 +188,8 @@ def _verify_checksum_algorithm(filename, algorithm):
     else:
         algorithm = "sha1"

-    with util.archive_created(filename, "/bin", *additional_args) as path:
-        toc = subprocess.check_output(["xar", "--dump-toc=-", "-f", path])
+    with _test_archive_created(filename, "testdir", *additional_args) as path:
+        toc = subprocess.check_output(["xar", "--dump-toc=-", "-f", path], text=True)
         found = False
         for line in toc.splitlines():
             if '<unarchived-checksum style="{a}">'.format(a=algorithm) in line or '<archived-checksum style="{a}">'.format(a=algorithm) in line:
@@ -214,12 +217,16 @@ TEST_CASES = (default_toc_checksum_validity, sha1_toc_checksum_validity, sha256_
               md5_toc_checksum_failure, md5_file_checksum_failure,)

 if __name__ == "__main__":
+    failed = False
     for case in TEST_CASES:
+        func_name = case.__name__
         try:
-            case("{f}.xar".format(f=case.func_name))
-            print("PASSED: {f}".format(f=case.func_name))
+            case("{f}.xar".format(f=func_name))
+            print("PASSED: {f}".format(f=func_name))
         except (AssertionError, IOError, subprocess.CalledProcessError):
-            import sys, os
-            print("FAILED: {f}".format(f=case.func_name))
+            failed = True
+            print("FAILED: {f}".format(f=func_name))
             sys.excepthook(*sys.exc_info())
             print("")
+    if failed:
+        sys.exit(1)
diff --git a/xar/test/compression.py b/xar/test/compression.py
index 2b3b2ec..7ed30ca 100755
--- a/xar/test/compression.py
+++ b/xar/test/compression.py
@@ -2,10 +2,10 @@

 from __future__ import print_function

-import cStringIO
 import os
 import os.path
 import subprocess
+import sys
 import tempfile

 import util
@@ -16,10 +16,15 @@ import util


 def _check_compression(filename, *args, **kwargs):
-    with util.archive_created(filename, "/bin", *args, **kwargs) as path:
+    with (
+        util.directory_created("temp") as temp_directory,
+        util.chdir(temp_directory),
+        util.test_directory_created("testdir") as test_directory,
+        util.archive_created(filename, "testdir", *args, **kwargs) as path,
+    ):
         with util.directory_created("extracted") as directory:
             subprocess.check_call(["xar", "-x", "-f", path, "-C", directory])
-            util.assert_identical_directories("/bin", os.path.join(directory, "bin"))
+            util.assert_identical_directories(test_directory, os.path.join(directory, "testdir"))



@@ -61,14 +66,18 @@ TEST_CASES = (no_compression, default_compression,
               gzip_compression_short, bzip2_compression_short, lzma_compression_short)

 if __name__ == "__main__":
+    failed = False
     for case in TEST_CASES:
+        func_name = case.__name__
         try:
-            case("{f}.xar".format(f=case.func_name))
-            print("PASSED: {f}".format(f=case.func_name))
+            case("{f}.xar".format(f=func_name))
+            print("PASSED: {f}".format(f=func_name))
         except (AssertionError, IOError, subprocess.CalledProcessError):
-            import sys, os
-            print("FAILED: {f}".format(f=case.func_name))
+            failed = True
+            print("FAILED: {f}".format(f=func_name))
             sys.excepthook(*sys.exc_info())
             print("")
-        except util.TestCaseSkipError, e:
-            print("SKIPPED: {f}: {m}".format(f=case.func_name, m=e.message))
+        except util.TestCaseSkipError as e:
+            print("SKIPPED: {f}: {m}".format(f=func_name, m=e))
+    if failed:
+        sys.exit(1)
diff --git a/xar/test/data.py b/xar/test/data.py
index a9793f0..f902b78 100755
--- a/xar/test/data.py
+++ b/xar/test/data.py
@@ -6,6 +6,7 @@ import contextlib
 import os
 import os.path
 import subprocess
+import sys
 import util


@@ -28,7 +29,7 @@ def _process_toc(archive_path):
     subprocess.check_call(["xar", "-f", archive_path, "--dump-toc=data_toc.xml"])
     try:
         result = subprocess.check_output(["xsltproc", "-o", "-", os.path.realpath(os.path.join(__file__, "..", "data.xsl")), "data_toc.xml"])
-        assert result == "", "expected no data offset, but instead found:{o}".format(o=result)
+        assert result == b"", "expected no data offset, but instead found:{o}".format(o=result)
     finally:
         os.unlink("data_toc.xml")

@@ -90,14 +91,18 @@ TEST_CASES = (zero_length_default_compression, zero_length_no_compression,
               mixed_length_gzip_compression, mixed_length_bzip2_compression, mixed_length_lzma_compression)

 if __name__ == "__main__":
+    failed = False
     for case in TEST_CASES:
+        func_name = case.__name__
         try:
-            case("{f}.xar".format(f=case.func_name))
-            print("PASSED: {f}".format(f=case.func_name))
+            case("{f}.xar".format(f=func_name))
+            print("PASSED: {f}".format(f=func_name))
         except (AssertionError, IOError, subprocess.CalledProcessError):
-            import sys, os
-            print("FAILED: {f}".format(f=case.func_name))
+            failed = True
+            print("FAILED: {f}".format(f=func_name))
             sys.excepthook(*sys.exc_info())
             print("")
-        except util.TestCaseSkipError, e:
-            print("SKIPPED: {f}: {m}".format(f=case.func_name, m=e.message))
+        except util.TestCaseSkipError as e:
+            print("SKIPPED: {f}: {m}".format(f=func_name, m=e))
+    if failed:
+        sys.exit(1)
diff --git a/xar/test/hardlink.py b/xar/test/hardlink.py
index 5145216..da409d6 100755
--- a/xar/test/hardlink.py
+++ b/xar/test/hardlink.py
@@ -5,6 +5,7 @@ from __future__ import print_function
 import os
 import os.path
 import subprocess
+import sys

 import util

@@ -58,12 +59,17 @@ def hard_link_identical_files(filename):
 TEST_CASES = (hard_link_in_directory, hard_link_in_cwd, hard_link_identical_files)

 if __name__ == "__main__":
+    failed = False
     for case in TEST_CASES:
+        func_name = case.__name__
         try:
-            case("{f}.xar".format(f=case.func_name))
-            print("PASSED: {f}".format(f=case.func_name))
+            case("{f}.xar".format(f=func_name))
+            print("PASSED: {f}".format(f=func_name))
         except (AssertionError, IOError, subprocess.CalledProcessError):
+            failed = True
             import sys, os
-            print("FAILED: {f}".format(f=case.func_name))
+            print("FAILED: {f}".format(f=func_name))
             sys.excepthook(*sys.exc_info())
             print("")
+    if failed:
+        sys.exit(1)
diff --git a/xar/test/heap.py b/xar/test/heap.py
index f431c77..727412a 100755
--- a/xar/test/heap.py
+++ b/xar/test/heap.py
@@ -8,6 +8,7 @@ import os.path
 import re
 import shutil
 import subprocess
+import sys

 import util

@@ -19,8 +20,8 @@ import util
 def _file_offsets_for_archive(path, xsl_path):
     subprocess.check_call(["xar", "--dump-toc=heap_toc.xml", "-f", path])
     try:
-        offsets = subprocess.check_output(["xsltproc", xsl_path, "heap_toc.xml"])
-        matches = [re.match("^(.+)\s([^\s]+)$", offset) for offset in offsets.splitlines()]
+        offsets = subprocess.check_output(["xsltproc", xsl_path, "heap_toc.xml"], text=True)
+        matches = [re.match(r"^(.+)\s([^\s]+)$", offset) for offset in offsets.splitlines()]
         offsets = [(match.groups()[0], int(match.groups()[1])) for match in matches]
         return offsets
     finally:
@@ -33,9 +34,8 @@ def _file_offsets_for_archive(path, xsl_path):
 XSL_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "heap1.xsl")

 def normal_heap(filename):
-    with util.directory_created("scratch") as directory:
-        shutil.copy("/bin/ls", os.path.join(directory, "ls"))
-        shutil.copy(os.path.join(directory, "ls"), os.path.join(directory, "foo"))
+    with util.test_directory_created("scratch") as directory:
+        shutil.copy(os.path.join(directory, "script"), os.path.join(directory, "foo"))
         with util.chdir(directory):
             with util.archive_created(os.path.join("..", "heap.xar"), ".") as path:
                 # Verify file offsets are as we expect
@@ -50,9 +50,8 @@ def normal_heap(filename):
                 subprocess.check_call(["xar", "-x", "-f", path, "-C", extracted])

 def coalesce_heap(filename):
-    with util.directory_created("scratch") as directory:
-        shutil.copy("/bin/ls", os.path.join(directory, "ls"))
-        shutil.copy(os.path.join(directory, "ls"), os.path.join(directory, "foo"))
+    with util.test_directory_created("scratch") as directory:
+        shutil.copy(os.path.join(directory, "script"), os.path.join(directory, "foo"))
         with util.chdir(directory):
             with util.archive_created(os.path.join("..", "heap.xar"), ".", "--coalesce-heap") as path:
                 # Verify file offsets are as we expect
@@ -67,12 +66,16 @@ def coalesce_heap(filename):
 TEST_CASES = (normal_heap, coalesce_heap)

 if __name__ == "__main__":
+    failed = False
     for case in TEST_CASES:
+        func_name = case.__name__
         try:
-            case("{f}.xar".format(f=case.func_name))
-            print("PASSED: {f}".format(f=case.func_name))
+            case("{f}.xar".format(f=func_name))
+            print("PASSED: {f}".format(f=func_name))
         except (AssertionError, IOError, subprocess.CalledProcessError):
-            import sys, os
-            print("FAILED: {f}".format(f=case.func_name))
+            failed = True
+            print("FAILED: {f}".format(f=func_name))
             sys.excepthook(*sys.exc_info())
             print("")
+    if failed:
+        sys.exit(1)
diff --git a/xar/test/integrity.py b/xar/test/integrity.py
index c47ac6a..f4d2af7 100755
--- a/xar/test/integrity.py
+++ b/xar/test/integrity.py
@@ -5,6 +5,7 @@ from __future__ import print_function
 import os
 import os.path
 import subprocess
+import sys

 import util

@@ -12,9 +13,9 @@ import util
 # Utility Functions


-def _test_truncation(filename, path_to_be_archived, bytes_to_chop, *args):
-    with util.archive_created(filename, path_to_be_archived) as path:
-        with open("/dev/null", "w") as bitbucket:
+def _test_truncation(filename, bytes_to_chop, *args):
+    with util.test_directory_created("testdir") as test_directory:
+        with util.archive_created(filename, test_directory) as path:
             size = os.stat(path).st_size
             while size > 0:
                 last_size = size
@@ -23,7 +24,7 @@ def _test_truncation(filename, path_to_be_archived, bytes_to_chop, *args):
                     f.truncate(size)

                 with util.directory_created("scratch") as directory:
-                    returncode = subprocess.call(["xar", "-x", "-f", path, "-C", directory], stderr=bitbucket)
+                    returncode = subprocess.call(["xar", "-x", "-f", path, "-C", directory], stderr=subprocess.DEVNULL)
                     assert returncode != 0, "xar claimed to succeed when extracting a truncated archive"


@@ -31,42 +32,42 @@ def _test_truncation(filename, path_to_be_archived, bytes_to_chop, *args):


 def large_uncompressed(filename):
-    _test_truncation(filename, "/usr/share/man/man1", 1024 * 1024, "--compression=none")
+    _test_truncation(filename, 1024 * 1024, "--compression=none")

 def large_default_compression(filename):
-    _test_truncation(filename, "/usr/share/man/man1", 1024 * 1024)
+    _test_truncation(filename, 1024 * 1024)

 def large_gzip_compressed(filename):
     util.skip_if_no_compression_support("gzip")
-    _test_truncation(filename, "/usr/share/man/man1", 1024 * 1024, "--compression=gzip")
+    _test_truncation(filename, 1024 * 1024, "--compression=gzip")

 def large_bzip2_compressed(filename):
     util.skip_if_no_compression_support("bzip2")
-    _test_truncation(filename, "/usr/share/man/man1", 1024 * 1024, "--compression=bzip2")
+    _test_truncation(filename, 1024 * 1024, "--compression=bzip2")

 def large_lzma_compressed(filename):
     util.skip_if_no_compression_support("lzma")
-    _test_truncation(filename, "/usr/share/man/man1", 1024 * 1024, "--compression=lzma")
+    _test_truncation(filename, 1024 * 1024, "--compression=lzma")

 # "small" variants use a non-base-2 size to try to catch issues that occur on uneven boundaries

 def small_uncompressed(filename):
-    _test_truncation(filename, "/bin", 43651, "--compression=none")
+    _test_truncation(filename, 43651, "--compression=none")

 def small_default_compression(filename):
-    _test_truncation(filename, "/bin", 43651)
+    _test_truncation(filename, 43651)

 def small_gzip_compressed(filename):
     util.skip_if_no_compression_support("gzip")
-    _test_truncation(filename, "/bin", 43651, "--compression=gzip")
+    _test_truncation(filename, 43651, "--compression=gzip")

 def small_bzip2_compressed(filename):
     util.skip_if_no_compression_support("bzip2")
-    _test_truncation(filename, "/bin", 43651, "--compression=bzip2")
+    _test_truncation(filename, 43651, "--compression=bzip2")

 def small_lzma_compressed(filename):
     util.skip_if_no_compression_support("lzma")
-    _test_truncation(filename, "/bin", 43651, "--compression=lzma")
+    _test_truncation(filename, 43651, "--compression=lzma")


 TEST_CASES = (large_uncompressed, large_default_compression,
@@ -75,14 +76,18 @@ TEST_CASES = (large_uncompressed, large_default_compression,
               small_gzip_compressed, small_bzip2_compressed, small_lzma_compressed)

 if __name__ == "__main__":
+    failed = False
     for case in TEST_CASES:
+        func_name = case.__name__
         try:
-            case("{f}.xar".format(f=case.func_name))
-            print("PASSED: {f}".format(f=case.func_name))
+            case("{f}.xar".format(f=func_name))
+            print("PASSED: {f}".format(f=func_name))
         except (AssertionError, IOError, subprocess.CalledProcessError):
-            import sys, os
-            print("FAILED: {f}".format(f=case.func_name))
+            failed = True
+            print("FAILED: {f}".format(f=func_name))
             sys.excepthook(*sys.exc_info())
             print("")
-        except util.TestCaseSkipError, e:
-            print("SKIPPED: {f}: {m}".format(f=case.func_name, m=e.message))
+        except util.TestCaseSkipError as e:
+            print("SKIPPED: {f}: {m}".format(f=func_name, m=e))
+    if failed:
+        sys.exit(1)
diff --git a/xar/test/run-all.py b/xar/test/run-all.py
new file mode 100755
index 0000000..05e3054
--- /dev/null
+++ b/xar/test/run-all.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+
+import os.path
+import subprocess
+import sys
+
+test_suites = [
+    "attr.py",
+    "checksums.py",
+    "compression.py",
+    "data.py",
+    "hardlink.py",
+    "heap.py",
+    "integrity.py",
+]
+
+test_path = os.path.dirname(__file__)
+
+failed = False
+for suite in test_suites:
+    p = subprocess.run([sys.executable, "--", os.path.join(test_path, suite)])
+    if p.returncode:
+        failed = True
+if failed:
+    sys.exit(1)
diff --git a/xar/test/util.py b/xar/test/util.py
index da79925..423dd3c 100644
--- a/xar/test/util.py
+++ b/xar/test/util.py
@@ -1,6 +1,8 @@
 #!/usr/bin/env python

 import contextlib
+import errno
+import functools
 import hashlib
 import os
 import os.path
@@ -13,16 +15,65 @@ import xattr
 class TestCaseSkipError(Exception):
     pass

+@functools.cache
+def _check_xattrs_supported():
+    """
+    Returns True if the filesystem supports extended attributes.
+    """
+    with directory_created("empty") as directory:
+        try:
+            xattr.setxattr(directory, "user.xattrcheck", b"supported")
+            return True
+        except OSError as e:
+            if e.errno != errno.ENOTSUP:
+                raise
+    return False
+
+def skip_if_no_xattrs_support():
+    """
+    Raises TestCaseSkipError if the the filesystem does not support extended
+    attributes.
+    """
+    if not _check_xattrs_supported():
+        raise TestCaseSkipError("filesystem does not support extended attributes")
+
+@functools.cache
+def _check_compression_supported(type):
+    """
+    Returns True if xar has support for the given compression type compiled
+    in. This function performs a runtime check that tries to compress data
+    with the given compression type and looks for a known error string. It
+    ignores all other errors.
+    """
+    supported = True
+    with directory_created("empty") as directory:
+        archive_path = f"{type}_compression_check.xar"
+        try:
+            return f"{type} support not compiled in." not in subprocess.run(
+                [
+                    "xar",
+                    "-c",
+                    "-f",
+                    archive_path,
+                    "--compression=" + type,
+                    directory,
+                ],
+                stdout=subprocess.PIPE,
+                text=True,
+            ).stdout
+        except:
+            # Assume that this compression type is supported.
+            pass
+        finally:
+            if os.path.exists(archive_path):
+                os.unlink(archive_path)
+    return supported
+
 def skip_if_no_compression_support(type):
     """
-    Raises TestCaseSkipError if the type is "lzma" and the test is running on
-    darwin (OS X). In the future, we should add a hidden debugging flag to xar
-    to determine valid compression types. This will skip incorrectly if a
-    custom xar is used on OS X, or if a custom xar on another platform is
-    built without bzip2 or lzma.
-
+    Raises TestCaseSkipError if the compression type is not compiled in.
     """
-    if sys.platform == "darwin" and type == "lzma":
+    if not _check_compression_supported(type):
         raise TestCaseSkipError("{t} support not compiled in".format(t=type))

 @contextlib.contextmanager
@@ -43,6 +94,22 @@ def directory_created(directory_path):
     if os.path.exists(directory_path):
         shutil.rmtree(directory_path)

+@contextlib.contextmanager
+def test_directory_created(directory_path):
+    """
+    Like directory_created, but populates the directory with test files.
+    """
+    with directory_created(directory_path) as directory:
+        with open(os.path.join(directory, "script"), "w+", opener=lambda path, flags: os.open(path, flags, 0o750)) as f:
+            f.write("#!/bin/sh\necho hello world")
+        with open(os.path.join(directory, "random_1kb"), "wb+") as f:
+            f.write(os.urandom(1000))
+        with open(os.path.join(directory, "random_4kib"), "wb+") as f:
+            f.write(os.urandom(4096))
+        with open(os.path.join(directory, "random_1mb"), "wb+") as f:
+            f.write(os.urandom(9999999))
+        yield directory
+
 @contextlib.contextmanager
 def archive_created(archive_path, content_path, *extra_args, **extra_kwargs):
     """
@@ -68,7 +135,7 @@ def archive_created(archive_path, content_path, *extra_args, **extra_kwargs):
 HASH_CHUNK_SIZE = 32768

 def _md5_path(path):
-    with open(path, "r") as f:
+    with open(path, "rb") as f:
         h = hashlib.md5()
         while True:
             last = f.read(HASH_CHUNK_SIZE)
@@ -122,7 +189,7 @@ def assert_identical_directories(path1, path2):

         # Sizes and the like
         assert stat1.st_size == stat2.st_size, "size mismatch for \"{e1}\" ({s1}) and \"{e2}\" ({s2})".format(e1=entry1, s1=stat1.st_size, e2=entry2, s2=stat2.st_size)
-        assert stat1.st_mtime == stat2.st_mtime, "mtime mismatch for \"{e1}\" and \"{e2}\"".format(e1=entry1, e2=entry2)
+        assert int(stat1.st_mtime) == int(stat2.st_mtime), "mtime mismatch for \"{e1}\" and \"{e2}\"".format(e1=entry1, e2=entry2)
         assert _md5_path(entry1) == _md5_path(entry2), "md5 hash mismatch for \"{e1}\" and \"{e2}\"".format(e1=entry1, e2=entry2)
         if os.path.isdir(entry1):
             assert_identical_directories(entry1, entry2)
diff --git a/xar/test/validate.c b/xar/test/validate.c
index dfe69eb..a5fbe37 100644
--- a/xar/test/validate.c
+++ b/xar/test/validate.c
@@ -16,37 +16,40 @@

 off_t HeapOff = 0;

-static char* xar_format_md5(const unsigned char* m) {
+static char* xar_format_sha1(const unsigned char* m) {
     char* result = NULL;
     asprintf(&result,
         "%02x%02x%02x%02x"
         "%02x%02x%02x%02x"
         "%02x%02x%02x%02x"
+        "%02x%02x%02x%02x"
         "%02x%02x%02x%02x",
         m[0], m[1], m[2], m[3],
         m[4], m[5], m[6], m[7],
         m[8], m[9], m[10], m[11],
-        m[12], m[13], m[14], m[15]);
+        m[12], m[13], m[14], m[15],
+        m[16], m[17], m[18], m[19]);
     return result;
 }

 void heap_check(int fd, const char *name, const char *prop, off_t offset, off_t length, const char *csum) {
     char *buf;
-    EVP_MD_CTX ctx;
+    EVP_MD_CTX *ctx;
     const EVP_MD *md;
-    unsigned char md5str[EVP_MAX_MD_SIZE];
+    unsigned char sha1str[EVP_MAX_MD_SIZE];
     unsigned int len;
     ssize_t r;
-    char *formattedmd5;
+    char *formattedsha1;

     fprintf(stderr, "Heap checking %s %s at offset: %" PRIu64 "\n", name, prop, HeapOff+offset);
     OpenSSL_add_all_digests();
-    md = EVP_get_digestbyname("md5");
+    md = EVP_get_digestbyname("sha1");
     if( md == NULL ) {
-        fprintf(stderr, "No md5 digest in openssl\n");
+        fprintf(stderr, "No sha1 digest in openssl\n");
         exit(1);
     }
-    EVP_DigestInit(&ctx, md);
+    ctx = EVP_MD_CTX_create();
+    EVP_DigestInit(ctx, md);

     buf = malloc(length);
     if( !buf ) {
@@ -65,14 +68,15 @@ void heap_check(int fd, const char *name, const char *prop, off_t offset, off_t
         fprintf(stderr, "Error reading from the heap\n");
         exit(1);
     }
-    EVP_DigestUpdate(&ctx, buf, length);
-    EVP_DigestFinal(&ctx, md5str, &len);
+    EVP_DigestUpdate(ctx, buf, length);
+    EVP_DigestFinal(ctx, sha1str, &len);
+    EVP_MD_CTX_destroy(ctx);

-    formattedmd5 = xar_format_md5(md5str);
-    if( strcmp(formattedmd5, csum) != 0 ) {
-        fprintf(stderr, "%s %s checksum does not match\n", name, prop);
+    formattedsha1 = xar_format_sha1(sha1str);
+    if( strcmp(formattedsha1, csum) != 0 ) {
+        fprintf(stderr, "%s %s checksum does not match (got %s but expected %s)\n", name, prop, formattedsha1, csum);
     }
-    free(formattedmd5);
+    free(formattedsha1);
     free(buf);

-- 
2.44.1
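
Supplementary note (not part of the patch): the sandbox fix above works by probing the filesystem at runtime rather than assuming extended-attribute support, and skipping xattr tests when the probe fails. Below is a minimal standalone sketch of that probe technique, assuming the same third-party xattr module the tests already import; it sets a throwaway "user." attribute in a scratch directory and treats ENOTSUP as "xattrs unavailable" (as they are in the Nix build sandbox), re-raising any other error.

    #!/usr/bin/env python3
    # Standalone illustration of the xattr-support probe used by util.py above.
    # Assumes the `xattr` module (pyxattr-style setxattr(path, name, value)).
    import errno
    import tempfile
    import xattr

    def xattrs_supported(path):
        try:
            # Try to set a harmless user-namespace attribute on the path.
            xattr.setxattr(path, "user.xattrcheck", b"supported")
            return True
        except OSError as e:
            # ENOTSUP means the filesystem rejects xattrs; anything else is a real error.
            if e.errno != errno.ENOTSUP:
                raise
            return False

    if __name__ == "__main__":
        with tempfile.TemporaryDirectory() as scratch:
            print("xattrs supported:", xattrs_supported(scratch))

A test harness can call such a probe once (the patch memoizes it with functools.cache) and raise a skip-style exception instead of failing when the answer is False.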