switch to 64-bit default run paths
[unleashed-pkg5.git] / src / pull.py
blobb20275dbe740c9a81b834ad5f1888b685f3b5231
1 #!/usr/bin/python2.7
3 # CDDL HEADER START
5 # The contents of this file are subject to the terms of the
6 # Common Development and Distribution License (the "License").
7 # You may not use this file except in compliance with the License.
9 # You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
10 # or http://www.opensolaris.org/os/licensing.
11 # See the License for the specific language governing permissions
12 # and limitations under the License.
14 # When distributing Covered Code, include this CDDL HEADER in each
15 # file and include the License file at usr/src/OPENSOLARIS.LICENSE.
16 # If applicable, add the following below this CDDL HEADER, with the
17 # fields enclosed by brackets "[]" replaced with your own identifying
18 # information: Portions Copyright [yyyy] [name of copyright owner]
20 # CDDL HEADER END
24 # Copyright (c) 2008, 2015, Oracle and/or its affiliates. All rights reserved.
27 import calendar
28 import errno
29 import getopt
30 import gettext
31 import locale
32 import os
33 import shutil
34 import sys
35 import tempfile
36 import traceback
37 import urllib
38 import warnings
40 import pkg.catalog as catalog
41 import pkg.client.progress as progress
42 import pkg.fmri
43 import pkg.manifest as manifest
44 import pkg.client.api_errors as apx
45 import pkg.client.pkgdefs as pkgdefs
46 import pkg.client.publisher as publisher
47 import pkg.client.transport.transport as transport
48 import pkg.misc as misc
49 import pkg.p5p
50 import pkg.pkgsubprocess as subprocess
51 import pkg.publish.transaction as trans
52 import pkg.server.repository as sr
53 import pkg.version as version
55 from pkg.client import global_settings
56 from pkg.misc import emsg, get_pkg_otw_size, msg, PipeError
57 from pkg.client.debugvalues import DebugValues
# Globals
archive = False           # -a: write a pkg(5) archive instead of republishing
cache_dir = None          # -c: directory used to cache downloaded content
src_cat = None            # catalog of the current source publisher
download_start = False    # set once any package file download has begun
tmpdirs = []              # temporary directories removed by cleanup()
temp_root = None          # root under which temporary directories are made
xport = None              # source transport
xport_cfg = None          # source transport configuration
dest_xport = None         # destination transport (separate from source)
targ_pub = None           # target publisher; set elsewhere, used by cleanup()
target = None             # -d / PKG_DEST: destination path or URI
def error(text):
    """Emit an error message prefixed by the command name """

    # Exceptions and other objects are rendered via their string form.
    text = str(text)

    # Leading whitespace is kept *in front of* the command-name prefix,
    # so callers can force a blank line before the message.
    stripped = text.lstrip()
    leading = text[:len(text) - len(stripped)]

    # The program name is hard-coded because argv[0] is not dependable
    # across platforms.
    emsg(leading + "pkgrecv: " + stripped)
def usage(usage_error=None, retcode=2):
    """Emit a usage message and optionally prefix it with a more specific
    error message.  Causes program to exit."""

    if usage_error:
        error(usage_error)

    # NOTE(review): the interior layout of this usage string was lost in
    # extraction; alignment below reconstructs the conventional format.
    msg(_("""\
Usage:
        pkgrecv [-aknrv] [-s src_uri] [-d (path|dest_uri)] [-c cache_dir]
            [-m match] [--raw] [--key src_key --cert src_cert]
            [--dkey dest_key --dcert dest_cert]
            (fmri|pattern) ...
        pkgrecv [-s src_repo_uri] --newest
        pkgrecv [-nv] [-s src_repo_uri] [-d path] [-p publisher ...]
            [--key src_key --cert src_cert] --clone

Options:
        -a              Store the retrieved package data in a pkg(5) archive
                        at the location specified by -d.  The file may not
                        already exist, and this option may only be used with
                        filesystem-based destinations.

        -c cache_dir    The path to a directory that will be used to cache
                        downloaded content.  If one is not supplied, the
                        client will automatically pick a cache directory.
                        In the case where a download is interrupted, and a
                        cache directory was automatically chosen, use this
                        option to resume the download.

        -d path_or_uri  The filesystem path or URI of the target repository to
                        republish packages to.  The target must already exist.
                        New repositories can be created using pkgrepo(1).

        -h              Display this usage message.

        -k              Keep the retrieved package content compressed, ignored
                        when republishing.  Should not be used with pkgsend.

        -m match        Controls matching behaviour using the following values:
                            all-timestamps (default)
                                includes all matching timestamps (implies
                                all-versions)
                            all-versions
                                includes all matching versions
                            latest
                                includes only the latest version of each package

        -n              Perform a trial run with no changes made.

        -v              Display verbose output.

        -p publisher    Only clone the given publisher.  Can be specified
                        multiple times.  Only valid with --clone.

        -r              Recursively evaluates all dependencies for the provided
                        list of packages and adds them to the list.

        -s src_repo_uri A URI representing the location of a pkg(5)
                        repository to retrieve package data from.

        --clone         Make an exact copy of the source repository.  By default,
                        the clone operation will only succeed if publishers in
                        the source repository are also present in the
                        destination.  By using -p, the operation can be limited
                        to specific publishers which will be added to the
                        destination repository if not already present.
                        Packages in the destination repository which are not in
                        the source will be removed.
                        Cloning will leave the destination repository altered in
                        case of an error.

        --newest        List the most recent versions of the packages available
                        from the specified repository and exit.  (All other
                        options except -s will be ignored.)

        --raw           Retrieve and store the raw package data in a set of
                        directory structures by stem and version at the location
                        specified by -d.  May only be used with filesystem-
                        based destinations.  This can be used with pkgsend(1)
                        include to conveniently modify and republish packages,
                        perhaps by correcting file contents or providing
                        additional package metadata.

        --key src_key   Specify a client SSL key file to use for pkg retrieval.

        --cert src_cert Specify a client SSL certificate file to use for pkg
                        retrieval.

        --dkey dest_key Specify a client SSL key file to use for pkg
                        publication.

        --dcert dest_cert Specify a client SSL certificate file to use for pkg
                        publication.

Environment:
        PKG_DEST        Destination directory or URI
        PKG_SRC         Source URI or path"""))
    sys.exit(retcode)
def cleanup(caller_error=False):
    """Remove temporary state at program finish.

    When exiting due to an error while a download was in progress, the
    cache directory is preserved so the transfer can be resumed with -c.
    """

    preserve_cache = caller_error and download_start
    for tmpdir in tmpdirs:
        if preserve_cache and tmpdir == cache_dir:
            # Keep partially downloaded content for later resumption.
            error(_("\n\nCached files were preserved in the "
                "following directory:\n\t{0}\nUse pkgrecv -c "
                "to resume the interrupted download.").format(
                cache_dir))
            continue
        shutil.rmtree(tmpdir, ignore_errors=True)

    if caller_error and dest_xport and targ_pub and not archive:
        # Best-effort refresh of the destination's catalog; failures
        # here are deliberately ignored.
        try:
            dest_xport.publish_refresh_packages(targ_pub)
        except apx.TransportError:
            pass
def abort(err=None, retcode=1):
    """Report a fatal error, clean up temporary state and terminate."""

    if err:
        # Emit a blank line first so the message is not glued to any
        # in-progress progress-tracker output.
        msg("")
        error(err)

    cleanup(caller_error=True)
    sys.exit(retcode)
def get_tracker():
    """Return a progress tracker, preferring the fancy terminal tracker
    and falling back to plain command-line output when unavailable."""

    try:
        tracker = progress.FancyUNIXProgressTracker()
    except progress.ProgressTrackerException:
        tracker = progress.CommandLineProgressTracker()
    tracker.set_major_phase(tracker.PHASE_UTILITY)
    return tracker
def get_manifest(pfmri, xport_cfg, contents=False):
    """Return the manifest for package 'pfmri'.

    A cached on-disk manifest is used when present; otherwise the
    manifest is fetched over the transport.  If 'contents' is true, the
    manifest's unsorted string representation is returned instead of
    the manifest object.  Aborts the program if a cached manifest
    cannot be parsed.
    """

    pkgdir = xport_cfg.get_pkg_dir(pfmri)
    mpath = xport_cfg.get_pkg_pathname(pfmri)

    if not os.path.exists(mpath):
        m = xport.get_manifest(pfmri)
    else:
        # A FactoredManifest is used here to reduce peak memory
        # usage (notably when -r was specified).
        try:
            m = manifest.FactoredManifest(pfmri, pkgdir)
        # Trap only real errors; the original bare 'except:' would
        # also have swallowed SystemExit and KeyboardInterrupt.
        except Exception:
            abort(err=_("Unable to parse manifest '{mpath}' for "
                "package '{pfmri}'").format(**locals()))

    if contents:
        return m.tostr_unsorted()
    return m
def expand_fmri(pfmri, constraint=version.CONSTRAINT_AUTO):
    """Find matching fmri using CONSTRAINT_AUTO cache for performance.
    Returns None if no matching fmri is found."""

    if isinstance(pfmri, str):
        pfmri = pkg.fmri.PkgFmri(pfmri)

    # Walk the catalog's versions newest-first so the first acceptable
    # candidate is also the newest one.
    all_versions = list(src_cat.fmris_by_version(pfmri.pkg_name))
    for ver, candidates in reversed(all_versions):
        for candidate in candidates:
            if (not pfmri.version or
                candidate.version.is_successor(pfmri.version,
                    constraint)):
                return candidate
    return None
def get_dependencies(fmri_list, xport_cfg, tracker):
    """Return the transitive dependency closure of 'fmri_list' as a
    list (including the listed packages themselves)."""

    saved_limit = sys.getrecursionlimit()
    # The user may be recursing 'entire' or 'redistributable'.
    sys.setrecursionlimit(3000)

    closure = set()
    for pfmri in fmri_list:
        _get_dependencies(closure, pfmri, xport_cfg, tracker)

    # Restore the previous default.
    sys.setrecursionlimit(saved_limit)

    return list(closure)
def _get_dependencies(s, pfmri, xport_cfg, tracker):
    """Expand all dependencies."""
    s.add(pfmri)

    mfst = get_manifest(pfmri, xport_cfg)
    for action in mfst.gen_actions_by_type("depend"):
        for dep_str in action.attrlist("fmri"):
            resolved = expand_fmri(dep_str)
            # Recurse only into dependencies not yet visited.
            if resolved and resolved not in s:
                _get_dependencies(s, resolved, xport_cfg, tracker)
    return s
def get_sizes(mfst):
    """Takes a manifest and return
    (get_bytes, get_files, send_bytes, send_comp_bytes) tuple."""

    getb = getf = sendb = sendcb = 0

    for action in mfst.gen_actions():
        if action.has_payload:
            getb += get_pkg_otw_size(action)
            getf += 1
            sendb += int(action.attrs.get("pkg.size", 0))
            sendcb += int(action.attrs.get("pkg.csize", 0))
            if action.name == "signature":
                # Signature actions also drag in their
                # certificate chain.
                getf += len(action.get_chain_certs())
                getb += action.get_action_chain_csize()
    return getb, getf, sendb, sendcb
def add_hashes_to_multi(mfst, multi):
    """Takes a manifest and a multi object and adds the hashes to the multi
    object."""

    for action in mfst.gen_actions():
        if not action.has_payload:
            continue
        multi.add_action(action)
def prune(fmri_list, all_versions, all_timestamps):
    """Returns a filtered version of fmri_list based on the provided
    parameters."""

    if all_timestamps:
        # Every matching timestamp is wanted; nothing to filter.
        return fmri_list

    if all_versions:
        # Keep the newest timestamp of each version.
        keyfunc = lambda f: f.get_short_fmri()
    else:
        # Keep only the single newest fmri per package stem.
        keyfunc = lambda f: f.pkg_name

    buckets = {}
    for f in fmri_list:
        buckets.setdefault(keyfunc(f), []).append(f)
    # max() picks the same element as sorted(..., reverse=True)[0].
    return [max(candidates) for candidates in buckets.values()]
def fetch_catalog(src_pub, tracker, txport, target_catalog,
    include_updates=False):
    """Fetch the catalog from src_uri.

    target_catalog is a hint about whether this is a destination catalog,
    which helps the progress tracker render the refresh output properly."""

    src_uri = src_pub.repository.origins[0].uri
    tracker.refresh_start(1, full_refresh=True,
        target_catalog=target_catalog)
    tracker.refresh_start_pub(src_pub)

    if not src_pub.meta_root:
        # Give the publisher a scratch directory to hold its catalog;
        # it is cleaned up on exit.
        cat_dir = tempfile.mkdtemp(dir=temp_root,
            prefix=global_settings.client_name + "-")
        tmpdirs.append(cat_dir)
        src_pub.meta_root = cat_dir

    src_pub.transport = txport
    try:
        src_pub.refresh(full_refresh=True, immediate=True,
            progtrack=tracker, include_updates=include_updates)
    except apx.TransportError:
        # Assume the publisher simply has no catalog and drive on; a
        # genuine transport problem will surface again whenever some
        # other operation is attempted later.
        return catalog.Catalog(read_only=True)
    finally:
        tracker.refresh_end_pub(src_pub)
        tracker.refresh_done()

    return src_pub.catalog
def main_func():
    """Parse the command line, configure transports and dispatch to the
    clone/archive/transfer routine.  Returns the routine's exit code."""
    global archive, cache_dir, download_start, xport, xport_cfg, \
        dest_xport, temp_root, targ_pub, target

    # Option defaults; -m all-timestamps is the implied matching mode.
    all_timestamps = True
    all_versions = False
    dry_run = False
    keep_compressed = False
    list_newest = False
    recursive = False
    src_uri = None
    incoming_dir = None
    src_pub = None
    raw = False
    key = None
    cert = None
    dkey = None
    dcert = None
    publishers = []
    clone = False
    verbose = False

    temp_root = misc.config_temp_root()

    gettext.install("pkg", "/usr/share/locale",
        codeset=locale.getpreferredencoding())

    global_settings.client_name = "pkgrecv"
    # Environment variables supply defaults; -d and -s override them.
    target = os.environ.get("PKG_DEST", None)
    src_uri = os.environ.get("PKG_SRC", None)

    try:
        opts, pargs = getopt.getopt(sys.argv[1:], "ac:D:d:hkm:np:rs:v",
            ["cert=", "key=", "dcert=", "dkey=", "newest", "raw",
            "debug=", "clone"])
    except getopt.GetoptError as e:
        usage(_("Illegal option -- {0}").format(e.opt))

    for opt, arg in opts:
        if opt == "-a":
            archive = True
        elif opt == "-c":
            cache_dir = arg
        elif opt == "--clone":
            clone = True
        elif opt == "-d":
            target = arg
        elif opt == "-D" or opt == "--debug":
            if arg in ["plan", "transport"]:
                # Shorthand: -D plan / -D transport mean name=True.
                key = arg
                value = "True"
            else:
                try:
                    key, value = arg.split("=", 1)
                except (AttributeError, ValueError):
                    usage(_("{opt} takes argument of form "
                        "name=value, not {arg}").format(
                        opt=opt, arg=arg))
            DebugValues.set_value(key, value)
        elif opt == "-h":
            usage(retcode=0)
        elif opt == "-k":
            keep_compressed = True
        elif opt == "-m":
            # The three matching modes are mutually exclusive.
            if arg == "all-timestamps":
                all_timestamps = True
                all_versions = False
            elif arg == "all-versions":
                all_timestamps = False
                all_versions = True
            elif arg == "latest":
                all_timestamps = False
                all_versions = False
            else:
                usage(_("Illegal option value -- {0}").format(
                    arg))
        elif opt == "-n":
            dry_run = True
        elif opt == "-p":
            publishers.append(arg)
        elif opt == "-r":
            recursive = True
        elif opt == "-s":
            src_uri = arg
        elif opt == "-v":
            verbose = True
        elif opt == "--newest":
            list_newest = True
        elif opt == "--raw":
            raw = True
        elif opt == "--key":
            key = arg
        elif opt == "--cert":
            cert = arg
        elif opt == "--dkey":
            dkey = arg
        elif opt == "--dcert":
            dcert = arg

    if not list_newest and not target:
        usage(_("a destination must be provided"))

    if not src_uri:
        usage(_("a source repository must be provided"))
    else:
        src_uri = misc.parse_uri(src_uri)

    if not cache_dir:
        cache_dir = tempfile.mkdtemp(dir=temp_root,
            prefix=global_settings.client_name + "-")
        # Only clean-up cache dir if implicitly created by pkgrecv.
        # User's cache-dirs should be preserved
        tmpdirs.append(cache_dir)
    else:
        if clone:
            usage(_("--clone can not be used with -c.\n"
                "Content will be downloaded directly to the "
                "destination repository and re-downloading after a "
                "pkgrecv failure will not be required."))

    # --clone is incompatible with most other modes of operation.
    if clone and raw:
        usage(_("--clone can not be used with --raw.\n"))

    if clone and archive:
        usage(_("--clone can not be used with -a.\n"))

    if clone and list_newest:
        usage(_("--clone can not be used with --newest.\n"))

    if clone and pargs:
        usage(_("--clone does not support FMRI patterns"))

    if publishers and not clone:
        usage(_("-p can only be used with --clone.\n"))

    incoming_dir = tempfile.mkdtemp(dir=temp_root,
        prefix=global_settings.client_name + "-")
    tmpdirs.append(incoming_dir)

    # Create transport and transport config
    xport, xport_cfg = transport.setup_transport()
    xport_cfg.add_cache(cache_dir, readonly=False)
    xport_cfg.incoming_root = incoming_dir

    # Since publication destinations may only have one repository configured
    # per publisher, create destination as separate transport in case source
    # and destination have identical publisher configuration but different
    # repository endpoints.
    dest_xport, dest_xport_cfg = transport.setup_transport()
    dest_xport_cfg.add_cache(cache_dir, readonly=False)
    dest_xport_cfg.incoming_root = incoming_dir

    # Configure src publisher(s).
    transport.setup_publisher(src_uri, "source", xport, xport_cfg,
        remote_prefix=True, ssl_key=key, ssl_cert=cert)

    args = (pargs, target, list_newest, all_versions,
        all_timestamps, keep_compressed, raw, recursive, dry_run, verbose,
        dest_xport_cfg, src_uri, dkey, dcert)

    if clone:
        args += (publishers,)
        return clone_repo(*args)

    if archive:
        # Retrieving package data for archival requires a different mode
        # of operation so gets its own routine.  Notably, it requires
        # that all package data be retrieved before the archival process
        # is started.
        return archive_pkgs(*args)

    # Normal package transfer allows operations on a per-package basis.
    return transfer_pkgs(*args)
def check_processed(any_matched, any_unmatched, total_processed):
    """Abort with a match error if some pattern matched nothing for
    every publisher; return silently otherwise."""

    # A pattern only counts as unmatched if no publisher matched it.
    unmatched = set(any_unmatched) - set(any_matched)

    if not unmatched:
        return

    # Partial success exits with 3, total failure with 1.
    rval = 3 if total_processed > 0 else 1
    abort(str(apx.PackageMatchErrors(unmatched_fmris=unmatched)),
        retcode=rval)
def get_matches(src_pub, tracker, xport, pargs, any_unmatched, any_matched,
    all_versions, all_timestamps, recursive):
    """Returns the set of matching FMRIs for the given arguments."""
    global src_cat

    src_cat = fetch_catalog(src_pub, tracker, xport, False)

    if "*" in pargs or "*@*" in pargs:
        # The user asked for everything; skip the overhead of
        # pattern matching entirely.
        matches = list(src_cat.fmris())
    else:
        try:
            matches, refs, unmatched = \
                src_cat.get_matching_fmris(pargs)
        except apx.PackageMatchErrors as e:
            abort(str(e))

        # Track anything that failed to match.
        any_unmatched.extend(unmatched)
        any_matched.extend(set(p for p in refs.values()))
        matches = list(set(f for m in matches.values() for f in m))

    if not matches:
        # No matches at all; nothing to do for this publisher.
        return matches

    matches = prune(matches, all_versions, all_timestamps)
    if recursive:
        msg(_("Retrieving manifests for dependency "
            "evaluation ..."))
        matches = prune(get_dependencies(matches, xport_cfg, tracker),
            all_versions, all_timestamps)

    return matches
def archive_pkgs(pargs, target, list_newest, all_versions, all_timestamps,
    keep_compressed, raw, recursive, dry_run, verbose, dest_xport_cfg, src_uri,
    dkey, dcert):
    """Retrieve source package data completely and then archive it.

    All package content is downloaded first; the p5p archive at 'target'
    is then written in one pass.  Returns a pkgdefs EXIT_* code.
    (Note: the 'keep_compresed' parameter-name typo was corrected; the
    function is only ever invoked positionally via *args.)
    """

    global cache_dir, download_start, xport, xport_cfg

    target = os.path.abspath(target)
    if os.path.exists(target):
        error(_("Target archive '{0}' already "
            "exists.").format(target))
        abort()

    # Open the archive early so that permissions failures, etc. can be
    # detected before actual work is started.
    if not dry_run:
        pkg_arc = pkg.p5p.Archive(target, mode="w")

    basedir = tempfile.mkdtemp(dir=temp_root,
        prefix=global_settings.client_name + "-")
    tmpdirs.append(basedir)

    # Retrieve package data for all publishers.
    any_unmatched = []
    any_matched = []
    invalid_manifests = []
    total_processed = 0
    arc_bytes = 0
    archive_list = []
    for src_pub in xport_cfg.gen_publishers():
        # Root must be per publisher on the off chance that multiple
        # publishers have the same package.
        xport_cfg.pkg_root = os.path.join(basedir, src_pub.prefix)

        tracker = get_tracker()
        msg(_("Retrieving packages for publisher {0} ...").format(
            src_pub.prefix))
        # Idiomatic emptiness test (was 'pargs == None or len(...) == 0').
        if not pargs:
            usage(_("must specify at least one pkgfmri"))

        matches = get_matches(src_pub, tracker, xport, pargs,
            any_unmatched, any_matched, all_versions, all_timestamps,
            recursive)
        if not matches:
            # No matches at all; nothing to do for this publisher.
            continue

        # First, retrieve the manifests and calculate package transfer
        # sizes.
        npkgs = len(matches)
        get_bytes = 0
        get_files = 0

        if not recursive:
            msg(_("Retrieving and evaluating {0:d} package(s)...").format(
                npkgs))

        tracker.manifest_fetch_start(npkgs)

        good_matches = []
        for f in matches:
            try:
                m = get_manifest(f, xport_cfg)
            except apx.InvalidPackageErrors as e:
                invalid_manifests.extend(e.errors)
                continue
            good_matches.append(f)
            getb, getf, arcb, arccb = get_sizes(m)
            get_bytes += getb
            get_files += getf

            # Since files are going into the archive, progress
            # can be tracked in terms of compressed bytes for
            # the package files themselves.
            arc_bytes += arccb

            # Also include the manifest file itself in the
            # amount of bytes to archive.
            try:
                fs = os.stat(m.pathname)
                arc_bytes += fs.st_size
            except EnvironmentError as e:
                raise apx._convert_error(e)

            tracker.manifest_fetch_progress(completion=True)
        matches = good_matches

        tracker.manifest_fetch_done()

        # Next, retrieve the content for this publisher's packages.
        tracker.download_set_goal(len(matches), get_files,
            get_bytes)

        if verbose:
            if not dry_run:
                msg(_("\nArchiving packages ..."))
            else:
                msg(_("\nArchiving packages (dry-run) ..."))
            status = []
            status.append((_("Packages to add:"), str(len(matches))))
            status.append((_("Files to retrieve:"), str(get_files)))
            status.append((_("Estimated transfer size:"),
                misc.bytes_to_str(get_bytes)))

            rjust_status = max(len(s[0]) for s in status)
            rjust_value = max(len(s[1]) for s in status)
            for s in status:
                msg("{0} {1}".format(s[0].rjust(rjust_status),
                    s[1].rjust(rjust_value)))

            msg(_("\nPackages to archive:"))
            for f in sorted(matches):
                fmri = f.get_fmri(anarchy=True,
                    include_scheme=False)
                msg(fmri)
            msg()

        if dry_run:
            # Don't call download_done here; it would cause an
            # assertion failure since nothing was downloaded.
            # Instead, call the method that simply finishes
            # up the progress output.
            tracker.download_done(dryrun=True)
            cleanup()
            total_processed = len(matches)
            continue

        for f in matches:
            tracker.download_start_pkg(f)
            pkgdir = xport_cfg.get_pkg_dir(f)
            mfile = xport.multi_file_ni(src_pub, pkgdir,
                progtrack=tracker)
            m = get_manifest(f, xport_cfg)
            add_hashes_to_multi(m, mfile)

            if mfile:
                download_start = True
                mfile.wait_files()

            if not dry_run:
                archive_list.append((f, m.pathname, pkgdir))

            # Nothing more to do for this package.
            tracker.download_end_pkg(f)
            total_processed += 1

        tracker.download_done()
        tracker.reset()

    # Check processed patterns and abort with failure if some were
    # unmatched.
    check_processed(any_matched, any_unmatched, total_processed)

    if not dry_run:
        # Now create archive and then archive retrieved package data.
        while archive_list:
            pfmri, mpath, pkgdir = archive_list.pop()
            pkg_arc.add_package(pfmri, mpath, pkgdir)
        pkg_arc.close(progtrack=tracker)

    # Dump all temporary data.
    cleanup()

    if invalid_manifests:
        error(_("The following errors were encountered. The packages "
            "listed were not\nreceived.\n{0}").format(
            "\n".join(str(im) for im in invalid_manifests)))
    if invalid_manifests and total_processed:
        return pkgdefs.EXIT_PARTIAL
    if invalid_manifests:
        return pkgdefs.EXIT_OOPS
    return pkgdefs.EXIT_OK
776 def clone_repo(pargs, target, list_newest, all_versions, all_timestamps,
777 keep_compressed, raw, recursive, dry_run, verbose, dest_xport_cfg, src_uri,
778 dkey, dcert, publishers):
780 global cache_dir, download_start, xport, xport_cfg, dest_xport
782 invalid_manifests = []
783 total_processed = 0
784 modified_pubs = set()
785 deleted_pkgs = False
786 old_c_root = {}
787 del_search_index = set()
789 # Turn target into a valid URI.
790 target = publisher.RepositoryURI(misc.parse_uri(target))
792 if target.scheme != "file":
793 abort(err=_("Destination clone repository must be "
794 "filesystem-based."))
796 # Initialize the target repo.
797 try:
798 repo = sr.Repository(read_only=False,
799 root=target.get_pathname())
800 except sr.RepositoryInvalidError as e:
801 txt = str(e) + "\n\n"
802 txt += _("To create a repository, use the pkgrepo command.")
803 abort(err=txt)
805 def copy_catalog(src_cat_root, pub):
806 # Copy catalog files.
807 c_root = repo.get_pub_rstore(pub).catalog_root
808 rstore_root = repo.get_pub_rstore(pub).root
809 try:
810 # We just use mkdtemp() to find ourselves a directory
811 # which does not already exist. The created dir is not
812 # used.
813 old_c_root = tempfile.mkdtemp(dir=rstore_root,
814 prefix='catalog-')
815 shutil.rmtree(old_c_root)
816 shutil.move(c_root, old_c_root)
817 shutil.copytree(src_cat_root, c_root)
818 except Exception as e:
819 abort(err=_("Unable to copy catalog files: {0}").format(
821 return old_c_root
823 # Check if all publishers in src are also in target. If not, add
824 # depending on what publishers were specified by user.
825 pubs_to_sync = []
826 pubs_to_add = []
827 src_pubs = {}
828 for sp in xport_cfg.gen_publishers():
829 src_pubs[sp.prefix] = sp
830 dst_pubs = repo.get_publishers()
832 pubs_specified = False
833 unknown_pubs = []
834 for p in publishers:
835 if p not in src_pubs and p != '*':
836 abort(err=_("The publisher {0} does not exist in the "
837 "source repository.".format(p)))
838 pubs_specified = True
840 for sp in src_pubs:
841 if sp not in dst_pubs and (sp in publishers or \
842 '*' in publishers):
843 pubs_to_add.append(src_pubs[sp])
844 pubs_to_sync.append(src_pubs[sp])
845 elif sp in dst_pubs and (sp in publishers or '*' in publishers
846 or not pubs_specified):
847 pubs_to_sync.append(src_pubs[sp])
848 elif not pubs_specified:
849 unknown_pubs.append(sp)
851 # We only print warning if the user didn't specify any valid publishers
852 # to add/sync.
853 if len(unknown_pubs):
854 txt = _("\nThe following publishers are present in the "
855 "source repository but not in the target repository.\n"
856 "Please use -p to specify which publishers need to be "
857 "cloned or -p '*' to clone all publishers.")
858 for p in unknown_pubs:
859 txt += "\n {0}\n".format(p)
860 abort(err=txt)
862 # Create non-existent publishers.
863 for p in pubs_to_add:
864 if not dry_run:
865 msg(_("Adding publisher {0} ...").format(p.prefix))
866 # add_publisher() will create a p5i file in the repo
867 # store, containing origin and possible mirrors from
868 # the src repo. These may not be valid for the new repo
869 # so skip creation of this file.
870 repo.add_publisher(p, skip_config=True)
871 else:
872 msg(_("Adding publisher {0} (dry-run) ...").format(
873 p.prefix))
875 for src_pub in pubs_to_sync:
876 msg(_("Processing packages for publisher {0} ...").format(
877 src_pub.prefix))
878 tracker = get_tracker()
880 src_basedir = tempfile.mkdtemp(dir=temp_root,
881 prefix=global_settings.client_name + "-")
882 tmpdirs.append(src_basedir)
884 xport_cfg.pkg_root = src_basedir
886 # We make the destination repo our cache directory to save on
887 # IOPs. Have to remove all the old caches first.
888 if not dry_run:
889 xport_cfg.clear_caches(shared=True)
890 xport_cfg.add_cache(
891 repo.get_pub_rstore(src_pub.prefix).file_root,
892 readonly=False)
894 # Retrieve src and dest catalog for comparison.
895 src_pub.meta_root = src_basedir
897 src_cat = fetch_catalog(src_pub, tracker, xport, False,
898 include_updates=True)
899 src_cat_root = src_cat.meta_root
901 try:
902 targ_cat = repo.get_catalog(pub=src_pub.prefix)
903 except sr.RepositoryUnknownPublisher:
904 targ_cat = catalog.Catalog(read_only=True)
906 src_fmris = set([x for x in src_cat.fmris(last=False)])
907 targ_fmris = set([x for x in targ_cat.fmris(last=False)])
909 del src_cat
910 del targ_cat
912 to_add = []
913 to_rm = []
915 # We use bulk prefetching for faster transport of the manifests.
916 # Prefetch requires an intent which it sends to the server. Here
917 # we just use operation=clone for all FMRIs.
918 intent = "operation=clone;"
920 # Find FMRIs which need to be added/removed.
921 to_add_set = src_fmris - targ_fmris
922 to_rm = targ_fmris - src_fmris
924 for f in to_add_set:
925 to_add.append((f, intent))
927 del src_fmris
928 del targ_fmris
929 del to_add_set
931 # We have to do package removal first because after the sync we
932 # don't have the old catalog anymore and if we delete packages
933 # after the sync based on the current catalog we might delete
934 # files required by packages still in the repo.
935 if len(to_rm) > 0:
936 msg(_("Packages to remove:"))
937 for f in to_rm:
938 msg(" {0}".format(f.get_fmri(anarchy=True,
939 include_build=False)))
941 if not dry_run:
942 msg(_("Removing packages ..."))
943 if repo.get_pub_rstore(
944 src_pub.prefix).search_available:
945 del_search_index.add(src_pub.prefix)
946 repo.remove_packages(to_rm, progtrack=tracker,
947 pub=src_pub.prefix)
948 deleted_pkgs = True
949 total_processed += len(to_rm)
950 modified_pubs.add(src_pub.prefix)
952 if len(to_add) == 0:
953 msg(_("No packages to add."))
954 if deleted_pkgs:
955 old_c_root[src_pub.prefix] = copy_catalog(
956 src_cat_root, src_pub.prefix)
957 continue
959 get_bytes = 0
960 get_files = 0
962 msg(_("Retrieving and evaluating {0:d} package(s)...").format(
963 len(to_add)))
965 # Retrieve manifests.
966 # Try prefetching manifests in bulk first for faster, parallel
967 # transport. Retryable errors during prefetch are ignored and
968 # manifests are retrieved again during the "Reading" phase.
969 src_pub.transport.prefetch_manifests(to_add, progtrack=tracker)
971 # Need to change the output of mfst_fetch since otherwise we
972 # would see "Download Manifests x/y" twice, once from the
973 # prefetch and once from the actual manifest analysis.
974 old_gti = tracker.mfst_fetch
975 tracker.mfst_fetch = progress.GoalTrackerItem(
976 _("Reading Manifests"))
977 tracker.manifest_fetch_start(len(to_add))
978 for f, i in to_add:
979 try:
980 m = get_manifest(f, xport_cfg)
981 except apx.InvalidPackageErrors as e:
982 invalid_manifests.extend(e.errors)
983 continue
984 getb, getf, sendb, sendcb = get_sizes(m)
985 get_bytes += getb
986 get_files += getf
988 if dry_run:
989 tracker.manifest_fetch_progress(completion=True)
990 continue
992 # Move manifest into dest repo.
993 targ_path = os.path.join(
994 repo.get_pub_rstore(src_pub.prefix).root, 'pkg')
995 dp = m.fmri.get_dir_path()
996 dst_path = os.path.join(targ_path, dp)
997 src_path = os.path.join(src_basedir, dp, 'manifest')
998 dir_name = os.path.dirname(dst_path)
999 try:
1000 misc.makedirs(dir_name)
1001 shutil.move(src_path, dst_path)
1002 except Exception as e:
1003 txt = _("Unable to copy manifest: {0}").format(e)
1004 abort(err=txt)
1006 tracker.manifest_fetch_progress(completion=True)
1008 tracker.manifest_fetch_done()
1009 # Restore old GoalTrackerItem for manifest download.
1010 tracker.mfst_fetch = old_gti
1012 if verbose:
1013 if not dry_run:
1014 msg(_("\nRetrieving packages ..."))
1015 else:
1016 msg(_("\nRetrieving packages (dry-run) ..."))
1018 status = []
1019 status.append((_("Packages to add:"), str(len(to_add))))
1020 status.append((_("Files to retrieve:"), str(get_files)))
1021 status.append((_("Estimated transfer size:"),
1022 misc.bytes_to_str(get_bytes)))
1024 rjust_status = max(len(s[0]) for s in status)
1025 rjust_value = max(len(s[1]) for s in status)
1026 for s in status:
1027 msg("{0} {1}".format(s[0].rjust(rjust_status),
1028 s[1].rjust(rjust_value)))
1030 msg(_("\nPackages to transfer:"))
1031 for f, i in sorted(to_add):
1032 fmri = f.get_fmri(anarchy=True,
1033 include_scheme=False)
1034 msg("{0}".format(fmri))
1035 msg()
1037 if dry_run:
1038 continue
1040 tracker.download_set_goal(len(to_add), get_files, get_bytes)
1042 # Retrieve package files.
1043 for f, i in to_add:
1044 tracker.download_start_pkg(f)
1045 mfile = xport.multi_file_ni(src_pub, None,
1046 progtrack=tracker)
1047 m = get_manifest(f, xport_cfg)
1048 add_hashes_to_multi(m, mfile)
1050 if mfile:
1051 mfile.wait_files()
1053 tracker.download_end_pkg(f)
1054 total_processed += 1
1056 tracker.download_done
1057 tracker.reset()
1059 modified_pubs.add(src_pub.prefix)
1060 old_c_root[src_pub.prefix] = copy_catalog(src_cat_root,
1061 src_pub.prefix)
1063 if invalid_manifests:
1064 error(_("The following packages could not be retrieved:\n{0}").format(
1065 "\n".join(str(im) for im in invalid_manifests)))
1067 ret = 0
1068 # Run pkgrepo verify to check repo.
1069 if total_processed:
1070 msg(_("\n\nVerifying repository contents."))
1071 cmd = os.path.join(os.path.dirname(misc.api_cmdpath()),
1072 "pkgrepo")
1073 args = [cmd, 'verify', '-s',
1074 target.get_pathname(), '--disable', 'dependency']
1076 try:
1077 ret = subprocess.call(args)
1078 except OSError as e:
1079 raise RuntimeError("cannot execute {0}: {1}".format(
1080 args, e))
1082 # Cleanup. If verification was ok, remove backup copy of old catalog.
1083 # If not, move old catalog back into place and remove messed up catalog.
1084 for pub in modified_pubs:
1085 c_root = repo.get_pub_rstore(pub).catalog_root
1086 try:
1087 if ret:
1088 shutil.rmtree(c_root)
1089 shutil.move(old_c_root[pub], c_root)
1090 else:
1091 shutil.rmtree(old_c_root[pub])
1092 except Exception as e:
1093 error(_("Unable to remove catalog files: {0}").format(e))
1094 # We don't abort here to make sure we can
1095 # restore/delete as much as we can.
1096 continue
1098 if ret:
1099 txt = _("Pkgrepo verify found errors in the updated repository."
1100 "\nThe original package catalog has been restored.\n")
1101 if deleted_pkgs:
1102 txt += _("Deleted packages can not be restored.\n")
1103 txt += _("The clone operation can be retried; package content "
1104 "that has already been retrieved will not be downloaded "
1105 "again.")
1106 abort(err=txt)
1108 if del_search_index:
1109 txt = _("\nThe search index for the following publishers has "
1110 "been removed due to package removals.\n")
1111 for p in del_search_index:
1112 txt += " {0}\n".format(p)
1113 txt += _("\nTo restore the search index for all publishers run"
1114 "\n'pkgrepo refresh --no-catalog -s {0}'.\n").format(
1115 target.get_pathname())
1116 msg(txt)
1118 cleanup()
1119 if invalid_manifests and total_processed:
1120 return pkgdefs.EXIT_PARTIAL
1121 if invalid_manifests:
1122 return pkgdefs.EXIT_OOPS
1123 return pkgdefs.EXIT_OK
def transfer_pkgs(pargs, target, list_newest, all_versions, all_timestamps,
    keep_compressed, raw, recursive, dry_run, verbose, dest_xport_cfg, src_uri,
    dkey, dcert):
        """Retrieve source package data and optionally republish it as each
        package is retrieved.

        'pargs' is the list of package FMRI patterns to retrieve; required
        unless 'list_newest' is set, in which case it must be empty.

        'target' is the destination: a repository URI when republishing, or
        a directory path when 'raw' is set.

        'list_newest' only lists the newest versions from each source
        publisher's catalog and performs no retrieval.

        'dkey' and 'dcert' are the optional SSL key/certificate used to
        contact the destination repository.

        Returns one of the pkgdefs.EXIT_* codes: EXIT_OK on success,
        EXIT_PARTIAL if some manifests were invalid but some packages were
        processed, EXIT_OOPS if only invalid manifests were found.  Calls
        abort() (which exits) on fatal transport/transaction errors.
        """

        global cache_dir, download_start, xport, xport_cfg, dest_xport, targ_pub

        any_unmatched = []
        any_matched = []
        invalid_manifests = []
        total_processed = 0

        for src_pub in xport_cfg.gen_publishers():
                tracker = get_tracker()
                if list_newest:
                        # Make sure the prog tracker knows we're doing a listing
                        # operation so that it suppresses irrelevant output.
                        tracker.set_purpose(tracker.PURPOSE_LISTING)

                        # --newest is a pure listing mode; any pattern
                        # arguments are a usage error.  (The original
                        # 'pargs or len(pargs) > 0' test was redundant and
                        # raised TypeError for pargs=None.)
                        if pargs:
                                usage(_("--newest takes no options"))

                        src_cat = fetch_catalog(src_pub, tracker,
                            xport, False)
                        for f in src_cat.fmris(ordered=True, last=True):
                                msg(f.get_fmri(include_build=False))
                        continue

                msg(_("Processing packages for publisher {0} ...").format(
                    src_pub.prefix))
                if not pargs:
                        usage(_("must specify at least one pkgfmri"))

                republish = False

                if not raw:
                        basedir = tempfile.mkdtemp(dir=temp_root,
                            prefix=global_settings.client_name + "-")
                        tmpdirs.append(basedir)
                        republish = True

                        # Turn target into a valid URI.
                        target = misc.parse_uri(target)

                        # Setup target for transport.
                        targ_pub = transport.setup_publisher(target,
                            src_pub.prefix, dest_xport, dest_xport_cfg,
                            ssl_key=dkey, ssl_cert=dcert)

                        # Files have to be decompressed for republishing.
                        keep_compressed = False
                        if target.startswith("file://"):
                                # Check to see if the repository exists first.
                                try:
                                        t = trans.Transaction(target,
                                            xport=dest_xport, pub=targ_pub)
                                except trans.TransactionRepositoryInvalidError as e:
                                        txt = str(e) + "\n\n"
                                        txt += _("To create a repository, use "
                                            "the pkgrepo command.")
                                        abort(err=txt)
                                except trans.TransactionRepositoryConfigError as e:
                                        txt = str(e) + "\n\n"
                                        txt += _("The repository configuration "
                                            "for the repository located at "
                                            "'{0}' is not valid or the "
                                            "specified path does not exist. "
                                            "Please correct the configuration "
                                            "of the repository or create a new "
                                            "one.").format(target)
                                        abort(err=txt)
                                except trans.TransactionError as e:
                                        abort(err=e)
                else:
                        # Raw mode: packages are stored on-disk under
                        # 'target' instead of being republished.
                        basedir = target = os.path.abspath(target)
                        if not os.path.exists(basedir):
                                try:
                                        os.makedirs(basedir, misc.PKG_DIR_MODE)
                                except Exception as e:
                                        error(_("Unable to create basedir "
                                            "'{dir}': {err}").format(
                                            dir=basedir, err=e))
                                        abort()

                xport_cfg.pkg_root = basedir
                dest_xport_cfg.pkg_root = basedir

                if republish:
                        # Destination catalog is used below to skip packages
                        # that are already present in the target repository.
                        targ_cat = fetch_catalog(targ_pub, tracker,
                            dest_xport, True)

                matches = get_matches(src_pub, tracker, xport, pargs,
                    any_unmatched, any_matched, all_versions, all_timestamps,
                    recursive)
                if not matches:
                        # No matches at all; nothing to do for this publisher.
                        continue

                def get_basename(pfmri):
                        # Transaction ids follow the server convention of
                        # <open-timestamp>_<quoted-fmri>.
                        open_time = pfmri.get_timestamp()
                        return "{0:d}_{1}".format(
                            calendar.timegm(open_time.utctimetuple()),
                            urllib.quote(str(pfmri), ""))

                # First, retrieve the manifests and calculate package transfer
                # sizes.
                npkgs = len(matches)
                get_bytes = 0
                get_files = 0
                send_bytes = 0

                if not recursive:
                        msg(_("Retrieving and evaluating {0:d} package(s)...").format(
                            npkgs))

                tracker.manifest_fetch_start(npkgs)

                pkgs_to_get = []
                while matches:
                        f = matches.pop()
                        if republish and targ_cat.get_entry(f):
                                # Already present at destination; skip it.
                                tracker.manifest_fetch_progress(completion=True)
                                continue
                        try:
                                m = get_manifest(f, xport_cfg)
                        except apx.InvalidPackageErrors as e:
                                invalid_manifests.extend(e.errors)
                                continue
                        pkgs_to_get.append(f)

                        getb, getf, sendb, sendcb = get_sizes(m)
                        get_bytes += getb
                        get_files += getf
                        if republish:
                                # For now, normal republication always uses
                                # uncompressed data as already compressed data
                                # is not supported for publication.
                                send_bytes += sendb

                        tracker.manifest_fetch_progress(completion=True)
                tracker.manifest_fetch_done()

                # Next, retrieve and store the content for each package.
                tracker.republish_set_goal(len(pkgs_to_get), get_bytes,
                    send_bytes)

                if verbose:
                        if not dry_run:
                                msg(_("\nRetrieving packages ..."))
                        else:
                                msg(_("\nRetrieving packages (dry-run) ..."))
                        status = []
                        status.append((_("Packages to add:"),
                            str(len(pkgs_to_get))))
                        status.append((_("Files to retrieve:"),
                            str(get_files)))
                        status.append((_("Estimated transfer size:"),
                            misc.bytes_to_str(get_bytes)))

                        rjust_status = max(len(s[0]) for s in status)
                        rjust_value = max(len(s[1]) for s in status)
                        for s in status:
                                msg("{0} {1}".format(s[0].rjust(rjust_status),
                                    s[1].rjust(rjust_value)))

                        msg(_("\nPackages to transfer:"))
                        for f in sorted(pkgs_to_get):
                                fmri = f.get_fmri(anarchy=True,
                                    include_scheme=False)
                                msg("{0}".format(fmri))
                        msg()

                if dry_run:
                        tracker.republish_done(dryrun=True)
                        cleanup()
                        continue

                processed = 0
                pkgs_to_get = sorted(pkgs_to_get)
                for f in pkgs_to_get:
                        tracker.republish_start_pkg(f)
                        pkgdir = xport_cfg.get_pkg_dir(f)
                        mfile = xport.multi_file_ni(src_pub, pkgdir,
                            not keep_compressed, tracker)
                        m = get_manifest(f, xport_cfg)
                        add_hashes_to_multi(m, mfile)

                        if mfile:
                                download_start = True
                                mfile.wait_files()

                        if not republish:
                                # Nothing more to do for this package.
                                tracker.republish_end_pkg(f)
                                continue

                        # Get first line of original manifest so that inclusion
                        # of the scheme can be determined.
                        use_scheme = True
                        contents = get_manifest(f, xport_cfg, contents=True)
                        if contents.splitlines()[0].find("pkg:/") == -1:
                                use_scheme = False

                        pkg_name = f.get_fmri(include_scheme=use_scheme)
                        pkgdir = xport_cfg.get_pkg_dir(f)

                        # This is needed so any previous failures for a package
                        # can be aborted.
                        trans_id = get_basename(f)

                        if not targ_pub:
                                targ_pub = transport.setup_publisher(target,
                                    src_pub.prefix, dest_xport, dest_xport_cfg,
                                    remote_prefix=True, ssl_key=dkey,
                                    ssl_cert=dcert)

                        try:
                                t = trans.Transaction(target, pkg_name=pkg_name,
                                    trans_id=trans_id, xport=dest_xport,
                                    pub=targ_pub, progtrack=tracker)

                                # Remove any previous failed attempt to
                                # to republish this package.
                                try:
                                        t.close(abandon=True)
                                except:
                                        # It might not exist already.
                                        pass

                                t.open()
                                for a in m.gen_actions():
                                        if a.name == "set" and \
                                            a.attrs.get("name", "") in ("fmri",
                                            "pkg.fmri"):
                                                # To be consistent with the
                                                # server, the fmri can't be
                                                # added to the manifest.
                                                continue

                                        if hasattr(a, "hash"):
                                                fname = os.path.join(pkgdir,
                                                    a.hash)
                                                # Bind fname as a default
                                                # argument so the callback
                                                # captures this iteration's
                                                # value; a bare closure would
                                                # see whatever fname was last
                                                # rebound to (late binding).
                                                a.data = lambda fname=fname: \
                                                    open(fname, "rb")
                                        t.add(a)
                                        if a.name == "signature":
                                                # We always store content in the
                                                # repository by the least-
                                                # preferred hash.
                                                for fp in a.get_chain_certs(
                                                    least_preferred=True):
                                                        fname = os.path.join(
                                                            pkgdir, fp)
                                                        t.add_file(fname)
                                # Always defer catalog update.
                                t.close(add_to_catalog=False)
                        except trans.TransactionError as e:
                                abort(err=e)

                        # Dump data retrieved so far after each successful
                        # republish to conserve space.
                        try:
                                shutil.rmtree(dest_xport_cfg.incoming_root)
                                shutil.rmtree(pkgdir)
                                if cache_dir in tmpdirs:
                                        # If cache_dir is listed in tmpdirs,
                                        # then it's safe to dump cache contents.
                                        # Otherwise, it's a user cache directory
                                        # and shouldn't be dumped.
                                        shutil.rmtree(cache_dir)
                                        misc.makedirs(cache_dir)
                        except EnvironmentError as e:
                                raise apx._convert_error(e)
                        misc.makedirs(dest_xport_cfg.incoming_root)

                        processed += 1
                        tracker.republish_end_pkg(f)

                tracker.republish_done()
                tracker.reset()

                if processed > 0:
                        # If any packages were published, trigger an update of
                        # the catalog.
                        total_processed += processed
                        dest_xport.publish_refresh_packages(targ_pub)

                # Prevent further use.
                targ_pub = None

        # Check processed patterns and abort with failure if some were
        # unmatched.
        check_processed(any_matched, any_unmatched, total_processed)

        # Dump all temporary data.
        cleanup()
        if invalid_manifests:
                error(_("The following errors were encountered. The packages "
                    "listed were not\nreceived.\n{0}").format(
                    "\n".join(str(im) for im in invalid_manifests)))
        if invalid_manifests and total_processed:
                return pkgdefs.EXIT_PARTIAL
        if invalid_manifests:
                return pkgdefs.EXIT_OOPS
        return pkgdefs.EXIT_OK
if __name__ == "__main__":

        # Make all warnings be errors.
        warnings.simplefilter('error')

        # Top-level exception boundary: translate every failure mode into an
        # exit code (__ret) and always attempt temporary-data cleanup first.
        # Convention: 1 = handled error, 99 = internal error or cleanup
        # failure.
        try:
                __ret = main_func()
        except (KeyboardInterrupt, apx.CanceledException):
                # User interruption; best-effort cleanup, non-zero exit.
                try:
                        cleanup(True)
                except:
                        __ret = 99
                else:
                        __ret = 1
        except (pkg.actions.ActionError, trans.TransactionError, RuntimeError,
            apx.ApiException) as _e:
                # Known operational failures: report, then clean up.
                error(_e)
                try:
                        cleanup(True)
                except:
                        __ret = 99
                else:
                        __ret = 1
        except PipeError:
                # We don't want to display any messages here to prevent
                # possible further broken pipe (EPIPE) errors.
                try:
                        cleanup(False)
                except:
                        __ret = 99
                else:
                        __ret = 1
        except SystemExit as _e:
                # Propagate the original exit status after cleanup.
                try:
                        cleanup(False)
                except:
                        __ret = 99
                raise _e
        except EnvironmentError as _e:
                # Only out-of-space conditions get special reporting; any
                # other OS error is re-raised to the generic handler below.
                if _e.errno != errno.ENOSPC and _e.errno != errno.EDQUOT:
                        raise

                txt = "\n"
                if _e.errno == errno.EDQUOT:
                        txt += _("Storage space quota exceeded.")
                else:
                        txt += _("No storage space left.")

                # List the directories the operation writes to so the user
                # knows which filesystems need space.
                temp_root_path = misc.get_temp_root_path()
                tdirs = [temp_root_path]
                if cache_dir not in tmpdirs:
                        # Only include in message if user specified.
                        tdirs.append(cache_dir)
                if target and target.startswith("file://"):
                        tdirs.append(target)

                txt += "\n"
                error(txt + _("Please verify that the filesystem containing "
                    "the following directories has enough space available:\n"
                    "{0}").format("\n".join(tdirs)))
                try:
                        cleanup()
                except:
                        __ret = 99
                else:
                        __ret = 1
        except:
                # Unexpected failure: show the traceback and a bug-report hint.
                traceback.print_exc()
                error(misc.get_traceback_message())
                __ret = 99
                # Cleanup must be called *after* error messaging so that
                # exceptions processed during cleanup don't cause the wrong
                # traceback to be printed.
                try:
                        cleanup(True)
                except:
                        pass
        sys.exit(__ret)