#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2
# as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#

#
# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
# Use is subject to license terms.
#

#
# Copyright 2008, 2011, Richard Lowe
#
'''
Workspace backup

Backup format is:
   backupdir/
      wsname/
         generation#/
            dirstate (handled by CdmUncommittedBackup)
                File containing dirstate nodeid (the changeset we need
                to update the workspace to after applying the bundle).
                This is the node to which the working copy changes
                (see 'diff', below) will be applied if applicable.

            bundle (handled by CdmCommittedBackup)
                An Hg bundle containing outgoing committed changes.

            nodes (handled by CdmCommittedBackup)
                A text file listing the full (hex) nodeid of all nodes in
                bundle, used by need_backup.

            diff (handled by CdmUncommittedBackup)
                A Git-formatted diff containing uncommitted changes.

            renames (handled by CdmUncommittedBackup)
                A list of renames in the working copy that have to be
                applied manually, rather than by the diff.

            metadata.tar.gz (handled by CdmMetadataBackup)
                $CODEMGR_WS/.hg/hgrc
                $CODEMGR_WS/.hg/localtags
                $CODEMGR_WS/.hg/patches (Mq data)

            clear.tar.gz (handled by CdmClearBackup)
                <short node>/
                    copies of each modified or added file, as it is in
                    this head.

                ... for each outgoing head

                working/
                    copies of each modified or added file in the
                    working copy, if any.

         latest -> generation#
            Newest backup generation.

All files in a given backup generation, with the exception of
dirstate, are optional.
'''
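
#
# Illustrative note (not part of the original documentation): with the
# default settings a backup of a workspace named "myws" ends up under
# ~/cdm.backup/myws/<generation>/, with "latest" a symlink to the newest
# generation directory (see CdmBackup._find_backup_dir and _update_latest
# below).
#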

import grp, os, pwd, shutil, tarfile, time, traceback
from cStringIO import StringIO

from mercurial import changegroup, cmdutil, error, node, patch, util
from onbld.Scm import Version

class CdmNodeMissing(util.Abort):
    '''a required node is not present in the destination workspace.

    This may occur both in the case where the bundle contains a
    changeset which is a child of a node not present in the
    destination workspace (because the destination workspace is not as
    up-to-date as the source), or because the source and destination
    workspace are not related.

    It may also happen in cases where the uncommitted changes need to
    be applied onto a node that the workspace does not possess even
    after application of the bundle (on a branch not present
    in the bundle or destination workspace, for instance)'''

    def __init__(self, msg, name):
        #
        # If e.name is a string 20 characters long, it is
        # assumed to be a node.  (Mercurial makes this
        # same assumption, when creating a LookupError)
        #
        if isinstance(name, str) and len(name) == 20:
            n = node.short(name)
        else:
            n = name

        util.Abort.__init__(self, "%s: changeset '%s' is missing\n"
                            "Your workspace is either not "
                            "sufficiently up to date,\n"
                            "or is unrelated to the workspace from "
                            "which the backup was taken.\n" % (msg, n))

class CdmTarFile(tarfile.TarFile):
    '''Tar file access + simple comparison to the filesystem, and
    creation/addition of files from Mercurial filectx objects.'''

    def __init__(self, *args, **kwargs):
        tarfile.TarFile.__init__(self, *args, **kwargs)
        self.errorlevel = 2

    def members_match_fs(self, rootpath):
        '''Compare the contents of the tar archive to the directory
        specified by rootpath.  Return False if they differ.

        Every file in the archive must match the equivalent file in
        the filesystem.

        The existence, modification time, and size of each file are
        compared; content is not.'''

        def _member_matches_fs(member, rootpath):
            '''Compare a single member to its filesystem counterpart'''
            fpath = os.path.join(rootpath, member.name)

            if not os.path.exists(fpath):
                return False
            elif ((os.path.isfile(fpath) != member.isfile()) or
                  (os.path.isdir(fpath) != member.isdir()) or
                  (os.path.islink(fpath) != member.issym())):
                return False

            #
            # The filesystem may return a modification time with a
            # fractional component (as a float), whereas the tar format
            # only stores it to the whole second; perform the comparison
            # using integers (truncated, not rounded)
            #
            elif member.mtime != int(os.path.getmtime(fpath)):
                return False
            elif not member.isdir() and member.size != os.path.getsize(fpath):
                return False
            else:
                return True

        for elt in self:
            if not _member_matches_fs(elt, rootpath):
                return False

        return True

    def addfilectx(self, filectx, path=None):
        '''Add a filectx object to the archive.

        Use the path specified by the filectx object or, if specified,
        the PATH argument.

        The size, modification time, type and permissions of the tar
        member are taken from the filectx object; user and group id
        are those of the invoking user; user and group name are those
        of the invoking user if information is available, or "unknown"
        if it is not.
        '''

        t = tarfile.TarInfo(path or filectx.path())
        t.size = filectx.size()
        t.mtime = filectx.date()[0]
        t.uid = os.getuid()
        t.gid = os.getgid()

        try:
            t.uname = pwd.getpwuid(t.uid).pw_name
        except KeyError:
            t.uname = "unknown"

        try:
            t.gname = grp.getgrgid(t.gid).gr_name
        except KeyError:
            t.gname = "unknown"

        #
        # Mercurial versions symlinks by setting a flag and storing
        # the destination path in place of the file content.  The
        # actual contents (in the tar) should be empty.
        #
        if 'l' in filectx.flags():
            t.type = tarfile.SYMTYPE
            t.mode = 0777
            t.linkname = filectx.data()
            data = None
        else:
            t.type = tarfile.REGTYPE
            t.mode = 'x' in filectx.flags() and 0755 or 0644
            data = StringIO(filectx.data())

        self.addfile(t, data)
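
#
# Illustrative use of CdmTarFile.addfilectx (hypothetical path names):
#
#     tar.addfilectx(ctx.filectx('usr/src/foo.c'),
#                    os.path.join(node.short(ctx.node()), 'usr/src/foo.c'))
#
# This mirrors what CdmClearBackup.backup() does further below.
#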

class CdmCommittedBackup(object):
    '''Backup of committed changes'''

    def __init__(self, backup, ws):
        self.ws = ws
        self.bu = backup
        self.files = ('bundle', 'nodes')

    def _outgoing_nodes(self, parent):
        '''Return a list of all outgoing nodes in hex format'''

        if parent:
            outgoing = self.ws.findoutgoing(parent)
            nodes = self.ws.repo.changelog.nodesbetween(outgoing)[0]
            return map(node.hex, nodes)
        else:
            return []
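
    #
    # Explanatory note (not from the original): findoutgoing() returns,
    # roughly, the roots of the changesets the parent workspace lacks;
    # changelog.nodesbetween() then expands those roots into the full
    # set of outgoing changesets.
    #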

    def backup(self):
        '''Backup committed changes'''
        parent = self.ws.parent()

        if not parent:
            self.ws.ui.warn('Workspace has no parent, committed changes will '
                            'not be backed up\n')
            return

        out = self.ws.findoutgoing(parent)
        if not out:
            return

        cg = self.ws.repo.changegroup(out, 'bundle')
        changegroup.writebundle(cg, self.bu.backupfile('bundle'), 'HG10BZ')

        outnodes = self._outgoing_nodes(parent)
        if not outnodes:
            return

        fp = None
        try:
            try:
                fp = self.bu.open('nodes', 'w')
                fp.write('%s\n' % '\n'.join(outnodes))
            except EnvironmentError, e:
                raise util.Abort("couldn't store outgoing nodes: %s" % e)
        finally:
            if fp and not fp.closed:
                fp.close()

    def restore(self):
        '''Restore committed changes from backup'''

        if not self.bu.exists('bundle'):
            return

        bpath = self.bu.backupfile('bundle')
        f = None
        try:
            try:
                f = self.bu.open('bundle')
                bundle = changegroup.readbundle(f, bpath)
                self.ws.repo.addchangegroup(bundle, 'strip',
                                            'bundle:%s' % bpath)
            except EnvironmentError, e:
                raise util.Abort("couldn't restore committed changes: %s\n"
                                 "   %s" % (bpath, e))
            except error.LookupError, e:
                raise CdmNodeMissing("couldn't restore committed changes",
                                     e.name)
        finally:
            if f and not f.closed:
                f.close()

    def need_backup(self):
        '''Compare backup of committed changes to workspace'''

        if self.bu.exists('nodes'):
            f = None
            try:
                try:
                    f = self.bu.open('nodes')
                    bnodes = set(line.rstrip('\r\n') for line in f.readlines())
                    f.close()
                except EnvironmentError, e:
                    raise util.Abort("couldn't open backup node list: %s" % e)
            finally:
                if f and not f.closed:
                    f.close()
        else:
            bnodes = set()

        outnodes = set(self._outgoing_nodes(self.ws.parent()))

        #
        # If there are outgoing nodes not in the prior backup we need
        # to take a new backup; it's fine if there are nodes in the
        # old backup which are no longer outgoing, however.
        #
        if not outnodes <= bnodes:
            return True

        return False

    def cleanup(self):
        '''Remove backed up committed changes'''

        for f in self.files:
            self.bu.unlink(f)

class CdmUncommittedBackup(object):
    '''Backup of uncommitted changes'''

    def __init__(self, backup, ws):
        self.ws = ws
        self.bu = backup
        self.wctx = self.ws.workingctx(worklist=True)

    def _clobbering_renames(self):
        '''Return a list of pairs of files representing renames/copies
        that clobber already versioned files.  [(old-name, new-name)...]
        '''

        #
        # Note that this doesn't handle uncommitted merges
        # as CdmUncommittedBackup itself doesn't.
        #
        parent = self.wctx.parents()[0]

        ret = []
        for fname in self.wctx.added() + self.wctx.modified():
            rn = self.wctx.filectx(fname).renamed()
            if rn and fname in parent:
                ret.append((rn[0], fname))
        return ret
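
    #
    # Illustrative note: a "clobbering" rename is one whose destination
    # (the new name) is already a versioned file in the parent changeset,
    # e.g. 'hg mv -f bar foo' when foo is already tracked.  The git-style
    # diff alone won't re-establish the copy record, so such pairs are
    # written to the 'renames' file and replayed via ws.copy() on restore.
    #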

    def backup(self):
        '''Backup uncommitted changes'''

        if self.ws.merged():
            raise util.Abort("Unable to backup an uncommitted merge.\n"
                             "Please complete your merge and commit")

        dirstate = node.hex(self.wctx.parents()[0].node())

        fp = None
        try:
            try:
                fp = self.bu.open('dirstate', 'w')
                fp.write(dirstate + '\n')
                fp.close()
            except EnvironmentError, e:
                raise util.Abort("couldn't save working copy parent: %s" % e)

            try:
                fp = self.bu.open('renames', 'w')
                for cons in self._clobbering_renames():
                    fp.write("%s %s\n" % cons)
                fp.close()
            except EnvironmentError, e:
                raise util.Abort("couldn't save clobbering copies: %s" % e)

            try:
                fp = self.bu.open('diff', 'w')
                match = self.ws.matcher(files=self.wctx.files())
                fp.write(self.ws.diff(opts={'git': True}, match=match))
            except EnvironmentError, e:
                raise util.Abort("couldn't save working copy diff: %s" % e)
        finally:
            if fp and not fp.closed:
                fp.close()

    def _dirstate(self):
        '''Return the desired working copy node from the backup'''
        fp = None
        try:
            try:
                fp = self.bu.open('dirstate')
                dirstate = fp.readline().strip()
            except EnvironmentError, e:
                raise util.Abort("couldn't read saved parent: %s" % e)
        finally:
            if fp and not fp.closed:
                fp.close()

        return dirstate

    def restore(self):
        '''Restore uncommitted changes'''
        dirstate = self._dirstate()

        #
        # Check that the patch's parent changeset exists.
        #
        try:
            n = node.bin(dirstate)
            self.ws.repo.changelog.lookup(n)
        except error.LookupError, e:
            raise CdmNodeMissing("couldn't restore uncommitted changes",
                                 e.name)

        try:
            self.ws.clean(rev=dirstate)
        except util.Abort, e:
            raise util.Abort("couldn't update to saved node: %s" % e)

        if not self.bu.exists('diff'):
            return

        #
        # There's a race here whereby if the patch (or part thereof)
        # is applied within the same second as the clean above (such
        # that modification time doesn't change) and if the size of
        # that file does not change, Hg may not see the change.
        #
        # We sleep a full second to avoid this, as sleeping merely
        # until the next second begins would require very close clock
        # synchronization on network filesystems.
        #
        time.sleep(1)

        files = {}
        try:
            diff = self.bu.backupfile('diff')
            try:
                fuzz = patch.patch(diff, self.ws.ui, strip=1,
                                   cwd=self.ws.repo.root, files=files)
                if fuzz:
                    raise util.Abort('working copy diff applied with fuzz')
            except Exception, e:
                raise util.Abort("couldn't apply working copy diff: %s\n"
                                 "   %s" % (diff, e))
        finally:
            if Version.at_least("1.7"):
                cmdutil.updatedir(self.ws.ui, self.ws.repo, files)
            else:
                patch.updatedir(self.ws.ui, self.ws.repo, files)

        if not self.bu.exists('renames'):
            return

        #
        # We need to re-apply name changes where the new name
        # (rename/copy destination) is an already versioned file, as
        # Hg would otherwise ignore them.
        #
        try:
            fp = self.bu.open('renames')
            for line in fp:
                source, dest = line.strip().split()
                self.ws.copy(source, dest)
        except EnvironmentError, e:
            raise util.Abort('unable to open renames file: %s' % e)
        except ValueError:
            raise util.Abort('corrupt renames file: %s' %
                             self.bu.backupfile('renames'))

    def need_backup(self):
        '''Compare backup of uncommitted changes to workspace'''
        cnode = self.wctx.parents()[0].node()
        if self._dirstate() != node.hex(cnode):
            return True

        fd = None
        match = self.ws.matcher(files=self.wctx.files())
        curdiff = self.ws.diff(opts={'git': True}, match=match)

        try:
            if self.bu.exists('diff'):
                try:
                    fd = self.bu.open('diff')
                    backdiff = fd.read()
                    fd.close()
                except EnvironmentError, e:
                    raise util.Abort("couldn't open backup diff %s\n"
                                     "   %s" % (self.bu.backupfile('diff'), e))
            else:
                backdiff = ''

            if backdiff != curdiff:
                return True

            currrenamed = self._clobbering_renames()
            bakrenamed = None

            if self.bu.exists('renames'):
                try:
                    fd = self.bu.open('renames')
                    bakrenamed = [tuple(line.strip().split(' ')) for line in fd]
                    fd.close()
                except EnvironmentError, e:
                    raise util.Abort("couldn't open renames file %s: %s\n" %
                                     (self.bu.backupfile('renames'), e))

            if currrenamed != bakrenamed:
                return True
        finally:
            if fd and not fd.closed:
                fd.close()

        return False

    def cleanup(self):
        '''Remove backed up uncommitted changes'''

        for f in ('dirstate', 'diff', 'renames'):
            self.bu.unlink(f)

class CdmMetadataBackup(object):
    '''Backup of workspace metadata'''

    def __init__(self, backup, ws):
        self.bu = backup
        self.ws = ws
        self.files = ('hgrc', 'localtags', 'patches', 'cdm')

    def backup(self):
        '''Backup workspace metadata'''

        tarpath = self.bu.backupfile('metadata.tar.gz')

        #
        # Files is a list of tuples (name, path), where name is as in
        # self.files, and path is the absolute path.
        #
        files = filter(lambda (name, path): os.path.exists(path),
                       zip(self.files, map(self.ws.repo.join, self.files)))

        if not files:
            return

        try:
            tar = CdmTarFile.gzopen(tarpath, 'w')
        except (EnvironmentError, tarfile.TarError), e:
            raise util.Abort("couldn't open %s for writing: %s" %
                             (tarpath, e))

        try:
            for name, path in files:
                try:
                    tar.add(path, name)
                except (EnvironmentError, tarfile.TarError), e:
                    #
                    # tarfile.TarError doesn't include the tar member or file
                    # in question, so we have to do so ourselves.
                    #
                    if isinstance(e, tarfile.TarError):
                        errstr = "%s: %s" % (name, e)
                    else:
                        errstr = str(e)

                    raise util.Abort("couldn't backup metadata to %s:\n"
                                     "  %s" % (tarpath, errstr))
        finally:
            tar.close()

    def old_restore(self):
        '''Restore workspace metadata from a pre-tar backup'''

        for fname in self.files:
            if self.bu.exists(fname):
                bfile = self.bu.backupfile(fname)
                wfile = self.ws.repo.join(fname)

                try:
                    shutil.copy2(bfile, wfile)
                except EnvironmentError, e:
                    raise util.Abort("couldn't restore metadata from %s:\n"
                                     "   %s" % (bfile, e))

    def tar_restore(self):
        '''Restore workspace metadata (from a tar-style backup)'''

        if not self.bu.exists('metadata.tar.gz'):
            return

        tarpath = self.bu.backupfile('metadata.tar.gz')

        try:
            tar = CdmTarFile.gzopen(tarpath)
        except (EnvironmentError, tarfile.TarError), e:
            raise util.Abort("couldn't open %s: %s" % (tarpath, e))

        try:
            for elt in tar:
                try:
                    tar.extract(elt, path=self.ws.repo.path)
                except (EnvironmentError, tarfile.TarError), e:
                    # Make sure the member name is in the exception message.
                    if isinstance(e, tarfile.TarError):
                        errstr = "%s: %s" % (elt.name, e)
                    else:
                        errstr = str(e)

                    raise util.Abort("couldn't restore metadata from %s:\n"
                                     "   %s" %
                                     (tarpath, errstr))
        finally:
            if tar and not tar.closed:
                tar.close()

    def restore(self):
        '''Restore workspace metadata'''

        if self.bu.exists('hgrc'):
            self.old_restore()
        else:
            self.tar_restore()

    def _walk(self):
        '''Yield the repo-relative path to each file we operate on,
        including each file within any affected directory'''

        for elt in self.files:
            path = self.ws.repo.join(elt)

            if not os.path.exists(path):
                continue

            if os.path.isdir(path):
                for root, dirs, files in os.walk(path, topdown=True):
                    yield root

                    for f in files:
                        yield os.path.join(root, f)
            else:
                yield path
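
    #
    # Illustrative note: for a workspace at /ws this yields paths such as
    # /ws/.hg/hgrc and /ws/.hg/localtags and, for the patches directory,
    # /ws/.hg/patches itself plus every file beneath it.
    #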

    def need_backup(self):
        '''Compare backed up workspace metadata to workspace'''

        def strip_trailing_pathsep(pathname):
            '''Remove a possible trailing path separator from PATHNAME'''
            return pathname.endswith('/') and pathname[:-1] or pathname

        if self.bu.exists('metadata.tar.gz'):
            tarpath = self.bu.backupfile('metadata.tar.gz')
            try:
                tar = CdmTarFile.gzopen(tarpath)
            except (EnvironmentError, tarfile.TarError), e:
                raise util.Abort("couldn't open metadata tarball: %s\n"
                                 "   %s" % (tarpath, e))

            if not tar.members_match_fs(self.ws.repo.path):
                tar.close()
                return True

            tarnames = map(strip_trailing_pathsep, tar.getnames())
            tar.close()
        else:
            tarnames = []

        repopath = self.ws.repo.path
        if not repopath.endswith('/'):
            repopath += '/'

        for path in self._walk():
            if path.replace(repopath, '', 1) not in tarnames:
                return True

        return False

    def cleanup(self):
        '''Remove backed up workspace metadata'''
        self.bu.unlink('metadata.tar.gz')

class CdmClearBackup(object):
    '''A backup (in tar format) of complete source files from every
    workspace head.

    Paths in the tarball are prefixed by the revision and node of the
    head, or "working" for the working directory.

    This is done purely for the benefit of the user, and as such takes
    no part in restore or need_backup checking; restore always
    succeeds and need_backup always returns False.
    '''

    def __init__(self, backup, ws):
        self.bu = backup
        self.ws = ws

    def _branch_pairs(self):
        '''Return a list of tuples (parenttip, localtip) for each
        outgoing head.  If the working copy contains modified files,
        it is a head, and neither of its parents are.
        '''

        parent = self.ws.parent()

        if parent:
            outgoing = self.ws.findoutgoing(parent)
            outnodes = set(self.ws.repo.changelog.nodesbetween(outgoing)[0])

            heads = [self.ws.repo.changectx(n) for n in self.ws.repo.heads()
                     if n in outnodes]
        else:
            heads = []
            outnodes = []

        wctx = self.ws.workingctx()
        if wctx.files():        # We only care about file changes.
            heads = filter(lambda x: x not in wctx.parents(), heads) + [wctx]

        pairs = []
        for head in heads:
            if head.rev() is None:
                c = head.parents()
            else:
                c = [head]

            pairs.append((self.ws.parenttip(c, outnodes), head))
        return pairs
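
    #
    # Explanatory note: when a head is the working copy, head.rev() is
    # None and it has no node of its own, so its parents are used when
    # locating the parenttip against which changed files are collected.
    #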

    def backup(self):
        '''Save a clear copy of each source file modified between each
        head and that head's parenttip (see WorkSpace.parenttip).
        '''

        tarpath = self.bu.backupfile('clear.tar.gz')
        branches = self._branch_pairs()

        if not branches:
            return

        try:
            tar = CdmTarFile.gzopen(tarpath, 'w')
        except (EnvironmentError, tarfile.TarError), e:
            raise util.Abort("Could not open %s for writing: %s" %
                             (tarpath, e))

        try:
            for parent, child in branches:
                tpath = child.node() and node.short(child.node()) or "working"

                for fname, change in self.ws.status(parent, child).iteritems():
                    if change not in ('added', 'modified'):
                        continue

                    try:
                        tar.addfilectx(child.filectx(fname),
                                       os.path.join(tpath, fname))
                    except ValueError, e:
                        crev = child.rev()
                        if crev is None:
                            crev = "working copy"
                        raise util.Abort("Could not backup clear file %s "
                                         "from %s: %s\n" % (fname, crev, e))
        finally:
            tar.close()

    def cleanup(self):
        '''Cleanup a failed Clear backup.

        Remove the clear tarball from the backup directory.
        '''

        self.bu.unlink('clear.tar.gz')

    def restore(self):
        '''Clear backups are never restored, do nothing'''
        pass

    def need_backup(self):
        '''Clear backups are never compared, return False (no backup needed).

        Should a backup actually be needed, one of the other
        implementation classes would notice in any situation we would.
        '''

        return False

class CdmBackup(object):
    '''A backup of a given workspace'''

    def __init__(self, ui, ws, name):
        self.ws = ws
        self.ui = ui
        self.backupdir = self._find_backup_dir(name)

        #
        # The order of instances here controls the order the various
        # operations are run.
        #
        # There's some inherent dependence, in that on restore we need
        # to restore committed changes prior to uncommitted changes
        # (as the parent revision of any uncommitted changes is quite
        # likely to not exist until committed changes are restored).
        # Metadata restore can happen at any point, but happens last
        # as a matter of convention.
        #
        self.modules = [x(self, ws) for x in [CdmCommittedBackup,
                                              CdmUncommittedBackup,
                                              CdmClearBackup,
                                              CdmMetadataBackup]]

        if os.path.exists(os.path.join(self.backupdir, 'latest')):
            generation = os.readlink(os.path.join(self.backupdir, 'latest'))
            self.generation = int(os.path.split(generation)[1])
        else:
            self.generation = 0

    def _find_backup_dir(self, name):
        '''Find the path to an appropriate backup directory based on NAME'''

        if os.path.isabs(name):
            return name

        if self.ui.config('cdm', 'backupdir'):
            backupbase = os.path.expanduser(self.ui.config('cdm', 'backupdir'))
        else:
            home = None

            try:
                home = os.getenv('HOME') or pwd.getpwuid(os.getuid()).pw_dir
            except KeyError:
                pass                    # Handled anyway

            if not home:
                raise util.Abort('Could not determine your HOME directory to '
                                 'find backup path')

            backupbase = os.path.join(home, 'cdm.backup')

        backupdir = os.path.join(backupbase, name)

        # If backupdir exists, it must be a directory.
        if (os.path.exists(backupdir) and not os.path.isdir(backupdir)):
            raise util.Abort('%s exists but is not a directory' % backupdir)

        return backupdir

    def _update_latest(self, gen):
        '''Update latest symlink to point to the current generation'''
        linkpath = os.path.join(self.backupdir, 'latest')

        if os.path.lexists(linkpath):
            os.unlink(linkpath)

        os.symlink(str(gen), linkpath)

    def _create_gen(self, gen):
        '''Create a new backup generation'''
        try:
            os.makedirs(os.path.join(self.backupdir, str(gen)))
            self._update_latest(gen)
        except EnvironmentError, e:
            raise util.Abort("Couldn't create backup generation %s: %s" %
                             (os.path.join(self.backupdir, str(gen)), e))

    def backupfile(self, path):
        '''Return the full path to backup file PATH in the current
        backup generation'''
        return os.path.join(self.backupdir, str(self.generation), path)

    def unlink(self, name):
        '''Unlink the specified path from the backup directory.
        A no-op if the path does not exist.
        '''

        fpath = self.backupfile(name)
        if os.path.exists(fpath):
            os.unlink(fpath)

    def open(self, name, mode='r'):
        '''Open the specified file in the backup directory'''
        return open(self.backupfile(name), mode)

    def exists(self, name):
        '''Return a boolean indicating whether a given file exists in the
        backup directory.'''
        return os.path.exists(self.backupfile(name))

    def need_backup(self):
        '''Compare backed up changes to workspace'''

        #
        # If there's no current backup generation, or the last backup was
        # invalid (lacking the dirstate file), we need a backup regardless
        # of anything else.
        #
        if not self.generation or not self.exists('dirstate'):
            return True

        for x in self.modules:
            if x.need_backup():
                return True

        return False

    def backup(self):
        '''Take a backup of the current workspace.

        Calling code is expected to hold both the working copy lock
        and repository lock.'''

        if not os.path.exists(self.backupdir):
            try:
                os.makedirs(self.backupdir)
            except EnvironmentError, e:
                raise util.Abort('Could not create backup directory %s: %s' %
                                 (self.backupdir, e))

        self.generation += 1
        self._create_gen(self.generation)

        try:
            for x in self.modules:
                x.backup()
        except Exception, e:
            if isinstance(e, KeyboardInterrupt):
                self.ws.ui.warn("Interrupted\n")
            else:
                self.ws.ui.warn("Error: %s\n" % e)
                show_traceback = self.ws.ui.configbool('ui', 'traceback',
                                                       False)

                #
                # If it's not a 'normal' error, we want to print a stack
                # trace now in case the attempt to remove the partial
                # backup also fails, and raises a second exception.
                #
                if (not isinstance(e, (EnvironmentError, util.Abort))
                        or show_traceback):
                    traceback.print_exc()

            for x in self.modules:
                x.cleanup()

            os.rmdir(os.path.join(self.backupdir, str(self.generation)))
            self.generation -= 1

            if self.generation != 0:
                self._update_latest(self.generation)
            else:
                os.unlink(os.path.join(self.backupdir, 'latest'))

            raise util.Abort('Backup failed')

    def restore(self, gen=None):
        '''Restore workspace from backup.

        Restores from backup generation GEN (defaulting to the latest)
        into workspace WS.

        Calling code is expected to hold both the working copy lock
        and repository lock of the destination workspace.'''

        if not os.path.exists(self.backupdir):
            raise util.Abort('Backup directory does not exist: %s' %
                             (self.backupdir))

        if gen:
            if not os.path.exists(os.path.join(self.backupdir, str(gen))):
                raise util.Abort('Backup generation does not exist: %s' %
                                 (os.path.join(self.backupdir, str(gen))))
            self.generation = int(gen)

        if not self.generation:     # This is OK, 0 is not a valid generation
            raise util.Abort('Backup has no generations: %s' % self.backupdir)

        if not self.exists('dirstate'):
            raise util.Abort('Backup %s/%s is incomplete (dirstate missing)' %
                             (self.backupdir, self.generation))

        try:
            for x in self.modules:
                x.restore()
        except util.Abort, e:
            raise util.Abort('Error restoring workspace:\n'
                             '%s\n'
                             'Workspace may be partially restored' % e)
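
#
# Illustrative (hypothetical) calling sequence; the real drivers live in
# the Cdm command code, not in this module:
#
#     bu = CdmBackup(ui, ws, os.path.basename(ws.repo.root))
#     if bu.need_backup():
#         bu.backup()
#     ...
#     bu.restore()
#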