# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.
import os, subprocess, shutil, sys, re
from xml.dom import minidom
from zeroinstall import SafeException
from zeroinstall.injector import model
from zeroinstall.support import ro_rmtree
from logging import info, warn

sys.path.insert(0, os.environ['RELEASE_0REPO'])
from repo import registry, merge

import support, compile
from scm import get_scm
XMLNS_RELEASE = 'http://zero-install.sourceforge.net/2007/namespaces/0release'

valid_phases = ['commit-release', 'generate-archive']

TMP_BRANCH_NAME = '0release-tmp'

test_command = os.environ['0TEST']
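# Both RELEASE_0REPO and 0TEST are assumed to be present in the environment
# before this module is imported (presumably exported by the 0release wrapper):
# RELEASE_0REPO should point at a directory containing the 0repo 'repo' package,
# and 0TEST is the command used below to run the feed's self-tests.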
def run_unit_tests(local_feed):
	print "Running self-tests..."
	exitstatus = subprocess.call([test_command, '--', local_feed])
	if exitstatus == 2:
		print "SKIPPED unit tests for %s (no 'test' command)" % local_feed
		return
	if exitstatus:
		raise SafeException("Self-test failed with exit status %d" % exitstatus)

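# Note on the bookkeeping used by upload_archives() below: status.verified_uploads
# is a string with one character per entry in 'uploads'. A plausible progression
# for three archives would be 'NNN' (nothing uploaded yet) -> 'AAA' (upload
# attempted) -> 'VNV' (the middle archive could not be verified) -> 'VAV' -> 'VVV'.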
def upload_archives(options, status, uploads):
	# For each binary or source archive in uploads, ensure it is available
	# from options.archive_dir_public_url

	# We try to do all the uploads together first, and then verify them all
	# afterwards. This is because we may have to wait for them to be moved
	# from an incoming queue before we can test them.
	def url(archive):
		return support.get_archive_url(options, status.release_version, archive)

	# Check that url exists and has the given size
	def is_uploaded(url, size):
		if url.startswith('http://TESTING/releases'):
			# Testing mode; don't try to fetch it
			return True

		print "Testing URL %s..." % url
		try:
			actual_size = int(support.get_size(url))
		except Exception, ex:
			print "Can't get size of '%s': %s" % (url, ex)
			return False

		if actual_size == size:
			return True
		print "WARNING: %s exists, but size is %d, not %d!" % (url, actual_size, size)
		return False
	# status.verified_uploads is an array of status flags:
	description = {
		'N': 'Upload required',
		'A': 'Upload has been attempted, but we need to check whether it worked',
		'V': 'Upload has been checked (exists and has correct size)',
	}

	if status.verified_uploads is None:
		# First time around; no point checking for existing uploads
		status.verified_uploads = 'N' * len(uploads)
	while True:
		print "\nUpload status:"
		for i, stat in enumerate(status.verified_uploads):
			print "- %s : %s" % (uploads[i], description[stat])
		print

		if status.verified_uploads == 'V' * len(uploads):
			break

		# Find all New archives
		to_upload = []
		for i, stat in enumerate(status.verified_uploads):
			assert stat in 'NAV', status.verified_uploads
			if stat == 'N':
				to_upload.append(uploads[i])
				print "Upload %s/%s as %s" % (status.release_version, uploads[i], url(uploads[i]))
		cmd = options.archive_upload_command.strip()

		if to_upload:
			# Mark all New items as Attempted
			status.verified_uploads = status.verified_uploads.replace('N', 'A')
			status.save()

			if cmd:
				support.show_and_run(cmd, to_upload)
			elif len(to_upload) == 1:
				print "No upload command is set => please upload the archive manually now"
				raw_input('Press Return once the archive is uploaded.')
			else:
				print "No upload command is set => please upload the archives manually now"
				raw_input('Press Return once the %d archives are uploaded.' % len(to_upload))
		# Verify all Attempted uploads
		new_stat = ''
		for i, stat in enumerate(status.verified_uploads):
			assert stat in 'AV', status.verified_uploads
			if stat == 'A':
				if not is_uploaded(url(uploads[i]), os.path.getsize(uploads[i])):
					print "** Archive '%s' still not uploaded! Try again..." % uploads[i]
					stat = 'N'
				else:
					stat = 'V'
			new_stat += stat

		status.verified_uploads = new_stat
		status.save()

		if 'N' in new_stat and cmd:
			raw_input('Press Return to try again.')

legacy_warning = """*** Note: the upload functions of 0release
*** (--archive-dir-public-url, --master-feed-file, --archive-upload-command
*** and --master-feed-upload-command) are being replaced by 0repo. They may
*** go away in future. If 0repo is not suitable for your needs, please
*** contact the mailing list to let us know.
***
*** http://www.0install.net/0repo.html
*** http://www.0install.net/support.html#lists
***"""
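# A version substitution is declared in the local feed as a <release:update-version>
# element: its 'path' attribute is a path relative to the implementation directory,
# and its text content is a regular expression with exactly one () group spanning
# the version string to rewrite. A hypothetical example for a setup.py containing
# "version = '0.1'" would be:
#
#   <release:update-version path="setup.py">^version = '(.*)'$</release:update-version>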
def do_version_substitutions(impl_dir, version_substitutions, new_version):
	for (rel_path, subst) in version_substitutions:
		assert not os.path.isabs(rel_path), rel_path
		path = os.path.join(impl_dir, rel_path)
		with open(path, 'rt') as stream:
			data = stream.read()

		match = subst.search(data)
		if match:
			orig = match.group(0)
			if match.lastindex != 1:
				raise SafeException("Regex '%s' must have exactly one matching () group" % subst.pattern)
			span = match.span(1)
			assert span[0] >= 0, "Version match group did not match (regexp=%s; match=%s)" % (subst.pattern, orig)
			new_data = data[:span[0]] + new_version + data[span[1]:]
		else:
			raise SafeException("No matches for regex '%s' in '%s'" % (subst.pattern, path))

		with open(path, 'wt') as stream:
			stream.write(new_data)

def do_release(local_feed, options):
	if options.master_feed_file or options.archive_dir_public_url or options.archive_upload_command or options.master_feed_upload_command:
		print(legacy_warning)
	if options.master_feed_file:
		options.master_feed_file = os.path.abspath(options.master_feed_file)

	if not local_feed.feed_for:
		raise SafeException("Feed %s missing a <feed-for> element" % local_feed.local_path)

	status = support.Status()
	local_impl = support.get_singleton_impl(local_feed)

	local_impl_dir = local_impl.id
	assert os.path.isabs(local_impl_dir)
	local_impl_dir = os.path.realpath(local_impl_dir)
	assert os.path.isdir(local_impl_dir)
	assert local_feed.local_path.startswith(local_impl_dir + os.sep)

	# From the impl directory to the feed
	# NOT relative to the archive root (in general)
	local_iface_rel_path = local_feed.local_path[len(local_impl_dir) + 1:]
	assert not local_iface_rel_path.startswith('/')
	assert os.path.isfile(os.path.join(local_impl_dir, local_iface_rel_path))
	phase_actions = {}
	for phase in valid_phases:
		phase_actions[phase] = []	# List of <release:action> elements

	version_substitutions = []

	add_toplevel_dir = None
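	# For reference, the <release:management> section parsed below looks roughly
	# like this (element and attribute names are the ones handled here; the actual
	# command text is just an example):
	#
	#   <release:management xmlns:release="http://zero-install.sourceforge.net/2007/namespaces/0release">
	#     <release:action phase="commit-release">make update-docs</release:action>
	#     <release:update-version path="setup.py">^version = '(.*)'$</release:update-version>
	#     <release:add-toplevel-directory/>
	#   </release:management>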
	release_management = local_feed.get_metadata(XMLNS_RELEASE, 'management')
	if len(release_management) == 1:
		info("Found <release:management> element.")
		release_management = release_management[0]
		for x in release_management.childNodes:
			if x.uri == XMLNS_RELEASE and x.name == 'action':
				phase = x.getAttribute('phase')
				if phase not in valid_phases:
					raise SafeException("Invalid action phase '%s' in local feed %s. Valid actions are:\n%s" % (phase, local_feed.local_path, '\n'.join(valid_phases)))
				phase_actions[phase].append(x.content)
			elif x.uri == XMLNS_RELEASE and x.name == 'update-version':
				version_substitutions.append((x.getAttribute('path'), re.compile(x.content, re.MULTILINE)))
			elif x.uri == XMLNS_RELEASE and x.name == 'add-toplevel-directory':
				add_toplevel_dir = local_feed.get_name()
			else:
				warn("Unknown <release:management> element: %s", x)
	elif len(release_management) > 1:
		raise SafeException("Multiple <release:management> sections in %s!" % local_feed)
	else:
		info("No <release:management> element found in local feed.")
	scm = get_scm(local_feed, options)

	# Path relative to the archive / SCM root
	local_iface_rel_root_path = local_feed.local_path[len(scm.root_dir) + 1:]
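	# The hook commands collected in phase_actions are executed by run_hooks()
	# below through the shell, with the extra environment variables supplied by
	# the caller (e.g. RELEASE_VERSION). A purely illustrative commit-release
	# action could therefore be something like:
	#   sed -i "s/^version = .*/version = '$RELEASE_VERSION'/" version.py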
	def run_hooks(phase, cwd, env):
		info("Running hooks for phase '%s'" % phase)
		full_env = os.environ.copy()
		full_env.update(env)
		for x in phase_actions[phase]:
			print "[%s]: %s" % (phase, x)
			support.check_call(x, shell = True, cwd = cwd, env = full_env)

	def set_to_release():
		print "Snapshot version is " + local_impl.get_version()
		release_version = options.release_version
		if release_version is None:
			suggested = support.suggest_release_version(local_impl.get_version())
			release_version = raw_input("Version number for new release [%s]: " % suggested)
			if not release_version:
				release_version = suggested

		scm.ensure_no_tag(release_version)

		status.head_before_release = scm.get_head_revision()
		status.save()

		working_copy = local_impl.id
		do_version_substitutions(local_impl_dir, version_substitutions, release_version)
		run_hooks('commit-release', cwd = working_copy, env = {'RELEASE_VERSION': release_version})

		print "Releasing version", release_version
		support.publish(local_feed.local_path, set_released = 'today', set_version = release_version)

		support.backup_if_exists(release_version)
		os.mkdir(release_version)
		os.chdir(release_version)

		status.old_snapshot_version = local_impl.get_version()
		status.release_version = release_version
		status.head_at_release = scm.commit('Release %s' % release_version, branch = TMP_BRANCH_NAME, parent = 'HEAD')
		status.save()

	def set_to_snapshot(snapshot_version):
		assert snapshot_version.endswith('-post')
		support.publish(local_feed.local_path, set_released = '', set_version = snapshot_version)
		do_version_substitutions(local_impl_dir, version_substitutions, snapshot_version)
		scm.commit('Start development series %s' % snapshot_version, branch = TMP_BRANCH_NAME, parent = TMP_BRANCH_NAME)
		status.new_snapshot_version = scm.get_head_revision()
		status.save()

	def ensure_ready_to_release():
		#if not options.master_feed_file:
		#	raise SafeException("Master feed file not set! Check your configuration")

		scm.ensure_committed()
		scm.ensure_versioned(os.path.abspath(local_feed.local_path))
		info("No uncommitted changes. Good.")
		# Not needed for GIT. For SCMs where tagging is expensive (e.g. svn) this might be useful.
		#run_unit_tests(local_impl)

		scm.grep('\(^\|[^=]\)\<\(TODO\|XXX\|FIXME\)\>')

	def create_feed(target_feed, local_iface_path, archive_file, archive_name, main):
		shutil.copyfile(local_iface_path, target_feed)

		support.publish(target_feed,
			set_main = main,
			archive_url = support.get_archive_url(options, status.release_version, os.path.basename(archive_file)),
			archive_file = archive_file,
			archive_extract = archive_name)

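	# Note: versions are compared below using zeroinstall's model.parse_version(),
	# which orders release numbers numerically component by component rather than
	# as plain strings, so for example '0.9' sorts before '0.10'.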
	def get_previous_release(this_version):
		"""Return the highest numbered version in the master feed before this_version.
		@return: version, or None if there wasn't one"""
		parsed_release_version = model.parse_version(this_version)

		versions = [model.parse_version(version) for version in scm.get_tagged_versions()]
		versions = [version for version in versions if version < parsed_release_version]

		if versions:
			return model.format_version(max(versions))
		return None

	def export_changelog(previous_release):
		changelog = file('changelog-%s' % status.release_version, 'w')
		try:
			try:
				scm.export_changelog(previous_release, status.head_before_release, changelog)
			except SafeException, ex:
				print "WARNING: Failed to generate changelog: " + str(ex)
			else:
				print "Wrote changelog from %s to here as %s" % (previous_release or 'start', changelog.name)
		finally:
			changelog.close()

	def fail_candidate():
		cwd = os.getcwd()
		assert cwd.endswith(status.release_version)
		support.backup_if_exists(cwd)
		scm.delete_branch(TMP_BRANCH_NAME)
		os.unlink(support.release_status_file)
		print "Restored to state before starting release. Make your fixes and try again..."

	def release_via_0repo(new_impls_feed):
		import repo.cmd
		support.make_archives_relative(new_impls_feed)
		oldcwd = os.getcwd()
		try:
			repo.cmd.main(['0repo', 'add', '--', new_impls_feed])
		finally:
			os.chdir(oldcwd)

	def release_without_0repo(archive_file, new_impls_feed):
		assert options.master_feed_file

		if not options.archive_dir_public_url:
			raise SafeException("Archive directory public URL is not set! Edit configuration and try again.")

		if status.updated_master_feed:
			print "Already added to master feed. Not changing."
		else:
			publish_opts = {}
			if os.path.exists(options.master_feed_file):
				# Check we haven't already released this version
				master = support.load_feed(os.path.realpath(options.master_feed_file))
				existing_releases = [impl for impl in master.implementations.values() if impl.get_version() == status.release_version]
				if len(existing_releases):
					raise SafeException("Master feed %s already contains an implementation with version number %s!" % (options.master_feed_file, status.release_version))
				previous_release = get_previous_release(status.release_version)
				previous_testing_releases = [impl for impl in master.implementations.values() if impl.get_version() == previous_release and impl.upstream_stability == model.stability_levels["testing"]]
				if previous_testing_releases:
					print "The previous release, version %s, is still marked as 'testing'. Set to stable?" % previous_release
					if support.get_choice(['Yes', 'No']) == 'Yes':
						publish_opts['select_version'] = previous_release
						publish_opts['set_stability'] = "stable"

			support.publish(options.master_feed_file, local = new_impls_feed, xmlsign = True, key = options.key, **publish_opts)
			status.updated_master_feed = 'true'
			status.save()

		uploads = [os.path.basename(archive_file)]
		for b in compiler.get_binary_feeds():
			binary_feed = support.load_feed(b)
			impl, = binary_feed.implementations.values()
			uploads.append(os.path.basename(impl.download_sources[0].url))

		upload_archives(options, status, uploads)

		feed_base = os.path.dirname(list(local_feed.feed_for)[0])
		feed_files = [options.master_feed_file]
		print "Upload %s into %s" % (', '.join(feed_files), feed_base)
		cmd = options.master_feed_upload_command.strip()
		if cmd:
			support.show_and_run(cmd, feed_files)
		else:
			print "NOTE: No feed upload command set => you'll have to upload them yourself!"

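	# Both upload commands are user-supplied shell snippets; the file names are
	# passed to them as extra arguments via support.show_and_run(). A hypothetical
	# configuration might look like:
	#   archive_upload_command     = scp -- "$@" me@example.org:/var/www/releases/
	#   master_feed_upload_command = scp -- "$@" me@example.org:/var/www/feeds/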
	def accept_and_publish(archive_file, src_feed_name):
		if status.tagged:
			print "Already tagged in SCM. Not re-tagging."
		else:
			scm.ensure_committed()
			head = scm.get_head_revision()
			if head != status.head_before_release:
				raise SafeException("Changes committed since we started!\n" +
						"HEAD was " + status.head_before_release + "\n"
						"HEAD now " + head)

			scm.tag(status.release_version, status.head_at_release)
			scm.reset_hard(TMP_BRANCH_NAME)
			scm.delete_branch(TMP_BRANCH_NAME)

			status.tagged = 'true'
			status.save()
		assert len(local_feed.feed_for) == 1

		# Merge the source and binary feeds together first, so
		# that we update the master feed atomically and only
		# have to sign it once.
		with open(src_feed_name, 'rb') as stream:
			doc = minidom.parse(stream)
		for b in compiler.get_binary_feeds():
			with open(b, 'rb') as stream:
				bin_doc = minidom.parse(stream)
			merge.merge(doc, bin_doc)
		new_impls_feed = 'merged.xml'
		with open(new_impls_feed, 'wb') as stream:
			stream.write(doc.toxml('utf-8'))

		# TODO: support uploading to a sub-feed (requires support in 0repo too)
		master_feed, = local_feed.feed_for
		repository = registry.lookup(master_feed, missing_ok = True)
		if repository:
			release_via_0repo(new_impls_feed)
		else:
			release_without_0repo(archive_file, new_impls_feed)

		os.unlink(new_impls_feed)
		print "Push changes to public SCM repository..."
		public_repos = options.public_scm_repository
		if public_repos:
			scm.push_head_and_release(status.release_version)
		else:
			print "NOTE: No public repository set => you'll have to push the tag and trunk yourself."

		os.unlink(support.release_status_file)

	if status.head_before_release:
		head = scm.get_head_revision()
		if status.release_version:
			print "RESUMING release of %s %s" % (local_feed.get_name(), status.release_version)
			if options.release_version and options.release_version != status.release_version:
				raise SafeException("Can't start release of version %s; we are currently releasing %s.\nDelete the release-status file to abort the previous release." % (options.release_version, status.release_version))
		elif head == status.head_before_release:
			print "Restarting release of %s (HEAD revision has not changed)" % local_feed.get_name()
		else:
			raise SafeException("Something went wrong with the last run:\n" +
					"HEAD revision for last run was " + status.head_before_release + "\n" +
					"HEAD revision now is " + head + "\n" +
					"You should revert your working copy to the previous head and try again.\n" +
					"If you're sure you want to release from the current head, delete '" + support.release_status_file + "'")
	else:
		print "Releasing", local_feed.get_name()
	ensure_ready_to_release()
	if status.release_version:
		if not os.path.isdir(status.release_version):
			raise SafeException("Can't resume; directory %s missing. Try deleting '%s'." % (status.release_version, support.release_status_file))
		os.chdir(status.release_version)
		need_set_snapshot = False
		if status.tagged:
			print "Already tagged. Resuming the publishing process..."
		elif status.new_snapshot_version:
			head = scm.get_head_revision()
			if head != status.head_before_release:
				raise SafeException("There are more commits since we started!\n"
						"HEAD was " + status.head_before_release + "\n"
						"HEAD now " + head + "\n"
						"To include them, delete '" + support.release_status_file + "' and try again.\n"
						"To leave them out, put them on a new branch and reset HEAD to the release version.")
		else:
			raise SafeException("Something went wrong previously when setting the new snapshot version.\n" +
					"Suggest you reset to the original HEAD of\n%s and delete '%s'." % (status.head_before_release, support.release_status_file))
	else:
		set_to_release()	# Changes directory
		assert status.release_version
		need_set_snapshot = True
	# May be needed by the upload command
	os.environ['RELEASE_VERSION'] = status.release_version

	archive_name = support.make_archive_name(local_feed.get_name(), status.release_version)
	archive_file = archive_name + '.tar.bz2'

	export_prefix = archive_name
	if add_toplevel_dir is not None:
		export_prefix += os.sep + add_toplevel_dir
	if status.created_archive and os.path.isfile(archive_file):
		print "Archive already created"
	else:
		support.backup_if_exists(archive_file)
		scm.export(export_prefix, archive_file, status.head_at_release)

		has_submodules = scm.has_submodules()

		if phase_actions['generate-archive'] or has_submodules:
			try:
				support.unpack_tarball(archive_file)
				if has_submodules:
					scm.export_submodules(archive_name)
				run_hooks('generate-archive', cwd = archive_name, env = {'RELEASE_VERSION': status.release_version})
				info("Regenerating archive (may have been modified by generate-archive hooks)...")
				support.check_call(['tar', 'cjf', archive_file, archive_name])
			except SafeException:
				scm.reset_hard(scm.get_current_branch())
				fail_candidate()
				raise

		status.created_archive = 'true'
		status.save()
	if need_set_snapshot:
		set_to_snapshot(status.release_version + '-post')
		# Revert back to the original revision, so that any fixes the user makes
		# will get applied before the tag
		scm.reset_hard(scm.get_current_branch())

	#backup_if_exists(archive_name)
	support.unpack_tarball(archive_file)
	extracted_feed_path = os.path.abspath(os.path.join(export_prefix, local_iface_rel_root_path))
	assert os.path.isfile(extracted_feed_path), "Local feed not in archive! Is it under version control?"
	extracted_feed = support.load_feed(extracted_feed_path)
	extracted_impl = support.get_singleton_impl(extracted_feed)

	if extracted_impl.main:
		# Find main executable, relative to the archive root
		abs_main = os.path.join(os.path.dirname(extracted_feed_path), extracted_impl.id, extracted_impl.main)
		main = os.path.relpath(abs_main, archive_name + os.sep)
		if main != extracted_impl.main:
			print "(adjusting main: '%s' for the feed inside the archive, '%s' externally)" % (extracted_impl.main, main)
		# XXX: this is going to fail if the feed uses the new <command> syntax
		if not os.path.exists(abs_main):
			raise SafeException("Main executable '%s' not found after unpacking archive!" % abs_main)
		if main == extracted_impl.main:
			main = None	# Don't change the main attribute
	else:
		main = None
	if status.src_tests_passed:
		print "Unit-tests already passed - not running again"
	else:
		# Make directories read-only (checks tests don't write)
		support.make_readonly_recursive(archive_name)

		try:
			run_unit_tests(extracted_feed_path)
			status.src_tests_passed = True
			status.save()
		except SafeException:
			print "(leaving extracted directory for examination)"
			fail_candidate()
			raise

		# Unpack it again in case the unit-tests changed anything
		ro_rmtree(archive_name)
		support.unpack_tarball(archive_file)
	# Generate feed for source
	src_feed_name = '%s.xml' % archive_name
	create_feed(src_feed_name, extracted_feed_path, archive_file, archive_name, main)
	print "Wrote source feed as %s" % src_feed_name

	# If it's a source package, compile the binaries now...
	compiler = compile.Compiler(options, os.path.abspath(src_feed_name), release_version = status.release_version)
	compiler.build_binaries()
	previous_release = get_previous_release(status.release_version)
	export_changelog(previous_release)
	if status.tagged:
		raw_input('Already tagged. Press Return to resume publishing process...')
		choice = 'Publish'
	else:
		print "\nCandidate release archive:", archive_file
		print "(extracted to %s for inspection)" % os.path.abspath(archive_name)

		print "\nPlease check candidate and select an action:"
		print "P) Publish candidate (accept)"
		print "F) Fail candidate (delete release-status file)"
		if previous_release:
			print "D) Diff against release archive for %s" % previous_release
			maybe_diff = ['Diff']
		else:
			maybe_diff = []
		print "(you can also hit CTRL-C and resume this script when done)"
		while True:
			choice = support.get_choice(['Publish', 'Fail'] + maybe_diff)
			if choice == 'Diff':
				previous_archive_name = support.make_archive_name(local_feed.get_name(), previous_release)
				previous_archive_file = '..' + os.sep + previous_release + os.sep + previous_archive_name + '.tar.bz2'

				# For archives created by older versions of 0release
				if not os.path.isfile(previous_archive_file):
					old_previous_archive_file = '..' + os.sep + previous_archive_name + '.tar.bz2'
					if os.path.isfile(old_previous_archive_file):
						previous_archive_file = old_previous_archive_file

				if os.path.isfile(previous_archive_file):
					support.unpack_tarball(previous_archive_file)
					try:
						support.show_diff(previous_archive_name, archive_name)
					finally:
						shutil.rmtree(previous_archive_name)
				else:
					print "Sorry, archive file %s not found! Can't show diff." % previous_archive_file
			else:
				break
	info("Deleting extracted archive %s", archive_name)
	shutil.rmtree(archive_name)

	if choice == 'Publish':
		accept_and_publish(archive_file, src_feed_name)
	else:
		assert choice == 'Fail'
		fail_candidate()