# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.

import os, subprocess, shutil, sys
from xml.dom import minidom
from zeroinstall import SafeException
from zeroinstall.injector import model
from zeroinstall.support import ro_rmtree
from logging import info, warn

sys.path.insert(0, os.environ['RELEASE_0REPO'])
from repo import registry, merge

import support, compile
from scm import get_scm

XMLNS_RELEASE = 'http://zero-install.sourceforge.net/2007/namespaces/0release'

valid_phases = ['commit-release', 'generate-archive']

TMP_BRANCH_NAME = '0release-tmp'

test_command = os.environ['0TEST']
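
# test_command is used by run_unit_tests() below to run each feed's self-tests;
# the caller is expected to have set the 0TEST environment variable before
# importing this module (a KeyError here means it was not set).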
def run_unit_tests(local_feed):
	print "Running self-tests..."
	exitstatus = subprocess.call([test_command, '--', local_feed])
	if exitstatus == 2:		# feed has no self-test
		print "SKIPPED unit tests for %s (no 'self-test' attribute set)" % local_feed
		return
	if exitstatus:
		raise SafeException("Self-test failed with exit status %d" % exitstatus)

def upload_archives(options, status, uploads):
	# For each binary or source archive in uploads, ensure it is available
	# from options.archive_dir_public_url

	# We try to do all the uploads together first, and then verify them all
	# afterwards. This is because we may have to wait for them to be moved
	# from an incoming queue before we can test them.

	def url(archive):
		return support.get_archive_url(options, status.release_version, archive)

	# Check that url exists and has the given size
	def is_uploaded(url, size):
		if url.startswith('http://TESTING/releases'):
			return True		# dummy URL used in testing; skip the network check

		print "Testing URL %s..." % url
		try:
			actual_size = int(support.get_size(url))
		except Exception, ex:
			print "Can't get size of '%s': %s" % (url, ex)
			return False
		if actual_size == size:
			return True
		print "WARNING: %s exists, but size is %d, not %d!" % (url, actual_size, size)
		return False

	# status.verified_uploads is an array of status flags:
	description = {
		'N': 'Upload required',
		'A': 'Upload has been attempted, but we need to check whether it worked',
		'V': 'Upload has been checked (exists and has correct size)',
	}

	if status.verified_uploads is None:
		# First time around; no point checking for existing uploads
		status.verified_uploads = 'N' * len(uploads)
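
	# Each archive moves N -> A -> V; the loop below keeps prompting/uploading
	# until every entry in status.verified_uploads is 'V'.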
	while True:
		print "\nUpload status:"
		for i, stat in enumerate(status.verified_uploads):
			print "- %s : %s" % (uploads[i], description[stat])

		if status.verified_uploads == 'V' * len(uploads):
			break		# everything uploaded and verified

		# Find all New archives
		to_upload = []
		for i, stat in enumerate(status.verified_uploads):
			if stat == 'N':
				to_upload.append(uploads[i])
				print "Upload %s/%s as %s" % (status.release_version, uploads[i], url(uploads[i]))

		cmd = options.archive_upload_command.strip()

		if to_upload:
			# Mark all New items as Attempted
			status.verified_uploads = status.verified_uploads.replace('N', 'A')

			if cmd:
				support.show_and_run(cmd, to_upload)
			elif len(to_upload) == 1:
				print "No upload command is set => please upload the archive manually now"
				raw_input('Press Return once the archive is uploaded.')
			else:
				print "No upload command is set => please upload the archives manually now"
				raw_input('Press Return once the %d archives are uploaded.' % len(to_upload))

		# Verify all Attempted uploads
		new_stat = ''
		for i, stat in enumerate(status.verified_uploads):
			assert stat in 'AV', status.verified_uploads
			if stat == 'A':
				if not is_uploaded(url(uploads[i]), os.path.getsize(uploads[i])):
					print "** Archive '%s' still not uploaded! Try again..." % uploads[i]
					stat = 'N'		# needs another attempt
				else:
					stat = 'V'
			new_stat += stat
		status.verified_uploads = new_stat

		if 'N' in new_stat and cmd:
			raw_input('Press Return to try again.')

legacy_warning = """*** Note: the upload functions of 0release
*** (--archive-dir-public-url, --master-feed-file, --archive-upload-command
*** and --master-feed-upload-command) are being replaced by 0repo. They may
*** go away in future. If 0repo is not suitable for your needs, please
*** contact the mailing list to let us know.
***
*** http://www.0install.net/0repo.html
*** http://www.0install.net/support.html#lists
"""

def do_release(local_feed, options):
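	"""Top-level release driver: check the working copy, commit a release
	candidate on a temporary branch, export and test an archive, build any
	binaries, then either publish the result (via 0repo or the legacy
	master-feed options) or roll everything back."""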
	if options.master_feed_file or options.archive_dir_public_url or options.archive_upload_command or options.master_feed_upload_command:
		print(legacy_warning)

	if options.master_feed_file:
		options.master_feed_file = os.path.abspath(options.master_feed_file)

	if not local_feed.feed_for:
		raise SafeException("Feed %s missing a <feed-for> element" % local_feed.local_path)

	status = support.Status()
	local_impl = support.get_singleton_impl(local_feed)

	local_impl_dir = local_impl.id
	assert local_impl_dir.startswith('/')
	local_impl_dir = os.path.realpath(local_impl_dir)
	assert os.path.isdir(local_impl_dir)
	assert local_feed.local_path.startswith(local_impl_dir + '/')

	# From the impl directory to the feed
	# NOT relative to the archive root (in general)
	local_iface_rel_path = local_feed.local_path[len(local_impl_dir) + 1:]
	assert not local_iface_rel_path.startswith('/')
	assert os.path.isfile(os.path.join(local_impl_dir, local_iface_rel_path))
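
	# A feed can customise the release through a <release:management> section.
	# Illustrative sketch only (element names match the parser below; the
	# actual commands are whatever your project needs):
	#
	#   <release:management xmlns:release="http://zero-install.sourceforge.net/2007/namespaces/0release">
	#     <release:action phase="commit-release">make update-version</release:action>
	#     <release:add-toplevel-directory/>
	#   </release:management>
	#
	# Each action's text is run as a shell command by run_hooks() during the
	# named phase; add-toplevel-directory wraps the archive contents in an
	# extra directory named after the feed.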
	phase_actions = {}
	for phase in valid_phases:
		phase_actions[phase] = []	# List of <release:action> elements

	add_toplevel_dir = None
	release_management = local_feed.get_metadata(XMLNS_RELEASE, 'management')
	if len(release_management) == 1:
		info("Found <release:management> element.")
		release_management = release_management[0]
		for x in release_management.childNodes:
			if x.uri == XMLNS_RELEASE and x.name == 'action':
				phase = x.getAttribute('phase')
				if phase not in valid_phases:
					raise SafeException("Invalid action phase '%s' in local feed %s. Valid actions are:\n%s" % (phase, local_feed.local_path, '\n'.join(valid_phases)))
				phase_actions[phase].append(x.content)
			elif x.uri == XMLNS_RELEASE and x.name == 'add-toplevel-directory':
				add_toplevel_dir = local_feed.get_name()
			else:
				warn("Unknown <release:management> element: %s", x)
	elif len(release_management) > 1:
		raise SafeException("Multiple <release:management> sections in %s!" % local_feed)
	else:
		info("No <release:management> element found in local feed.")

	scm = get_scm(local_feed, options)

	# Path relative to the archive / SCM root
	local_iface_rel_root_path = local_feed.local_path[len(scm.root_dir) + 1:]

	def run_hooks(phase, cwd, env):
		info("Running hooks for phase '%s'" % phase)
		full_env = os.environ.copy()
		full_env.update(env)
		for x in phase_actions[phase]:
			print "[%s]: %s" % (phase, x)
			support.check_call(x, shell = True, cwd = cwd, env = full_env)
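
	# set_to_release: pick the release version (prompting if --release-version
	# was not given), run the commit-release hooks, stamp the feed and commit
	# the release candidate on the temporary branch.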
	def set_to_release():
		print "Snapshot version is " + local_impl.get_version()
		release_version = options.release_version
		if release_version is None:
			suggested = support.suggest_release_version(local_impl.get_version())
			release_version = raw_input("Version number for new release [%s]: " % suggested)
			if not release_version:
				release_version = suggested

		scm.ensure_no_tag(release_version)

		status.head_before_release = scm.get_head_revision()

		working_copy = local_impl.id
		run_hooks('commit-release', cwd = working_copy, env = {'RELEASE_VERSION': release_version})

		print "Releasing version", release_version
		support.publish(local_feed.local_path, set_released = 'today', set_version = release_version)

		support.backup_if_exists(release_version)
		os.mkdir(release_version)
		os.chdir(release_version)

		status.old_snapshot_version = local_impl.get_version()
		status.release_version = release_version
		status.head_at_release = scm.commit('Release %s' % release_version, branch = TMP_BRANCH_NAME, parent = 'HEAD')

	def set_to_snapshot(snapshot_version):
		assert snapshot_version.endswith('-post')
		support.publish(local_feed.local_path, set_released = '', set_version = snapshot_version)
		scm.commit('Start development series %s' % snapshot_version, branch = TMP_BRANCH_NAME, parent = TMP_BRANCH_NAME)
		status.new_snapshot_version = scm.get_head_revision()
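
	# ensure_ready_to_release: sanity-check the working copy (everything
	# committed, the feed under version control) before anything gets tagged.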
	def ensure_ready_to_release():
		#if not options.master_feed_file:
		#	raise SafeException("Master feed file not set! Check your configuration")

		scm.ensure_committed()
		scm.ensure_versioned(os.path.abspath(local_feed.local_path))
		info("No uncommitted changes. Good.")
		# Not needed for GIT. For SCMs where tagging is expensive (e.g. svn) this might be useful.
		#run_unit_tests(local_impl)

		scm.grep('\(^\\|[^=]\)\<\\(TODO\\|XXX\\|FIXME\\)\>')

	def create_feed(target_feed, local_iface_path, archive_file, archive_name, main):
		shutil.copyfile(local_iface_path, target_feed)

		support.publish(target_feed,
			set_main = main,
			archive_url = support.get_archive_url(options, status.release_version, os.path.basename(archive_file)),
			archive_file = archive_file,
			archive_extract = archive_name)

	def get_previous_release(this_version):
		"""Return the highest numbered version in the master feed before this_version.
		@return: version, or None if there wasn't one"""
		parsed_release_version = model.parse_version(this_version)

		versions = [model.parse_version(version) for version in scm.get_tagged_versions()]
		versions = [version for version in versions if version < parsed_release_version]

		if versions:
			return model.format_version(max(versions))
		return None

	def export_changelog(previous_release):
		changelog = file('changelog-%s' % status.release_version, 'w')
		try:
			scm.export_changelog(previous_release, status.head_before_release, changelog)
		except SafeException, ex:
			print "WARNING: Failed to generate changelog: " + str(ex)
		finally:
			changelog.close()
		print "Wrote changelog from %s to here as %s" % (previous_release or 'start', changelog.name)
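
	# fail_candidate: throw the release candidate away - back up its directory,
	# delete the temporary branch and remove the status file so that a fresh
	# attempt can be made.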
	def fail_candidate():
		cwd = os.getcwd()
		assert cwd.endswith(status.release_version)
		support.backup_if_exists(cwd)
		scm.delete_branch(TMP_BRANCH_NAME)
		os.unlink(support.release_status_file)
		print "Restored to state before starting release. Make your fixes and try again..."
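
	# release_via_0repo: hand the merged feed to 0repo ("0repo add") for publishing.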
	def release_via_0repo(new_impls_feed):
		import repo.cmd
		support.make_archives_relative(new_impls_feed)
		oldcwd = os.getcwd()
		try:
			repo.cmd.main(['0repo', 'add', '--', new_impls_feed])
		finally:
			os.chdir(oldcwd)		# 0repo may have changed directory

	def release_without_0repo(archive_file, new_impls_feed):
		assert options.master_feed_file

		if not options.archive_dir_public_url:
			raise SafeException("Archive directory public URL is not set! Edit configuration and try again.")

		if status.updated_master_feed:
			print "Already added to master feed. Not changing."
		else:
			publish_opts = {}
			if os.path.exists(options.master_feed_file):
				# Check we haven't already released this version
				master = support.load_feed(os.path.realpath(options.master_feed_file))
				existing_releases = [impl for impl in master.implementations.values() if impl.get_version() == status.release_version]
				if len(existing_releases):
					raise SafeException("Master feed %s already contains an implementation with version number %s!" % (options.master_feed_file, status.release_version))

				previous_release = get_previous_release(status.release_version)
				previous_testing_releases = [impl for impl in master.implementations.values() if impl.get_version() == previous_release
								and impl.upstream_stability == model.stability_levels["testing"]]
				if previous_testing_releases:
					print "The previous release, version %s, is still marked as 'testing'. Set to stable?" % previous_release
					if support.get_choice(['Yes', 'No']) == 'Yes':
						publish_opts['select_version'] = previous_release
						publish_opts['set_stability'] = "stable"

			support.publish(options.master_feed_file, local = new_impls_feed, xmlsign = True, key = options.key, **publish_opts)

			status.updated_master_feed = 'true'

		uploads = [os.path.basename(archive_file)]
		for b in compiler.get_binary_feeds():
			binary_feed = support.load_feed(b)
			impl, = binary_feed.implementations.values()
			uploads.append(os.path.basename(impl.download_sources[0].url))

		upload_archives(options, status, uploads)

		feed_base = os.path.dirname(list(local_feed.feed_for)[0])
		feed_files = [options.master_feed_file]
		print "Upload %s into %s" % (', '.join(feed_files), feed_base)
		cmd = options.master_feed_upload_command.strip()
		if cmd:
			support.show_and_run(cmd, feed_files)
		else:
			print "NOTE: No feed upload command set => you'll have to upload them yourself!"
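
	# accept_and_publish: tag the release in the SCM, merge the source and
	# binary feeds into a single document, then publish it either through
	# 0repo or through the legacy master-feed path.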
	def accept_and_publish(archive_file, src_feed_name):
		if status.tagged:
			print "Already tagged in SCM. Not re-tagging."
		else:
			scm.ensure_committed()
			head = scm.get_head_revision()
			if head != status.head_before_release:
				raise SafeException("Changes committed since we started!\n" +
						"HEAD was " + status.head_before_release + "\n"
						"HEAD now " + head)

			scm.tag(status.release_version, status.head_at_release)
			scm.reset_hard(TMP_BRANCH_NAME)
			scm.delete_branch(TMP_BRANCH_NAME)

			status.tagged = 'true'

		assert len(local_feed.feed_for) == 1

		# Merge the source and binary feeds together first, so
		# that we update the master feed atomically and only
		# have to sign it once.
		with open(src_feed_name, 'rb') as stream:
			doc = minidom.parse(stream)
		for b in compiler.get_binary_feeds():
			with open(b, 'rb') as stream:
				bin_doc = minidom.parse(stream)
			merge.merge(doc, bin_doc)
		new_impls_feed = 'merged.xml'
		with open(new_impls_feed, 'wb') as stream:
			doc.writexml(stream)

		# TODO: support uploading to a sub-feed (requires support in 0repo too)
		master_feed, = local_feed.feed_for
		repository = registry.lookup(master_feed, missing_ok = True)
		if repository:
			release_via_0repo(new_impls_feed)
		else:
			release_without_0repo(archive_file, new_impls_feed)

		os.unlink(new_impls_feed)

		print "Push changes to public SCM repository..."
		public_repos = options.public_scm_repository
		if public_repos:
			scm.push_head_and_release(status.release_version)
		else:
			print "NOTE: No public repository set => you'll have to push the tag and trunk yourself."

		os.unlink(support.release_status_file)
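
	# Main flow: if a previous run left a release-status file behind, work out
	# where to resume from; otherwise start a fresh release.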
	if status.head_before_release:
		head = scm.get_head_revision()
		if status.release_version:
			print "RESUMING release of %s %s" % (local_feed.get_name(), status.release_version)
			if options.release_version and options.release_version != status.release_version:
				raise SafeException("Can't start release of version %s; we are currently releasing %s.\nDelete the release-status file to abort the previous release." % (options.release_version, status.release_version))
		elif head == status.head_before_release:
			print "Restarting release of %s (HEAD revision has not changed)" % local_feed.get_name()
		else:
			raise SafeException("Something went wrong with the last run:\n" +
					"HEAD revision for last run was " + status.head_before_release + "\n" +
					"HEAD revision now is " + head + "\n" +
					"You should revert your working copy to the previous head and try again.\n" +
					"If you're sure you want to release from the current head, delete '" + support.release_status_file + "'")
	else:
		print "Releasing", local_feed.get_name()

	ensure_ready_to_release()

	if status.release_version:
		if not os.path.isdir(status.release_version):
			raise SafeException("Can't resume; directory %s missing. Try deleting '%s'." % (status.release_version, support.release_status_file))
		os.chdir(status.release_version)
		need_set_snapshot = False
		if status.tagged:
			print "Already tagged. Resuming the publishing process..."
		elif status.new_snapshot_version:
			head = scm.get_head_revision()
			if head != status.head_before_release:
				raise SafeException("There are more commits since we started!\n"
						"HEAD was " + status.head_before_release + "\n"
						"HEAD now " + head + "\n"
						"To include them, delete '" + support.release_status_file + "' and try again.\n"
						"To leave them out, put them on a new branch and reset HEAD to the release version.")
		else:
			raise SafeException("Something went wrong previously when setting the new snapshot version.\n" +
					"Suggest you reset to the original HEAD of\n%s and delete '%s'." % (status.head_before_release, support.release_status_file))
	else:
		set_to_release()	# Changes directory
		assert status.release_version
		need_set_snapshot = True

	# May be needed by the upload command
	os.environ['RELEASE_VERSION'] = status.release_version

	archive_name = support.make_archive_name(local_feed.get_name(), status.release_version)
	archive_file = archive_name + '.tar.bz2'

	export_prefix = archive_name
	if add_toplevel_dir is not None:
		export_prefix += '/' + add_toplevel_dir

	if status.created_archive and os.path.isfile(archive_file):
		print "Archive already created"
	else:
		support.backup_if_exists(archive_file)
		scm.export(export_prefix, archive_file, status.head_at_release)

		has_submodules = scm.has_submodules()

		if phase_actions['generate-archive'] or has_submodules:
			try:
				support.unpack_tarball(archive_file)
				if has_submodules:
					scm.export_submodules(archive_name)
				run_hooks('generate-archive', cwd = archive_name, env = {'RELEASE_VERSION': status.release_version})
				info("Regenerating archive (may have been modified by generate-archive hooks)...")
				support.check_call(['tar', 'cjf', archive_file, archive_name])
			except SafeException:
				scm.reset_hard(scm.get_current_branch())
				fail_candidate()
				raise

		status.created_archive = 'true'

	if need_set_snapshot:
		set_to_snapshot(status.release_version + '-post')
		# Revert back to the original revision, so that any fixes the user makes
		# will get applied before the tag
		scm.reset_hard(scm.get_current_branch())

	#backup_if_exists(archive_name)
	support.unpack_tarball(archive_file)

	extracted_feed_path = os.path.abspath(os.path.join(export_prefix, local_iface_rel_root_path))
	assert os.path.isfile(extracted_feed_path), "Local feed not in archive! Is it under version control?"
	extracted_feed = support.load_feed(extracted_feed_path)
	extracted_impl = support.get_singleton_impl(extracted_feed)

	if extracted_impl.main:
		# Find main executable, relative to the archive root
		abs_main = os.path.join(os.path.dirname(extracted_feed_path), extracted_impl.id, extracted_impl.main)
		main = support.relative_path(archive_name + '/', abs_main)
		if main != extracted_impl.main:
			print "(adjusting main: '%s' for the feed inside the archive, '%s' externally)" % (extracted_impl.main, main)
		# XXX: this is going to fail if the feed uses the new <command> syntax
		if not os.path.exists(abs_main):
			raise SafeException("Main executable '%s' not found after unpacking archive!" % abs_main)
		if main == extracted_impl.main:
			main = None	# Don't change the main attribute
	else:
		main = None

	try:
		if status.src_tests_passed:
			print "Unit-tests already passed - not running again"
		else:
			# Make directories read-only (checks tests don't write)
			support.make_readonly_recursive(archive_name)

			run_unit_tests(extracted_feed_path)
			status.src_tests_passed = True
	except SafeException:
		print "(leaving extracted directory for examination)"
		fail_candidate()
		raise

	# Unpack it again in case the unit-tests changed anything
	ro_rmtree(archive_name)
	support.unpack_tarball(archive_file)

	# Generate feed for source
	src_feed_name = '%s.xml' % archive_name
	create_feed(src_feed_name, extracted_feed_path, archive_file, archive_name, main)
	print "Wrote source feed as %s" % src_feed_name

	# If it's a source package, compile the binaries now...
	compiler = compile.Compiler(options, os.path.abspath(src_feed_name), release_version = status.release_version)
	compiler.build_binaries()

	previous_release = get_previous_release(status.release_version)
	export_changelog(previous_release)

	if status.tagged:
		raw_input('Already tagged. Press Return to resume publishing process...')
		choice = 'Publish'
	else:
		print "\nCandidate release archive:", archive_file
		print "(extracted to %s for inspection)" % os.path.abspath(archive_name)

		print "\nPlease check candidate and select an action:"
		print "P) Publish candidate (accept)"
		print "F) Fail candidate (delete release-status file)"
		if previous_release:
			print "D) Diff against release archive for %s" % previous_release
			maybe_diff = ['Diff']
		else:
			maybe_diff = []
		print "(you can also hit CTRL-C and resume this script when done)"

		while True:
			choice = support.get_choice(['Publish', 'Fail'] + maybe_diff)
			if choice != 'Diff':
				break
			previous_archive_name = support.make_archive_name(local_feed.get_name(), previous_release)
			previous_archive_file = '../%s/%s.tar.bz2' % (previous_release, previous_archive_name)

			# For archives created by older versions of 0release
			if not os.path.isfile(previous_archive_file):
				old_previous_archive_file = '../%s.tar.bz2' % previous_archive_name
				if os.path.isfile(old_previous_archive_file):
					previous_archive_file = old_previous_archive_file

			if os.path.isfile(previous_archive_file):
				support.unpack_tarball(previous_archive_file)
				try:
					support.show_diff(previous_archive_name, archive_name)
				finally:
					shutil.rmtree(previous_archive_name)
			else:
				print "Sorry, archive file %s not found! Can't show diff." % previous_archive_file

	info("Deleting extracted archive %s", archive_name)
	shutil.rmtree(archive_name)

	if choice == 'Publish':
		accept_and_publish(archive_file, src_feed_name)
	else:
		assert choice == 'Fail'
		fail_candidate()