# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.

import os, sys, subprocess, shutil, tempfile
from zeroinstall import SafeException
from zeroinstall.injector import reader, model, qdom
from zeroinstall.support import ro_rmtree
from logging import info, warn

import support, compile
from scm import get_scm

XMLNS_RELEASE = 'http://zero-install.sourceforge.net/2007/namespaces/0release'

valid_phases = ['commit-release', 'generate-archive']
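# 'commit-release' actions run in the working copy just before the release commit;
# 'generate-archive' actions run inside the unpacked export before it is re-packed
# (see run_hooks() in do_release() below).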

TMP_BRANCH_NAME = '0release-tmp'

test_command = os.environ['0TEST']
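# Path of the 0test command used by run_unit_tests() below. 0release expects the
# 0TEST environment variable to be set before it starts (presumably provided by
# whatever launches it); if it is missing, this line raises KeyError immediately.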

def run_unit_tests(local_feed):
	print "Running self-tests..."
	exitstatus = subprocess.call([test_command, '--', local_feed])
	if exitstatus == 2:	# assumed: 0test signals "nothing to test" with exit status 2
		print "SKIPPED unit tests for %s (no 'self-test' attribute set)" % local_feed
		return
	if exitstatus:
		raise SafeException("Self-test failed with exit status %d" % exitstatus)

def get_archive_url(options, status, archive):
	archive_dir_public_url = options.archive_dir_public_url.replace('$RELEASE_VERSION', status.release_version)
	if not archive_dir_public_url.endswith('/'):
		archive_dir_public_url += '/'
	return archive_dir_public_url + archive
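# Example (hypothetical values): with archive_dir_public_url set to
# 'http://example.com/releases/$RELEASE_VERSION' and release_version '1.2',
# get_archive_url(options, status, 'foo-1.2.tar.bz2') returns
# 'http://example.com/releases/1.2/foo-1.2.tar.bz2'.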

def upload_archives(options, status, uploads):
	# For each binary or source archive in uploads, ensure it is available
	# from options.archive_dir_public_url

	# We try to do all the uploads together first, and then verify them all
	# afterwards. This is because we may have to wait for them to be moved
	# from an incoming queue before we can test them.

	def url(archive):
		return get_archive_url(options, status, archive)

	# Check that url exists and has the given size
	def is_uploaded(url, size):
		if url.startswith('http://TESTING/releases'):
			return True

		print "Testing URL %s..." % url
		try:
			actual_size = int(support.get_size(url))
		except Exception, ex:
			print "Can't get size of '%s': %s" % (url, ex)
			return False

		if actual_size == size:
			return True
		print "WARNING: %s exists, but size is %d, not %d!" % (url, actual_size, size)
		return False

	# status.verified_uploads is an array of status flags:
	description = {
		'N': 'Upload required',
		'A': 'Upload has been attempted, but we need to check whether it worked',
		'V': 'Upload has been checked (exists and has correct size)',
	}

	if status.verified_uploads is None:
		# First time around; no point checking for existing uploads
		status.verified_uploads = 'N' * len(uploads)
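
	# The loop below keeps going until every archive has been verified: it reports
	# the current status, uploads any 'N' entries (marking them 'A'), then re-checks
	# each 'A' entry, promoting it to 'V' on success or dropping it back to 'N' so
	# it is retried on the next pass.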
	while True:
		print "\nUpload status:"
		for i, stat in enumerate(status.verified_uploads):
			print "- %s : %s" % (uploads[i], description[stat])

		# Stop once every archive has been verified
		if status.verified_uploads == 'V' * len(uploads):
			break

		# Find all New archives
		to_upload = []
		for i, stat in enumerate(status.verified_uploads):
			if stat == 'N':
				to_upload.append(uploads[i])
				print "Upload %s/%s as %s" % (status.release_version, uploads[i], url(uploads[i]))

		cmd = options.archive_upload_command.strip()

		if to_upload:
			# Mark all New items as Attempted
			status.verified_uploads = status.verified_uploads.replace('N', 'A')

			if cmd:
				support.show_and_run(cmd, to_upload)
			else:
				if len(to_upload) == 1:
					print "No upload command is set => please upload the archive manually now"
					raw_input('Press Return once the archive is uploaded.')
				else:
					print "No upload command is set => please upload the archives manually now"
					raw_input('Press Return once the %d archives are uploaded.' % len(to_upload))

		# Verify all Attempted uploads
		new_stat = ''
		for i, stat in enumerate(status.verified_uploads):
			assert stat in 'AV', status.verified_uploads
			if stat == 'A':
				if not is_uploaded(url(uploads[i]), os.path.getsize(uploads[i])):
					print "** Archive '%s' still not uploaded! Try again..." % uploads[i]
					stat = 'N'
				else:
					stat = 'V'
			new_stat += stat
		status.verified_uploads = new_stat

		if 'N' in new_stat and cmd:
			raw_input('Press Return to try again.')
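
# do_release() drives the whole process: it defines a set of nested helpers
# (run_hooks, set_to_release, set_to_snapshot, create_feed, fail_candidate,
# accept_and_publish, ...) that share its local state, then runs the actual
# release steps at the end of the function body.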
def do_release(local_feed, options):
	assert options.master_feed_file
	options.master_feed_file = os.path.abspath(options.master_feed_file)

	if not options.archive_dir_public_url:
		raise SafeException("Downloads directory not set. Edit the 'make-release' script and try again.")

	if not local_feed.feed_for:
		raise SafeException("Feed %s missing a <feed-for> element" % local_feed.local_path)

	status = support.Status()
	local_impl = support.get_singleton_impl(local_feed)

	local_impl_dir = local_impl.id
	assert local_impl_dir.startswith('/')
	local_impl_dir = os.path.realpath(local_impl_dir)
	assert os.path.isdir(local_impl_dir)
	assert local_feed.local_path.startswith(local_impl_dir + '/')

	# From the impl directory to the feed
	# NOT relative to the archive root (in general)
	local_iface_rel_path = local_feed.local_path[len(local_impl_dir) + 1:]
	assert not local_iface_rel_path.startswith('/')
	assert os.path.isfile(os.path.join(local_impl_dir, local_iface_rel_path))

	phase_actions = {}
	for phase in valid_phases:
		phase_actions[phase] = []	# List of <release:action> elements

	add_toplevel_dir = None
	release_management = local_feed.get_metadata(XMLNS_RELEASE, 'management')
	if len(release_management) == 1:
		info("Found <release:management> element.")
		release_management = release_management[0]
		for x in release_management.childNodes:
			if x.uri == XMLNS_RELEASE and x.name == 'action':
				phase = x.getAttribute('phase')
				if phase not in valid_phases:
					raise SafeException("Invalid action phase '%s' in local feed %s. Valid actions are:\n%s" % (phase, local_feed.local_path, '\n'.join(valid_phases)))
				phase_actions[phase].append(x.content)
			elif x.uri == XMLNS_RELEASE and x.name == 'add-toplevel-directory':
				add_toplevel_dir = local_feed.get_name()
			else:
				warn("Unknown <release:management> element: %s", x)
	elif len(release_management) > 1:
		raise SafeException("Multiple <release:management> sections in %s!" % local_feed)
	else:
		info("No <release:management> element found in local feed.")

	scm = get_scm(local_feed, options)

	# Path relative to the archive / SCM root
	local_iface_rel_root_path = local_feed.local_path[len(scm.root_dir) + 1:]

	def run_hooks(phase, cwd, env):
		info("Running hooks for phase '%s'" % phase)
		full_env = os.environ.copy()
		full_env.update(env)
		for x in phase_actions[phase]:
			print "[%s]: %s" % (phase, x)
			support.check_call(x, shell = True, cwd = cwd, env = full_env)
	def set_to_release():
		print "Snapshot version is " + local_impl.get_version()
		suggested = support.suggest_release_version(local_impl.get_version())
		release_version = raw_input("Version number for new release [%s]: " % suggested)
		if not release_version:
			release_version = suggested

		scm.ensure_no_tag(release_version)

		status.head_before_release = scm.get_head_revision()

		working_copy = local_impl.id
		run_hooks('commit-release', cwd = working_copy, env = {'RELEASE_VERSION': release_version})

		print "Releasing version", release_version
		support.publish(local_feed.local_path, set_released = 'today', set_version = release_version)

		support.backup_if_exists(release_version)
		os.mkdir(release_version)
		os.chdir(release_version)

		status.old_snapshot_version = local_impl.get_version()
		status.release_version = release_version
		status.head_at_release = scm.commit('Release %s' % release_version, branch = TMP_BRANCH_NAME, parent = 'HEAD')
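
	# After the release commit, the feed is switched back to a '<version>-post'
	# snapshot version to mark the start of the next development series.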
	def set_to_snapshot(snapshot_version):
		assert snapshot_version.endswith('-post')
		support.publish(local_feed.local_path, set_released = '', set_version = snapshot_version)
		scm.commit('Start development series %s' % snapshot_version, branch = TMP_BRANCH_NAME, parent = TMP_BRANCH_NAME)
		status.new_snapshot_version = scm.get_head_revision()

	def ensure_ready_to_release():
		if not options.master_feed_file:
			raise SafeException("Master feed file not set! Check your configuration")

		scm.ensure_committed()
		scm.ensure_versioned(os.path.abspath(local_feed.local_path))
		info("No uncommitted changes. Good.")
		# Not needed for GIT. For SCMs where tagging is expensive (e.g. svn) this might be useful.
		#run_unit_tests(local_impl)

		scm.grep('\(^\\|[^=]\)\<\\(TODO\\|XXX\\|FIXME\\)\>')

	def create_feed(target_feed, local_iface_path, archive_file, archive_name, main):
		shutil.copyfile(local_iface_path, target_feed)

		support.publish(target_feed,
			set_main = main,	# (assumed: pass the adjusted main path on to the published feed)
			archive_url = get_archive_url(options, status, os.path.basename(archive_file)),
			archive_file = archive_file,
			archive_extract = archive_name)

	def get_previous_release(this_version):
		"""Return the highest numbered version in the master feed before this_version.
		@return: version, or None if there wasn't one"""
		parsed_release_version = model.parse_version(this_version)

		if os.path.exists(options.master_feed_file):
			master = model.Interface(os.path.realpath(options.master_feed_file))
			reader.update(master, master.uri, local = True)
			versions = [impl.version for impl in master.implementations.values() if impl.version < parsed_release_version]
			if versions:
				return model.format_version(max(versions))
		return None

	def export_changelog(previous_release):
		changelog = file('changelog-%s' % status.release_version, 'w')
		try:
			scm.export_changelog(previous_release, status.head_before_release, changelog)
		except SafeException, ex:
			print "WARNING: Failed to generate changelog: " + str(ex)
		else:
			print "Wrote changelog from %s to here as %s" % (previous_release or 'start', changelog.name)

	def fail_candidate(archive_file):
		cwd = os.getcwd()
		assert cwd.endswith(status.release_version)
		support.backup_if_exists(cwd)
		scm.delete_branch(TMP_BRANCH_NAME)
		os.unlink(support.release_status_file)
		print "Restored to state before starting release. Make your fixes and try again..."
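
	# accept_and_publish() runs once the candidate has been approved: it tags the
	# release in the SCM, merges the new source (and any binary) implementations
	# into the signed master feed, uploads the archives and feeds, and pushes the
	# result to the public repository.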
	def accept_and_publish(archive_file, archive_name, src_feed_name):
		assert options.master_feed_file

		if not options.archive_dir_public_url:
			raise SafeException("Archive directory public URL is not set! Edit configuration and try again.")

		if status.tagged:
			print "Already tagged in SCM. Not re-tagging."
		else:
			scm.ensure_committed()
			head = scm.get_head_revision()
			if head != status.head_before_release:
				raise SafeException("Changes committed since we started!\n" +
						"HEAD was " + status.head_before_release + "\n"
						"HEAD now " + head)

			scm.tag(status.release_version, status.head_at_release)
			scm.reset_hard(TMP_BRANCH_NAME)
			scm.delete_branch(TMP_BRANCH_NAME)

			status.tagged = 'true'

		if status.updated_master_feed:
			print "Already added to master feed. Not changing."
		else:
			publish_opts = {}

			if os.path.exists(options.master_feed_file):
				# Check we haven't already released this version
				master = model.Interface(os.path.realpath(options.master_feed_file))
				reader.update(master, master.uri, local = True)
				existing_releases = [impl for impl in master.implementations.values() if impl.get_version() == status.release_version]
				if len(existing_releases):
					raise SafeException("Master feed %s already contains an implementation with version number %s!" % (options.master_feed_file, status.release_version))

				previous_release = get_previous_release(status.release_version)
				previous_testing_releases = [impl for impl in master.implementations.values() if impl.get_version() == previous_release
								and impl.upstream_stability == model.stability_levels["testing"]]
				if previous_testing_releases:
					print "The previous release, version %s, is still marked as 'testing'. Set to stable?" % previous_release
					if support.get_choice(['Yes', 'No']) == 'Yes':
						publish_opts['select_version'] = previous_release
						publish_opts['set_stability'] = "stable"

			# Merge the source and binary feeds together first, so
			# that we update the master feed atomically and only
			# have to sign it once.
			shutil.copyfile(src_feed_name, 'merged.xml')
			for b in compiler.get_binary_feeds():
				support.publish('merged.xml', local = b)

			support.publish(options.master_feed_file, local = 'merged.xml', xmlsign = True, key = options.key, **publish_opts)
			os.unlink('merged.xml')

			status.updated_master_feed = 'true'

		# Upload the source archive plus any binary archives
		uploads = [os.path.basename(archive_file)]
		for b in compiler.get_binary_feeds():
			stream = file(b)
			binary_feed = model.ZeroInstallFeed(qdom.parse(stream), local_path = b)
			stream.close()
			impl, = binary_feed.implementations.values()
			uploads.append(os.path.basename(impl.download_sources[0].url))

		upload_archives(options, status, uploads)

		assert len(local_feed.feed_for) == 1
		feed_base = os.path.dirname(list(local_feed.feed_for)[0])
		feed_files = [options.master_feed_file]
		print "Upload %s into %s" % (', '.join(feed_files), feed_base)
		cmd = options.master_feed_upload_command.strip()
		if cmd:
			support.show_and_run(cmd, feed_files)
		else:
			print "NOTE: No feed upload command set => you'll have to upload them yourself!"

		print "Push changes to public SCM repository..."
		public_repos = options.public_scm_repository
		if public_repos:
			scm.push_head_and_release(status.release_version)
		else:
			print "NOTE: No public repository set => you'll have to push the tag and trunk yourself."

		os.unlink(support.release_status_file)

	if status.head_before_release:
		head = scm.get_head_revision()
		if status.release_version:
			print "RESUMING release of %s %s" % (local_feed.get_name(), status.release_version)
		elif head == status.head_before_release:
			print "Restarting release of %s (HEAD revision has not changed)" % local_feed.get_name()
		else:
			raise SafeException("Something went wrong with the last run:\n" +
					"HEAD revision for last run was " + status.head_before_release + "\n" +
					"HEAD revision now is " + head + "\n" +
					"You should revert your working copy to the previous head and try again.\n" +
					"If you're sure you want to release from the current head, delete '" + support.release_status_file + "'")
	else:
		print "Releasing", local_feed.get_name()

	ensure_ready_to_release()

	if status.release_version:
		if not os.path.isdir(status.release_version):
			raise SafeException("Can't resume; directory %s missing. Try deleting '%s'." % (status.release_version, support.release_status_file))
		os.chdir(status.release_version)
		need_set_snapshot = False
		if status.tagged:
			print "Already tagged. Resuming the publishing process..."
		elif status.new_snapshot_version:
			head = scm.get_head_revision()
			if head != status.head_before_release:
				raise SafeException("There are more commits since we started!\n"
						"HEAD was " + status.head_before_release + "\n"
						"HEAD now " + head + "\n"
						"To include them, delete '" + support.release_status_file + "' and try again.\n"
						"To leave them out, put them on a new branch and reset HEAD to the release version.")
		else:
			raise SafeException("Something went wrong previously when setting the new snapshot version.\n" +
					"Suggest you reset to the original HEAD of\n%s and delete '%s'." % (status.head_before_release, support.release_status_file))
	else:
		set_to_release()	# Changes directory
		assert status.release_version
		need_set_snapshot = True

	# May be needed by the upload command
	os.environ['RELEASE_VERSION'] = status.release_version

	archive_name = support.make_archive_name(local_feed.get_name(), status.release_version)
	archive_file = archive_name + '.tar.bz2'

	export_prefix = archive_name
	if add_toplevel_dir is not None:
		export_prefix += '/' + add_toplevel_dir

	if status.created_archive and os.path.isfile(archive_file):
		print "Archive already created"
	else:
		support.backup_if_exists(archive_file)
		scm.export(export_prefix, archive_file, status.head_at_release)

		has_submodules = scm.has_submodules()

		if phase_actions['generate-archive'] or has_submodules:
			try:
				support.unpack_tarball(archive_file)
				if has_submodules:
					scm.export_submodules(archive_name)
				run_hooks('generate-archive', cwd = archive_name, env = {'RELEASE_VERSION': status.release_version})
				info("Regenerating archive (may have been modified by generate-archive hooks...)")
				support.check_call(['tar', 'cjf', archive_file, archive_name])
			except SafeException:
				scm.reset_hard(scm.get_current_branch())
				fail_candidate(archive_file)
				raise

		status.created_archive = 'true'

	if need_set_snapshot:
		set_to_snapshot(status.release_version + '-post')
		# Revert back to the original revision, so that any fixes the user makes
		# will get applied before the tag
		scm.reset_hard(scm.get_current_branch())

	#backup_if_exists(archive_name)
	support.unpack_tarball(archive_file)

	extracted_iface_path = os.path.abspath(os.path.join(export_prefix, local_iface_rel_root_path))
	assert os.path.isfile(extracted_iface_path), "Local feed not in archive! Is it under version control?"
	extracted_iface = model.Interface(extracted_iface_path)
	reader.update(extracted_iface, extracted_iface_path, local = True)
	extracted_impl = support.get_singleton_impl(extracted_iface)

	if extracted_impl.main:
		# Find main executable, relative to the archive root
		abs_main = os.path.join(os.path.dirname(extracted_iface_path), extracted_impl.id, extracted_impl.main)
		main = support.relative_path(archive_name + '/', abs_main)
		if main != extracted_impl.main:
			print "(adjusting main: '%s' for the feed inside the archive, '%s' externally)" % (extracted_impl.main, main)
		# XXX: this is going to fail if the feed uses the new <command> syntax
		if not os.path.exists(abs_main):
			raise SafeException("Main executable '%s' not found after unpacking archive!" % abs_main)
		if main == extracted_impl.main:
			main = None	# Don't change the main attribute
	else:
		main = None

	if status.src_tests_passed:
		print "Unit-tests already passed - not running again"
	else:
		# Make directories read-only (checks tests don't write)
		support.make_readonly_recursive(archive_name)

		try:
			run_unit_tests(extracted_iface_path)
			status.src_tests_passed = True
		except SafeException:
			print "(leaving extracted directory for examination)"
			fail_candidate(archive_file)
			raise

	# Unpack it again in case the unit-tests changed anything
	ro_rmtree(archive_name)
	support.unpack_tarball(archive_file)

	# Generate feed for source
	stream = open(extracted_iface_path)
	src_feed_name = '%s.xml' % archive_name
	create_feed(src_feed_name, extracted_iface_path, archive_file, archive_name, main)
	print "Wrote source feed as %s" % src_feed_name

	# If it's a source package, compile the binaries now...
	compiler = compile.Compiler(options, os.path.abspath(src_feed_name))
	compiler.build_binaries()

	previous_release = get_previous_release(status.release_version)
	export_changelog(previous_release)

	if status.tagged:
		raw_input('Already tagged. Press Return to resume publishing process...')
		choice = 'Publish'
	else:
		print "\nCandidate release archive:", archive_file
		print "(extracted to %s for inspection)" % os.path.abspath(archive_name)

		print "\nPlease check candidate and select an action:"
		print "P) Publish candidate (accept)"
		print "F) Fail candidate (untag)"
		if previous_release:
			print "D) Diff against release archive for %s" % previous_release
			maybe_diff = ['Diff']
		else:
			maybe_diff = []
		print "(you can also hit CTRL-C and resume this script when done)"

		while True:
			choice = support.get_choice(['Publish', 'Fail'] + maybe_diff)
			if choice == 'Diff':
				previous_archive_name = support.make_archive_name(local_feed.get_name(), previous_release)
				previous_archive_file = '../%s/%s.tar.bz2' % (previous_release, previous_archive_name)

				# For archives created by older versions of 0release
				if not os.path.isfile(previous_archive_file):
					old_previous_archive_file = '../%s.tar.bz2' % previous_archive_name
					if os.path.isfile(old_previous_archive_file):
						previous_archive_file = old_previous_archive_file

				if os.path.isfile(previous_archive_file):
					support.unpack_tarball(previous_archive_file)
					try:
						support.show_diff(previous_archive_name, archive_name)
					finally:
						shutil.rmtree(previous_archive_name)
				else:
					print "Sorry, archive file %s not found! Can't show diff." % previous_archive_file
			else:
				break

	info("Deleting extracted archive %s", archive_name)
	shutil.rmtree(archive_name)

	if choice == 'Publish':
		accept_and_publish(archive_file, archive_name, src_feed_name)
	else:
		assert choice == 'Fail'
		fail_candidate(archive_file)