# Copyright (C) 2009, Thomas Leonard
# Copyright (C) 2008, Anders F Bjorklund
# See the COPYING file for details, or visit http://0install.net.
from optparse import OptionParser
import tempfile, shutil, os
import sys, re, time, gzip, tarfile, hashlib, subprocess	# needed by the code below
from xml.dom import minidom

try:
    import xml.etree.cElementTree as ET	# Python 2.5
except ImportError:
    try:
        import xml.etree.ElementTree as ET
    except ImportError:
        try:
            import cElementTree as ET	# http://effbot.org
        except ImportError:
            import elementtree.ElementTree as ET

try:
    from subprocess import check_call
except ImportError:
    # Fallback for Python versions without subprocess.check_call
    def check_call(*popenargs, **kwargs):
        rc = subprocess.call(*popenargs, **kwargs)
        if rc != 0: raise OSError, rc
from zeroinstall.injector import model, qdom, distro
from zeroinstall.zerostore import unpack

from support import read_child, add_node, Mappings

manifest_algorithm = 'sha1new'
deb_category_to_freedesktop = {
    'devel' : 'Development',
    'graphics' : 'Graphics',
}

rpm_group_to_freedesktop = {
    'Development/Libraries' : 'Development',
}

rpm_license_to_trove = {
    'BSD' : "OSI Approved :: BSD License (revised)",
    'MIT' : "OSI Approved :: MIT/X Consortium License",
    'GPLv2' : "OSI Approved :: GNU General Public License v2",
    'GPLv2+' : "OSI Approved :: GNU General Public License v2",
    'GPLv3' : "OSI Approved :: GNU General Public License v3",
    'GPLv3+' : "OSI Approved :: GNU General Public License v3",
    'LGPLv2' : "OSI Approved :: GNU Lesser General Public License (LGPL)",
    'LGPLv2+' : "OSI Approved :: GNU Lesser General Public License (LGPL)",
    'GFDL' : "OSI Approved :: GNU Free Documentation License (FDL)",
    'Python' : "OSI Approved :: Python License",
}

slack_series_to_freedesktop = {
}
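
# Assumed list of freedesktop.org main menu categories, used further down to
# sanity-check the Categories= line of any .desktop file found in the package.
valid_categories = set([
    'AudioVideo', 'Audio', 'Video', 'Development', 'Education', 'Game',
    'Graphics', 'Network', 'Office', 'Settings', 'System', 'Utility',
])
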
# Parse command-line arguments

parser = OptionParser('usage: %prog [options] http://.../package.deb [target-feed.xml]\n'
                      '       %prog [options] http://.../package.rpm [target-feed.xml]\n'
                      '       %prog [options] http://.../package.txz [target-feed.xml]\n'
                      '       %prog [options] package-name [target-feed.xml]\n'
                      'Publish a Debian, RPM, or Slack package in a Zero Install feed.\n'
                      "target-feed.xml is created if it doesn't already exist.")
parser.add_option("-a", "--archive-url", help="archive to use as the package contents")
parser.add_option("", "--archive-extract", help="only extract files under this subdirectory")
parser.add_option("", "--license", help="value for 'license' attribute")
parser.add_option("-r", "--repomd-file", help="repository metadata file")
parser.add_option("", "--path", help="location of packages [5/os/i386]")
parser.add_option("-p", "--packages-file", help="Debian/Slack package index file")
parser.add_option("-5", "--checksums-file", help="Slack package MD5 checksum file")
parser.add_option("-m", "--mirror", help="location of packages [http://ftp.debian.org/debian] or [http://mirror.centos.org/centos]")
parser.add_option("-k", "--key", help="key to use for signing")
(options, args) = parser.parse_args()

if len(args) < 1 or len(args) > 2:
    parser.print_help()
    sys.exit(1)
# Load dependency mappings
mappings = Mappings()
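
# Minimal sketch of a holder for the metadata extracted from a package; the
# fields listed are the ones the repository classes and the feed-generation
# code below actually use.
class Package:
    def __init__(self):
        self.name = None
        self.summary = None
        self.description = None
        self.version = None
        self.arch = None
        self.category = None
        self.license = None
        self.homepage = None
        self.buildtime = None
        self.requires = []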

class DebRepo:
    def __init__(self, options):
        self.packages_base_url = (options.mirror or 'http://ftp.debian.org/debian') + '/'
        self.packages_file = options.packages_file or 'Packages'

    def get_repo_metadata(self, pkg_name):
        if not os.path.isfile(self.packages_file):
            print >>sys.stderr, ("File '%s' not found (use -p to give its location).\n"
                "Either download one (e.g. ftp://ftp.debian.org/debian/dists/stable/main/binary-amd64/Packages.bz2),\n"
                "or specify the full URL of the .deb package to use.") % self.packages_file
            sys.exit(1)

        if self.packages_file.endswith('.bz2'):
            import bz2
            opener = bz2.BZ2File
        else:
            opener = open
        pkg_data = "\n" + opener(self.packages_file).read()

        try:
            i = pkg_data.index('\nPackage: %s\n' % pkg_name)
        except ValueError:
            raise Exception("Package '%s' not found in Packages file '%s'." % (pkg_name, self.packages_file))
        j = pkg_data.find('\n\n', i)
        if j == -1:
            pkg_info = pkg_data[i:]
        else:
            pkg_info = pkg_data[i:j]

        filename = None
        digest = {}
        for line in pkg_info.split('\n'):
            if ':' in line and not line.startswith(' '):
                key, value = line.split(':', 1)
                if key == 'Filename':
                    filename = value.strip()
                elif key in ('SHA1', 'SHA256'):
                    digest[key.lower()] = value.strip()
        if not filename:
            raise Exception('Filename: field not found in package data:\n' + pkg_info)
        pkg_url = self.packages_base_url + filename

        return pkg_url, digest

    def get_package_metadata(self, pkg_file):
        package = Package()

        details = read_child(['dpkg-deb', '--info', pkg_file])

        description_and_summary = details.split('\n Description: ')[1].split('\n')
        package.summary = description_and_summary[0]
        description = ''
        for x in description_and_summary[1:]:
            description += x[1:].replace('. ', '. ') + '\n'
        package.description = description.strip()

        for line in details.split('\n'):
            if not line: continue
            assert line.startswith(' ')
            line = line.strip()
            if ':' not in line: continue
            key, value = line.split(':', 1)
            value = value.strip()
            if key == 'Section':
                package.category = deb_category_to_freedesktop.get(value)
                if not package.category:
                    print >>sys.stderr, "Warning: no mapping for Debian category '%s'" % value
            elif key == 'Package':
                package.name = value
            elif key == 'Version':
                value = value.replace('cvs', '')
                value = value.replace('svn', '')
                if ':' in value: value = value.split(':', 1)[1]
                package.version = distro.try_cleanup_distro_version(value)
            elif key == 'Architecture':
                if '-' in value:
                    arch, value = value.split('-', 1)
                else:
                    arch = 'linux'
                package.arch = arch.capitalize() + '-' + value
            elif key == 'Depends':
                for x in value.split(','):
                    req = mappings.process(x)
                    if req:
                        package.requires.append(req)
        return package

class RPMRepo:
    def __init__(self, options):
        self.packages_base_url = (options.mirror or 'http://mirror.centos.org/centos') + '/'
        self.packages_base_dir = (options.path or '5/os/i386') + '/'
        self.repomd_file = options.repomd_file or 'repodata/repomd.xml'
        if not os.path.isfile(self.repomd_file):
            print >>sys.stderr, ("File '%s' not found (use -r to give its location).\n"
                "Either download one (e.g. http://mirror.centos.org/centos/5/os/i386/repodata/repomd.xml),\n"
                "or specify the full URL of the .rpm package to use.") % self.repomd_file
            sys.exit(1)

    def get_repo_metadata(self, pkg_name):
        repomd = minidom.parse(self.repomd_file)
        repo_top = os.path.dirname(os.path.dirname(self.repomd_file))
        for data in repomd.getElementsByTagName("data"):
            if data.attributes["type"].nodeValue == "primary":
                for node in data.getElementsByTagName("location"):
                    primary_file = os.path.join(repo_top, node.attributes["href"].nodeValue)

        primary = ET.parse(gzip.open(primary_file))
        NS = "http://linux.duke.edu/metadata/common"
        metadata = primary.getroot()

        pkg_data = None
        for package in metadata.findall("{%s}package" % NS):
            if package.find("{%s}name" % NS).text == pkg_name:
                pkg_data = package
                location = pkg_data.find("{%s}location" % NS).get("href")
                break
        if pkg_data is None:
            raise Exception("Package '%s' not found in repodata." % pkg_name)

        checksum = pkg_data.find("{%s}checksum" % NS)
        digest = {}
        if checksum.get("type") == "sha":
            digest["sha1"] = checksum.text
        if checksum.get("type") == "sha256":
            digest["sha256"] = checksum.text
        if not location:
            raise Exception('location tag not found in primary metadata:\n' + primary_file)
        pkg_url = self.packages_base_url + self.packages_base_dir + location

        return pkg_url, digest

    def get_package_metadata(self, pkg_file):
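        # The query asks rpm for each field separated by a '\a' marker (which the
        # split() below keys on); [%{REQUIRES}\n] expands to one line per dependency.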
        query_format = '%{SUMMARY}\\a%{DESCRIPTION}\\a%{NAME}\\a%{VERSION}\\a%{OS}\\a%{ARCH}\\a%{URL}\\a%{GROUP}\\a%{LICENSE}\\a%{BUILDTIME}\\a[%{REQUIRES}\\n]'
        headers = read_child(['rpm', '--qf', query_format, '-qp', pkg_file]).split('\a')

        package = Package()
        package.summary = headers[0].strip()
        package.description = headers[1].strip()

        package.name = headers[2]

        value = headers[3]
        value = value.replace('cvs', '')
        value = value.replace('svn', '')
        value = distro.try_cleanup_distro_version(value)
        package.version = value

        value = headers[4].strip()
        package.arch = value.capitalize()

        value = headers[5].strip()
        if value == 'noarch':
            value = '*'
        package.arch += '-' + value

        value = headers[6].strip()
        package.homepage = value

        value = headers[7].strip()
        package.category = rpm_group_to_freedesktop.get(value)
        if not package.category:
            print >>sys.stderr, "Warning: no mapping for RPM group '%s'" % value

        value = headers[8].strip()
        package.license = rpm_license_to_trove.get(value, value)
        value = headers[9].strip()
        package.buildtime = long(value)
        value = headers[10].strip()
        for x in value.split('\n'):
            if x.startswith('rpmlib'):
                continue
            req = mappings.process(x)
            if req:
                package.requires.append(req)
        return package

class SlackRepo:
    def __init__(self, options):
        self.packages_base_url = (options.mirror or 'ftp://ftp.slackware.com/pub/slackware') + '/'
        self.packages_base_dir = (options.path or 'slackware-current') + '/'
        self.packages_file = options.packages_file or 'PACKAGES.TXT'
        self.checksums_file = options.checksums_file or 'CHECKSUMS.md5'
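
    # Slackware package files are named <name>-<version>-<arch>-<build>.t{gz,bz,lz,xz};
    # NAMERE separates the name from the version-arch-build part and the extension.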
    NAMERE = re.compile("^(.+?)-([^-]+-[^-]+-[^-]+?)(.t[gblx]z)?$")

    def get_package_name(self, pkg_file):
        m = self.NAMERE.match(pkg_file)
        if m:
            return m.group(1)
        return None

    def get_repo_metadata(self, pkg_name):
        if not os.path.isfile(self.packages_file):
            print >>sys.stderr, ("File '%s' not found (use -p to give its location).\n"
                "Either download one (e.g. ftp://ftp.slackware.com/pub/slackware/slackware-current/PACKAGES.TXT),\n"
                "or specify the full URL of the .txz package to use.") % self.packages_file
            sys.exit(1)

        if self.packages_file.endswith('.gz'):
            opener = gzip.GzipFile
        else:
            opener = open
        pkg_data = "\n" + opener(self.packages_file).read()

        try:
            while True:
                i = pkg_data.index('\nPACKAGE NAME: %s-' % pkg_name)
                j = pkg_data.find('\n\n', i)
                if j == -1:
                    pkg_info = pkg_data[i:]
                else:
                    pkg_info = pkg_data[i:j]
                name = self.get_package_name(pkg_info[16:].split('\n')[0])
                if name == pkg_name:
                    break
                pkg_data = pkg_data[j:]
        except ValueError:
            raise Exception("Package '%s' not found in Packages file '%s'." % (pkg_name, self.packages_file))

        name = None
        location = None
        description = ''
        for line in pkg_info.split('\n'):
            if line.startswith(pkg_name + ': '):
                description += line.replace(pkg_name + ': ', "")
                continue
            if ':' not in line:
                continue
            key, value = line.split(':', 1)
            if key == "PACKAGE NAME":
                name = value.strip()
            if key == "PACKAGE LOCATION":
                location = value.strip()
        if name is None or location is None:
            raise Exception('Package filename not found in package data:\n' + pkg_info)
        pkg_file = os.path.join(location, name)
        if location.startswith('./'):
            location = location.replace('./', "")
        pkg_url = self.packages_base_url + self.packages_base_dir + location + '/' + name

        if os.path.exists(self.checksums_file):
            checksums = open(self.checksums_file)
            for line in checksums:
                if line.startswith("MD5 message digest"):
                    break
            for line in checksums:
                digest, filename = line.strip().split(None, 1)
                if filename == pkg_file:
                    break
            digest = {'md5': digest}
        else:
            digest = {}
        return pkg_url, digest

    def get_package_metadata(self, pkg_file):
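        # A .txz package is an xz-compressed tarball; the tarfile module used here
        # cannot read xz directly, so copy it aside and decompress it with unxz first.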
        if pkg_file.endswith('.txz'):
            fd, tmp_file = tempfile.mkstemp(suffix='.tar')
            check_call(['cp', pkg_file, (tmp_file + '.xz')])
            check_call(['rm', tmp_file])
            check_call(['unxz', (tmp_file + '.xz')])
            pkg_info = tarfile.open(tmp_file)
        else:
            pkg_info = tarfile.open(pkg_file)
        slack_desc = pkg_info.extractfile('install/slack-desc')
        pkg_name = self.get_package_name(os.path.basename(pkg_file))

        package = Package()
        description = ''
        for line in slack_desc:
            if line.startswith(pkg_name + ': '):
                description += line.replace(pkg_name + ': ', "")
        package.summary, package.description = description.split('\n', 1)
        for line in description.split('\n'):
            if line.startswith("Homepage: "):
                package.homepage = line[10:].strip()

        m = self.NAMERE.match(os.path.basename(pkg_file))
        package.name = m.group(1)
        version = m.group(2).split('-')
        package.version = "%s-%s" % (version[0], version[2])
        package.arch = "%s-%s" % ("Linux", version[1])
        return package

if args[0].endswith('.txz') or options.checksums_file:
    repo = SlackRepo(options)
    dist = 'Slack'
elif args[0].endswith('.deb') or options.packages_file:
    repo = DebRepo(options)
    dist = 'Debian'
elif args[0].endswith('.rpm') or options.repomd_file:
    repo = RPMRepo(options)
    dist = 'RPM'
else:
    print >>sys.stderr, "Use --packages-file for Debian, or --repomd-file for RPM"
    sys.exit(1)

digest = {}
if options.archive_url:
    pkg_file = os.path.abspath(args[0])
    archive_url = options.archive_url
    archive_file = os.path.abspath(archive_url.rsplit('/', 1)[1])

    assert os.path.exists(pkg_file), ("%s doesn't exist!" % pkg_file)
else:
    scheme = args[0].split(':', 1)[0]
    if scheme in ('http', 'https', 'ftp'):
        archive_url = args[0]
    else:
        archive_url, digest = repo.get_repo_metadata(args[0])
    archive_file = os.path.abspath(archive_url.rsplit('/', 1)[1])
    pkg_url = archive_url
    pkg_file = archive_file

# pkg_url, pkg_file = .deb or .rpm with the metadata
# archive_url, archive_file = .deb, .rpm or .tar.bz2 with the contents
#
# Often pkg == archive, but sometimes it's useful to convert packages to tarballs
# so people don't need special tools to extract them.

# Download package, if required

if not os.path.exists(pkg_file):
    print >>sys.stderr, "File '%s' not found, so downloading from %s..." % (pkg_file, pkg_url)
    if os.path.exists('/usr/bin/wget'):
        check_call(['wget', pkg_url])
    else:
        check_call(['curl', '-LO', pkg_url])

# Check digest, if known
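# Use the strongest digest the repository metadata provided: SHA-256 if present,
# otherwise SHA-1, otherwise MD5.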
463 if "sha256" in digest
:
465 m
= hashlib
.new('sha256')
466 expected_digest
= digest
["sha256"]
467 elif "sha1" in digest
:
470 m
= hashlib
.new('sha1')
474 expected_digest
= digest
["sha1"]
475 elif "md5" in digest
:
478 m
= hashlib
.new('md5')
482 expected_digest
= digest
["md5"]
487 m
.update(file(archive_file
).read())
488 actual
= m
.hexdigest()
489 if actual
!= expected_digest
:
490 raise Exception("Incorrect digest on package file! Was " + actual
+ ", but expected " + expected_digest
)
492 print "Package's digest matches value in reposistory metadata (" + actual
+ "). Good."
494 print >>sys
.stderr
, "Note: no MD5, SHA-1 or SHA-256 digest known for this package, so not checking..."

# Extract meta-data from package

pkg_metadata = repo.get_package_metadata(pkg_file)

# Unpack package, find binaries and .desktop files, and add to cache

tmp = tempfile.mkdtemp(prefix = 'pkg2zero-')
unpack_dir = os.path.join(tmp, 'unpacked')
os.mkdir(unpack_dir)
unpack.unpack_archive(archive_file, open(archive_file), destdir = unpack_dir, extract = options.archive_extract)
if options.archive_extract:
    unpack_dir = os.path.join(unpack_dir, options.archive_extract)

possible_mains = []
images = {}
icon = None

for root, dirs, files in os.walk(unpack_dir):
    assert root.startswith(unpack_dir)
    relative_root = root[len(unpack_dir) + 1:]
    for name in files:
        full = os.path.join(root, name)
        f = os.path.join(relative_root, name)

        if f.endswith('.desktop'):
            for line in file(full):
                if line.startswith('Categories'):
                    for cat in line.split('=', 1)[1].split(';'):
                        cat = cat.strip()
                        if cat in valid_categories:
                            if not pkg_metadata.category:
                                pkg_metadata.category = cat
                elif line.startswith('Icon'):
                    icon = line.split('=', 1)[1].strip()
        elif f.startswith('bin/') or f.startswith('usr/bin/') or f.startswith('usr/games/'):
            if os.path.isfile(full):
                possible_mains.append(f)
        elif f.endswith('.png'):
            images[os.path.basename(f)] = full
            # make sure to also map basename without the extension
            images[os.path.splitext(os.path.basename(f))[0]] = full

icondata = None
if icon and icon in images:
    print "Using %s for icon" % os.path.basename(images[icon])
    icondata = file(images[icon]).read()
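
# '0store manifest' prints the manifest followed by its digest on the final line;
# that digest (e.g. "sha1new=...") becomes the implementation id in the feed below.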
manifest = read_child(['0store', 'manifest', unpack_dir, manifest_algorithm])
digest = manifest.rsplit('\n', 2)[1]
check_call(['0store', 'add', digest, unpack_dir])

pkg_main = None
if possible_mains:
    # Prefer the shortest path as the main binary
    possible_mains = sorted(possible_mains, key = len)
    pkg_main = possible_mains[0]
    if len(possible_mains) > 1:
        print "Warning: several possible main binaries found:"
        print "- " + pkg_main + " (I chose this one)"
        for x in possible_mains[1:]:
            print "- " + x

# Make sure we haven't added this version already...

if len(args) == 2:
    target_feed_file = args[1]
    target_icon_file = args[1].replace('.xml', '.png')
else:
    target_feed_file = pkg_metadata.name + '.xml'
    target_icon_file = pkg_metadata.name + '.png'

if os.path.isfile(target_feed_file):
    dom = qdom.parse(file(target_feed_file))
    old_target_feed = model.ZeroInstallFeed(dom, local_path = target_feed_file)
    existing_impl = old_target_feed.implementations.get(digest)
    if existing_impl:
        print >>sys.stderr, ("Feed '%s' already contains an implementation with this digest!\n%s" % (target_feed_file, existing_impl))
        sys.exit(1)

# No target, so need to pick a URI
feed_uri = mappings.lookup(pkg_metadata.name)
if feed_uri is None:
    suggestion = mappings.get_suggestion(pkg_metadata.name)
    uri = raw_input('Enter the URI for this feed [%s]: ' % suggestion).strip()
    if not uri:
        uri = suggestion
    assert uri.startswith('http://') or uri.startswith('https://') or uri.startswith('ftp://'), uri
    feed_uri = uri
    mappings.add_mapping(pkg_metadata.name, uri)

icon_uri = None

if icondata and not os.path.isfile(target_icon_file):
    file = open(target_icon_file, 'wb')
    file.write(icondata)
    file.close()

if icon_uri is None and os.path.exists(target_icon_file):
    suggestion = 'http://0install.net/feed_icons/' + target_icon_file
    uri = raw_input('Enter the URI for this icon [%s]: ' % suggestion).strip()
    if not uri:
        uri = suggestion
    assert uri.startswith('http://') or uri.startswith('https://') or uri.startswith('ftp://'), uri
    icon_uri = uri

# Create a local feed with just the new version...

template = '''<interface xmlns="http://zero-install.sourceforge.net/2004/injector/interface">
</interface>'''
doc = minidom.parseString(template)
root = doc.documentElement

add_node(root, 'name', pkg_metadata.name)
add_node(root, 'summary', pkg_metadata.summary)
add_node(root, 'description', pkg_metadata.description)
feed_for = add_node(root, 'feed-for', '')
feed_for.setAttribute('interface', feed_uri)
if icon_uri:
    icon = add_node(root, 'icon')
    icon.setAttribute('href', icon_uri)
    icon.setAttribute('type', 'image/png')
if pkg_metadata.homepage:
    add_node(root, 'homepage', pkg_metadata.homepage)
if pkg_metadata.category:
    add_node(root, 'category', pkg_metadata.category)

package = add_node(root, 'package-implementation', '')
package.setAttribute('package', pkg_metadata.name)
package.setAttribute('distributions', dist)

group = add_node(root, 'group', '')
if pkg_metadata.arch:
    group.setAttribute('arch', pkg_metadata.arch)
else:
    print >>sys.stderr, "No Architecture: field in package"
if pkg_metadata.license:
    group.setAttribute('license', pkg_metadata.license)
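
# Each mapped dependency becomes a <requires> element with an <environment>
# binding, so usr/lib inside that dependency is added to LD_LIBRARY_PATH at run time.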
for req in pkg_metadata.requires:
    req_element = add_node(group, 'requires', before = '\n ', after = '')
    req_element.setAttribute('interface', req.interface)
    binding = add_node(req_element, 'environment', before = '\n ', after = '\n ')
    binding.setAttribute('name', 'LD_LIBRARY_PATH')
    binding.setAttribute('insert', 'usr/lib')

if pkg_main:
    group.setAttribute('main', pkg_main)
    package.setAttribute('main', '/' + pkg_main)

impl = add_node(group, 'implementation', before = '\n ', after = '\n ')
impl.setAttribute('id', digest)
assert pkg_metadata.version
impl.setAttribute('version', pkg_metadata.version)
if options.license:
    impl.setAttribute('license', options.license)
if pkg_metadata.buildtime:
    impl.setAttribute('released', time.strftime('%Y-%m-%d', time.localtime(pkg_metadata.buildtime)))
else:
    impl.setAttribute('released', time.strftime('%Y-%m-%d'))

archive = add_node(impl, 'archive', before = '\n ', after = '\n ')
archive.setAttribute('href', archive_url)
archive.setAttribute('size', str(os.path.getsize(archive_file)))
if options.archive_extract:
    archive.setAttribute('extract', options.archive_extract)

# Add our new version to the main feed...

output_stream = tempfile.NamedTemporaryFile(prefix = 'pkg2zero-')
output_stream.write("<?xml version='1.0'?>\n")
root.writexml(output_stream)
output_stream.write('\n')
output_stream.flush()
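
# 0publish merges the temporary local feed into target_feed_file (creating it if
# it doesn't exist yet) and, when a key was given, signs the result.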
publishing_options = []
if options.key:
    # Note: 0publish < 0.16 requires the --xmlsign option too
    publishing_options += ['--xmlsign', '--key', options.key]
PUBLISH_COMMAND = os.environ.get('PUBLISH_COMMAND', '0publish')
check_call([PUBLISH_COMMAND] + publishing_options + ['--local', output_stream.name, target_feed_file])
print "Added version %s to %s" % (pkg_metadata.version, target_feed_file)

output_stream.close()