from optparse import OptionParser
import sys, shutil, tempfile, urlparse
import urllib2, os, httplib
import ftplib, socket    # used by get_ftp_size() and the download error handlers below
import logging, time, traceback
from logging import info

from zeroinstall import SafeException
from zeroinstall.injector import model, gpg, namespaces, qdom

from display import checking, result, error, highlight, error_new_line
WEEK = 60 * 60 * 24 * 7
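# host() and port() accept either a urlparse result object (which has
# .hostname/.port attributes) or a plain (scheme, netloc, ...) tuple, and pull
# the hostname or port out of the netloc part.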
def host(address):
    if hasattr(address, 'hostname'):
        return address.hostname
    else:
        return address[1].split(':', 1)[0]

def port(address):
    if hasattr(address, 'port'):
        return address.port
    else:
        port = address[1].split(':', 1)[1:]
        if port:
            return int(port[0])
        return None

assert port(('http', 'foo:81')) == 81
assert port(urlparse.urlparse('http://foo:81')) == 81
parser = OptionParser(usage="usage: %prog [options] feed.xml")
parser.add_option("-d", "--dependencies", help="also check feeds for dependencies", action='store_true')
parser.add_option("-s", "--skip-archives", help="don't check the archives are OK", action='store_true')
parser.add_option("-v", "--verbose", help="more verbose output", action='count')
parser.add_option("-V", "--version", help="display version information", action='store_true')

(options, args) = parser.parse_args()
50 print "FeedLint (zero-install) " + version
51 print "Copyright (C) 2007 Thomas Leonard"
52 print "This program comes with ABSOLUTELY NO WARRANTY,"
53 print "to the extent permitted by law."
54 print "You may redistribute copies of this program"
55 print "under the terms of the GNU General Public License."
56 print "For more information about these matters, see the file named COPYING."
if options.verbose:
    logger = logging.getLogger()
    if options.verbose == 1:
        logger.setLevel(logging.INFO)
    else:
        logger.setLevel(logging.DEBUG)
checked = set()

try:
    to_check = [model.canonical_iface_uri(a) for a in args]
except SafeException, ex:
    if options.verbose: raise
    print >>sys.stderr, ex
    sys.exit(1)
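# check_key: make sure the key that signed a feed is on the local GPG keyring,
# and that the corresponding KEYID.gpg file can be fetched from next to the feed.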
def check_key(feed, fingerprint):
    for line in os.popen('gpg --with-colons --list-keys %s' % fingerprint):
        if line.startswith('pub:'):
            key_id = line.split(':')[4]
            break
    else:
        raise SafeException('Failed to find key with fingerprint %s on your keyring' % fingerprint)

    key_url = urlparse.urljoin(feed, '%s.gpg' % key_id)

    if key_url in checked:
        info("(already checked key URL %s)", key_url)
    else:
        checking("Checking key %s" % key_url)
        urllib2.urlopen(key_url).read()
        checked.add(key_url)
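# get_http_size: issue a HEAD request and return the Content-Length header,
# following up to `ttl` HTTP redirects (301/302/303).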
def get_http_size(url, ttl = 3):
    address = urlparse.urlparse(url)

    if url.lower().startswith('http://'):
        http = httplib.HTTPConnection(host(address), port(address) or 80)
    elif url.lower().startswith('https://'):
        http = httplib.HTTPSConnection(host(address), port(address) or 443)

    parts = url.split('/', 3)
    if len(parts) == 4:
        path = parts[3]
    else:
        path = ''

    http.request('HEAD', '/' + path, headers = {'Host': host(address)})
    response = http.getresponse()
    if response.status == 200:
        return response.getheader('Content-Length')
    elif response.status in (301, 302, 303):
        new_url_rel = response.getheader('Location') or response.getheader('URI')
        new_url = urlparse.urljoin(url, new_url_rel)
    else:
        raise SafeException("HTTP error: got status code %s" % response.status)

    if ttl:
        result("Moved", 'YELLOW')
        checking("Checking new URL %s" % new_url)
        return get_http_size(new_url, ttl - 1)
    else:
        raise SafeException('Too many redirections.')
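# get_ftp_size: ask the FTP server for the file's size (SIZE only works in
# binary mode, hence the TYPE I command first).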
def get_ftp_size(url):
    address = urlparse.urlparse(url)
    ftp = ftplib.FTP(host(address))
    ftp.login()    # anonymous login
    ftp.voidcmd('TYPE I')
    return ftp.size(url.split('/', 3)[3])
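# get_size: dispatch to the HTTP or FTP size check based on the URL scheme.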
def get_size(url):
    scheme = urlparse.urlparse(url)[0].lower()
    if scheme.startswith('http') or scheme.startswith('https'):
        return get_http_size(url)
    elif scheme.startswith('ftp'):
        return get_ftp_size(url)
    else:
        raise SafeException("Unknown scheme '%s' in '%s'" % (scheme, url))
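# check_source: verify that a downloadable archive really has the size the feed
# declares (size + start_offset); for a recipe, recurse into each step.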
def check_source(source):
    if hasattr(source, 'url'):
        checking("Checking archive %s" % source.url)
        actual_size = get_size(source.url)
        if actual_size is None:
            result("No Content-Length for archive; can't check", 'YELLOW')
        else:
            actual_size = int(actual_size)
            expected_size = source.size + (source.start_offset or 0)
            if actual_size != expected_size:
                raise SafeException("Expected archive to have a size of %d, but server says it is %d" %
                        (expected_size, actual_size))
    elif hasattr(source, 'steps'):
        for step in source.steps:
            check_source(step)
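# check_exists: fetch the headers for a URL once and remember it, so the same
# URL is never checked twice in one run.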
existing_urls = set()
def check_exists(url):
    if url in existing_urls: return    # Already checked
    checking("Checking URL exists %s" % url)
    get_size(url)    # raises if the URL can't be fetched
    existing_urls.add(url)
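# Main loop: pop feeds off the queue, parse them and (for remote feeds) verify
# the GPG signature and declared URL, then check the URLs, archives,
# dependencies and release data that each feed refers to.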
now = time.time()
n_errors = 0

while to_check:
    feed = to_check.pop()
    if feed in checked:
        info("Already checked feed %s", feed)
        continue
    checked.add(feed)

    checking("Checking " + feed, indent = 0)

    try:
        if feed.startswith('/'):
            with open(feed) as stream:
                feed_obj = model.ZeroInstallFeed(qdom.parse(stream), local_path = feed)
        else:
            tmp = tempfile.TemporaryFile(prefix = 'feedlint-')
            stream = urllib2.urlopen(feed)
            shutil.copyfileobj(stream, tmp)

            tmp.seek(0)
            start = tmp.read(5)
            if start == '<?xml':
                pass    # looks like an XML feed; its signature is checked below
            elif start == '-----':
                result('Old sig', colour = 'YELLOW')
                error_new_line(' Feed has an old-style plain GPG signature. Use 0publish --xmlsign.',
                        colour = 'YELLOW')
            else:
                error_new_line(' Unknown format. File starts "%s"' % start)
                continue

            tmp.seek(0)
            data, sigs = gpg.check_stream(tmp)

            for s in sigs:
                if isinstance(s, gpg.ValidSig):
                    check_key(feed, s.fingerprint)
                else:
                    raise SafeException("Can't check sig: %s" % s)

            feed_obj = model.ZeroInstallFeed(qdom.parse(data))

            if feed_obj.url != feed:
                raise SafeException('Incorrect URL "%s"' % feed_obj.url)

        for f in feed_obj.feeds:
            info("Will check feed %s", f.uri)
            to_check.append(f.uri)

        highest_version = None
        for impl in feed_obj.implementations.values():
            if hasattr(impl, 'dependencies'):
                for r in impl.dependencies.values():
                    if r.interface not in checked:
                        info("Will check dependency %s", r)
                        if options.dependencies:
                            to_check.append(r.interface)
                        else:
                            check_exists(r.interface)
            if hasattr(impl, 'download_sources') and not options.skip_archives:
                for source in impl.download_sources:
                    check_source(source)
            stability = impl.upstream_stability or model.testing
            if highest_version is None or impl.version > highest_version.version:
                highest_version = impl
            if stability == model.testing:
                testing_error = None
                if not impl.released:
                    testing_error = "No release data on testing version"
                else:
                    try:
                        released = time.strptime(impl.released, '%Y-%m-%d')
                    except ValueError, ex:
                        testing_error = "Can't parse date"
                    else:
                        ago = now - time.mktime(released)
                        if ago < 0:
                            testing_error = 'Release data is in the future!'
                if testing_error:
                    raise SafeException("Version %s: %s (released %s)" % (impl.get_version(),
                            testing_error, impl.released))

        if highest_version and (highest_version.upstream_stability or model.testing) is model.testing:
            print highlight(' Highest version (%s) is still "testing"' % highest_version.get_version(), 'YELLOW')

        for homepage in feed_obj.get_metadata(namespaces.XMLNS_IFACE, 'homepage'):
            check_exists(homepage.content)

        for icon in feed_obj.get_metadata(namespaces.XMLNS_IFACE, 'icon'):
            check_exists(icon.getAttribute('href'))

    except (urllib2.HTTPError, httplib.BadStatusLine, socket.error, ftplib.error_perm), ex:
        err_msg = str(ex).strip() or str(type(ex))
        error_new_line(' ' + err_msg)
        n_errors += 1
        if options.verbose: traceback.print_exc()
    except SafeException, ex:
        if options.verbose: raise
        n_errors += 1
        error_new_line(' ' + str(ex))

if n_errors:
    print "\nERRORS FOUND:", n_errors
    sys.exit(1)
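# Example invocation (script name here is illustrative; see the usage string above):
#   feedlint --dependencies http://example.com/feed.xml
# Local feed files can also be passed by absolute path.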