# This script is part of 0export
# Copyright (C) 2010, Thomas Leonard
# See http://0install.net for details.

# This file goes inside the generated setup.sh archive
# It runs or installs the program
import os, sys, subprocess, tempfile, tarfile, gobject, signal
import shutil	# used below when caching feed icons
import gtk	# used by the fallback MessageDialog in add_to_menu()
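# Everything is located relative to the unpacked setup.sh archive: the bundled
# copy of the Zero Install library, the exported feeds, and the implementations.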
mydir = os.path.dirname(os.path.abspath(sys.argv[0]))
zidir = os.path.join(mydir, 'zeroinstall')
sys.path.insert(0, zidir)
feeds_dir = os.path.join(mydir, 'feeds')

# Child processes must be able to find the bundled library too
pypath = os.environ.get('PYTHONPATH')
if pypath:
	pypath = ':' + pypath
else:
	pypath = ''
os.environ['PYTHONPATH'] = zidir + pypath
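# These imports resolve against the bundled Zero Install copy placed on
# sys.path above, so the installer works on machines without Zero Install.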
from zeroinstall.injector import gpg, trust, qdom, iface_cache, driver, handler, model, namespaces, config, requirements
from zeroinstall.support import basedir, find_in_path
from zeroinstall import SafeException, zerostore
from zeroinstall.gtkui import xdgutils
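# The Handler processes Zero Install's download and confirmation events; the
# Config object (cache, stores, network settings) is shared by everything below.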
h = handler.Handler()
config = config.load_config(handler = h)
if not os.path.isdir(feeds_dir):
	print >>sys.stderr, "Directory %s not found." % feeds_dir
	print >>sys.stderr, "This script should be run from an unpacked setup.sh archive."
	print >>sys.stderr, "(are you trying to install 0export? you're in the wrong place!)"
	sys.exit(1)
def check_call(*args, **kwargs):
	exitstatus = subprocess.call(*args, **kwargs)
	if exitstatus != 0:
		raise SafeException("Command failed with exit code %d:\n%s" % (exitstatus, ' '.join(args[0])))
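# The solver only selects implementations it believes are available. FakeStore
# claims to contain everything packed inside the setup archive, so those
# versions count as already cached during solving (see Step 3 below).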
class FakeStore:
	def __init__(self):
		self.impls = set()

	def lookup(self, digest):
		if digest in self.impls:
			return "/fake/" + digest
		return None
def get_gpg():
	return find_in_path('gpg') or find_in_path('gpg2')
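# Unpacking runs in a child process; tracking it here means a cancelled
# install can stop the extraction by killing that child.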
class Installer:
	def __init__(self):
		self.child = None	# the currently-running unpacking subprocess, if any
		self.sent = 0		# bytes of archive data fed to the unpacker so far

	def kill_child(self):
		if self.child is not None:
			os.kill(self.child.pid, signal.SIGTERM)
	def do_install(self, archive_stream, progress_bar, archive_offset):
		# Step 1. Import GPG keys

		# Maybe GPG has never been run before. Let it initialise, or we'll get an error code
		# from the first import... (ignore the return value here)
		subprocess.call([get_gpg(), '--check-trustdb', '-q'])
		key_dir = os.path.join(mydir, 'keys')
		for key in os.listdir(key_dir):
			check_call([get_gpg(), '--import', '-q', os.path.join(key_dir, key)])
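		# The keys must be in GPG's keyring before the feed signatures in Step 2
		# can be verified.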
		# Step 2. Import feeds and trust their signing keys
		for root, dirs, files in os.walk(os.path.join(mydir, 'feeds')):
			if 'latest.xml' in files:
				feed_path = os.path.join(root, 'latest.xml')
				icon_path = os.path.join(root, 'icon.png')

				feed_stream = file(feed_path)
				doc = qdom.parse(feed_stream)
				uri = doc.getAttribute('uri')
				assert uri, "Missing 'uri' attribute on root element in '%s'" % feed_path
				domain = trust.domain_from_url(uri)

				feed_stream.seek(0)	# qdom.parse consumed the stream; rewind for the signature check
				stream, sigs = gpg.check_stream(feed_stream)
				for s in sigs:
					assert isinstance(s, gpg.ValidSig), str(s)
					if not trust.trust_db.is_trusted(s.fingerprint, domain):
						print "Adding key %s to trusted list for %s" % (s.fingerprint, domain)
						trust.trust_db.trust_key(s.fingerprint, domain)
				oldest_sig = min([s.get_timestamp() for s in sigs])
				try:
					config.iface_cache.update_feed_from_network(uri, stream.read(), oldest_sig)
				except iface_cache.ReplayAttack:
					# OK, the user has a newer copy already
					pass
				if feed_stream != stream:
					feed_stream.close()
				stream.close()

				if os.path.exists(icon_path):
					icons_cache = basedir.save_cache_path(namespaces.config_site, 'interface_icons')
					icon_file = os.path.join(icons_cache, model.escape(uri))
					if not os.path.exists(icon_file):
						shutil.copyfile(icon_path, icon_file)
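		# With the feeds imported and their keys trusted, the solver below
		# should rarely need the network.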
		# Step 3. Solve to find out which implementations we actually need
		archive_stream.seek(archive_offset)

		selections = []
		extract_impls = {}	# Impls we need but which are compressed (ID -> Impl)
		tmp = tempfile.mkdtemp(prefix = '0export-')

		# Create a "fake store" with the implementations in the archive
		archive = tarfile.open(name=archive_stream.name, mode='r|', fileobj=archive_stream)
		fake_store = FakeStore()
		for tarmember in archive:
			if tarmember.name.startswith('implementations'):
				impl = os.path.basename(tarmember.name).split('.')[0]
				fake_store.impls.add(impl)

		bootstrap_store = zerostore.Store(os.path.join(mydir, 'implementations'))
		stores = config.stores

		toplevel_uris = [uri.strip() for uri in file(os.path.join(mydir, 'toplevel_uris'))]
		for uri in toplevel_uris:
			# This is so the solver treats versions in the setup archive as 'cached',
			# meaning that it will prefer using them to doing a download
			stores.stores.append(bootstrap_store)
			stores.stores.append(fake_store)

			# Shouldn't need to download anything, but we might not have all feeds
			r = requirements.Requirements(uri)
			d = driver.Driver(config = config, requirements = r)
			config.network_use = model.network_minimal
			download_feeds = d.solve_with_downloads()
			h.wait_for_blocker(download_feeds)
			assert d.solver.ready, d.solver.get_failure_reason()

			# Add anything chosen from the setup store to the main store
			stores.stores.remove(fake_store)
			stores.stores.remove(bootstrap_store)
			for iface, impl in d.get_uncached_implementations():
				print >>sys.stderr, "Need to import", impl
				if impl.id in fake_store.impls:
					# Compressed inside the setup archive; unpack it later
					extract_impls[impl.id] = impl
				else:
					impl_src = os.path.join(mydir, 'implementations', impl.id)

					if os.path.isdir(impl_src):
						stores.add_dir_to_cache(impl.id, impl_src)
					else:
						print >>sys.stderr, "Required impl %s (for %s) not present" % (impl, iface)

			selections.append(d.solver.selections)
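		# The progress bar during extraction is driven by how many compressed
		# bytes have been fed to the unpacker, out of this total: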
		# Count total number of bytes to extract
		extract_total = 0
		for impl in extract_impls.values():
			impl_info = archive.getmember('implementations/' + impl.id + '.tar.bz2')
			extract_total += impl_info.size
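		# A tarfile opened with mode 'r|' is a forward-only stream, so rewind
		# the underlying file and open it again for the extraction pass.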
		# Actually extract + import the implementations in the archive
		archive_stream.seek(archive_offset)
		archive = tarfile.open(name=archive_stream.name, mode='r|', fileobj=archive_stream)

		for tarmember in archive:
			if not tarmember.name.startswith('implementations'):
				continue
			impl_id = tarmember.name.split('/')[1].split('.')[0]
			if impl_id not in extract_impls:
				print "Skip", impl_id
				continue
			print "Extracting", impl_id
			tmp = tempfile.mkdtemp(prefix = '0export-')

			impl_stream = archive.extractfile(tarmember)
			self.child = subprocess.Popen('bunzip2|tar xf -', shell = True, stdin = subprocess.PIPE, cwd = tmp)
			mainloop = gobject.MainLoop(gobject.main_context_default())

			def pipe_ready(src, cond):
				data = impl_stream.read(4096)
				if not data:
					mainloop.quit()
					self.child.stdin.close()
					return False
				self.sent += len(data)
				if progress_bar:
					progress_bar.set_fraction(float(self.sent) / extract_total)
				self.child.stdin.write(data)
				return True
			gobject.io_add_watch(self.child.stdin, gobject.IO_OUT | gobject.IO_HUP, pipe_ready, priority = gobject.PRIORITY_LOW)

			mainloop.run()

			self.child.wait()
			if self.child.returncode:
				raise Exception("Failed to unpack archive (code %d)" % self.child.returncode)
			self.child = None

			stores.add_dir_to_cache(impl_id, tmp)
			shutil.rmtree(tmp)	# the cache takes its own copy, so drop the temporary one
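# Create menu entries (freedesktop.org .desktop items) for the installed
# programs, using the metadata and icons cached above.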
def add_to_menu(uris):
	for uri in uris:
		iface = config.iface_cache.get_interface(uri)
		icon_path = config.iface_cache.get_icon_path(iface)

		feed_category = None
		for meta in iface.get_metadata(namespaces.XMLNS_IFACE, 'category'):
			c = meta.content
			# Assumption: xdgutils exposes the list of valid menu categories
			if c not in xdgutils.categories:
				raise Exception("Invalid category '%s'" % c)
			feed_category = c

		xdgutils.add_to_menu(iface, icon_path, feed_category)
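	# The menu entries run the program via 0launch, so the zeroinstall-injector
	# package must be installed system-wide for them to work.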
	if find_in_path('0launch'):
		return

	if find_in_path('sudo') and find_in_path('gnome-terminal') and find_in_path('apt-get'):
		check_call(['gnome-terminal', '--disable-factory', '-x', 'sh', '-c',
			'echo "We need to install the zeroinstall-injector package to make the menu items work."; '
			'sudo apt-get install zeroinstall-injector || sleep 4'])

	if find_in_path('0launch'):
		return

	box = gtk.MessageDialog(None, 0, buttons = gtk.BUTTONS_OK)
	box.set_markup("The new menu item won't work until the '<b>zeroinstall-injector</b>' package is installed.\n"
		"Please install it using your distribution's package manager.")
	box.run()
	box.destroy()