ctdb-daemon: Use ctdb_parse_node_address() in ctdbd
[samba4-gss.git] / third_party / waf / waflib / extras / distnet.py
blob8084b156b14f7e38f70d5fc8aef4701bbb8176e7
1 #! /usr/bin/env python
2 # encoding: utf-8
4 """
5 waf-powered distributed network builds, with a network cache.
7 Caching files from a server has advantages over a NFS/Samba shared folder:
9 - builds are much faster because they use local files
10 - builds just continue to work in case of a network glitch
11 - permissions are much simpler to manage
12 """
14 import os, urllib, tarfile, re, shutil, tempfile, sys
15 from collections import OrderedDict
16 from waflib import Context, Utils, Logs
18 try:
19 from urllib.parse import urlencode
20 except ImportError:
21 urlencode = urllib.urlencode
def safe_urlencode(data):
	"""URL-encode *data* and return it as bytes when the encoding succeeds.

	On Python 2 ``urlencode`` already yields a byte string and ``encode``
	may fail, in which case the unencoded value is returned unchanged.
	"""
	encoded = urlencode(data)
	try:
		return encoded.encode('utf-8')
	except Exception:
		return encoded
31 try:
32 from urllib.error import URLError
33 except ImportError:
34 from urllib2 import URLError
36 try:
37 from urllib.request import Request, urlopen
38 except ImportError:
39 from urllib2 import Request, urlopen
# local package cache directory; overridable via the environment
DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache')
# package server base URL; overridable via the environment
DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/')
# tarfile mode used when creating packages (bzip2-compressed)
TARFORMAT = 'w:bz2'
# network timeout (seconds) for all requests to the package server
TIMEOUT = 60
# name of the requirements file shipped alongside each package
REQUIRES = 'requires.txt'
# matches '#' comments, for stripping them out of requirements text
re_com = re.compile(r'\s*#.*', re.M)
def total_version_order(num):
	"""Map a dotted version string to a fixed-width key whose lexicographic
	order matches the numeric component order (so '2.10' sorts after '2.9')."""
	components = num.split('.')
	return ('%10s' * len(components)) % tuple(components)
def get_distnet_cache():
	"""Return the package cache directory, preferring a DISTNETCACHE set in the project wscript."""
	cachedir = getattr(Context.g_module, 'DISTNETCACHE', DISTNETCACHE)
	return cachedir
def get_server_url():
	"""Return the package server URL, preferring a DISTNETSERVER set in the project wscript."""
	server = getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER)
	return server
def get_download_url():
	"""URL of the server-side download script."""
	return get_server_url() + '/download.py'
def get_upload_url():
	"""URL of the server-side upload script."""
	return get_server_url() + '/upload.py'
def get_resolve_url():
	"""URL of the server-side dependency-resolution script."""
	return get_server_url() + '/resolve.py'
def send_package_name():
	"""Path of the tarball to upload, located in the project build directory."""
	build_dir = getattr(Context.g_module, 'out', 'build')
	return build_dir + '/package_to_upload.tarfile'
class package(Context.Context):
	"""Create the release tarball ('waf package') from the files registered in ``self.files``."""
	fun = 'package'
	cmd = 'package'

	def execute(self):
		"""Run the user 'package' function, then archive the collected files plus the requirements file."""
		try:
			files = self.files
		except AttributeError:
			files = self.files = []

		Context.Context.execute(self)
		pkgfile = send_package_name()
		if pkgfile not in files:
			if REQUIRES not in files:
				files.append(REQUIRES)
			self.make_tarfile(pkgfile, files, add_to_package=False)

	def make_tarfile(self, filename, files, **kw):
		"""Write *files* into the bz2 tarball *filename* with normalized metadata.

		:param filename: path of the tarball to create
		:param files: paths of the files to include
		:param add_to_package: when True (default), record *filename* in ``self.files``
		:param bare: when True (default), store each entry under its basename only
		"""
		if kw.get('add_to_package', True):
			self.files.append(filename)

		# read once; honoured for reproducible builds
		source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH')
		with tarfile.open(filename, TARFORMAT) as tar:
			endname = os.path.split(filename)[-1]
			endname = endname.split('.')[0] + '/'
			for x in files:
				tarinfo = tar.gettarinfo(x, x)
				# normalize ownership so the archive does not leak local uid/gid
				tarinfo.uid = tarinfo.gid = 0
				tarinfo.uname = tarinfo.gname = 'root'
				tarinfo.size = os.stat(x).st_size
				if source_date_epoch:
					tarinfo.mtime = int(source_date_epoch)

				# TODO - more archive creation options?
				if kw.get('bare', True):
					tarinfo.name = os.path.split(x)[1]
				else:
					tarinfo.name = endname + x # todo, if tuple, then..
				Logs.debug('distnet: adding %r to %s', tarinfo.name, filename)
				with open(x, 'rb') as f:
					tar.addfile(tarinfo, f)
		Logs.info('Created %s', filename)
class publish(Context.Context):
	"""Upload the release tarball to the distnet server ('waf publish')."""
	fun = 'publish'
	cmd = 'publish'
	def execute(self):
		"""Read the release tarball and POST it to the upload URL; abort unless the server answers 'ok'."""
		# run the optional user-provided 'publish' function first
		if hasattr(Context.g_module, 'publish'):
			Context.Context.execute(self)
		mod = Context.g_module

		rfile = getattr(self, 'rfile', send_package_name())
		if not os.path.isfile(rfile):
			self.fatal('Create the release file with "waf release" first! %r' % rfile)

		payload = safe_urlencode([('pkgdata', Utils.readf(rfile, m='rb')), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)])
		reply = urlopen(Request(get_upload_url(), payload), timeout=TIMEOUT).read().strip()

		# server replies are bytes on Python 3
		if sys.hexversion > 0x300000f:
			reply = reply.decode('utf-8')

		if reply != 'ok':
			self.fatal('Could not publish the package %r' % reply)
class constraint(object):
	"""One dependency line of the form ``pkgname,version[,key=value...]``.

	An empty or blank *line* yields an object with only ``required_line``
	and an empty ``info`` list (no ``pkgname``/``required_version``).
	"""
	def __init__(self, line=''):
		self.required_line = line
		self.info = []

		stripped = line.strip()
		if not stripped:
			return

		fields = stripped.split(',')
		if fields:
			self.pkgname = fields[0]
			self.required_version = fields[1]
			# every "key=value" field is kept as an info pair
			for field in fields:
				key, _, value = field.partition('=')
				if key and value:
					self.info.append((key, value))

	def __str__(self):
		parts = [self.pkgname, self.required_version]
		parts.extend('%s=%s' % pair for pair in self.info)
		return ','.join(parts)

	def __repr__(self):
		return "requires %s-%s" % (self.pkgname, self.required_version)

	def human_display(self, pkgname, pkgver):
		"""One-line explanation of who requires this package."""
		return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version)

	def why(self):
		"""Return the list of 'reason' values attached to this constraint."""
		return [value for key, value in self.info if key == 'reason']

	def add_reason(self, reason):
		"""Attach a human-readable reason for this constraint."""
		self.info.append(('reason', reason))
def parse_constraints(text):
	"""Parse requirements *text* into a list of :class:`constraint` objects,
	ignoring '#' comments and blank lines."""
	assert(text is not None)
	cleaned = re.sub(re_com, '', text)
	stripped_lines = (ln.strip() for ln in cleaned.splitlines())
	return [constraint(ln) for ln in stripped_lines if ln]
def list_package_versions(cachedir, pkgname):
	"""Return the versions of *pkgname* present in the cache, newest first
	(empty list when the package directory does not exist)."""
	pkgdir = os.path.join(cachedir, pkgname)
	try:
		found = os.listdir(pkgdir)
	except OSError:
		return []
	return sorted(found, key=total_version_order, reverse=True)
class package_reader(Context.Context):
	"""
	Dependency solver ('waf solver'): resolves the project requirements
	through the package server, falling back to a local breadth-first
	solver over the package cache when offline.
	"""
	cmd = 'solver'
	fun = 'solver'

	def __init__(self, **kw):
		Context.Context.__init__(self, **kw)

		# project identity, taken from the top-level wscript when present
		self.myproject = getattr(Context.g_module, 'APPNAME', 'project')
		self.myversion = getattr(Context.g_module, 'VERSION', '1.0')
		# (pkgname, pkgver) -> parsed constraint list, avoids re-reading requirement files
		self.cache_constraints = {}
		# flat list of resolved constraints for this project
		self.constraints = []

	def compute_dependencies(self, filename=REQUIRES):
		"""Resolve the requirements in *filename* and store them in ``self.constraints``.

		Uses the server unless '--offline' was given or the server is unreachable,
		in which case the local solver is used. Aborts on unsatisfiable constraints.
		"""
		text = Utils.readf(filename)
		data = safe_urlencode([('text', text)])

		if '--offline' in sys.argv:
			self.constraints = self.local_resolve(text)
		else:
			req = Request(get_resolve_url(), data)
			try:
				response = urlopen(req, timeout=TIMEOUT)
			except URLError as e:
				# server down: degrade gracefully to the local cache
				Logs.warn('The package server is down! %r', e)
				self.constraints = self.local_resolve(text)
			else:
				ret = response.read()
				try:
					ret = ret.decode('utf-8')
				except Exception:
					pass
				self.trace(ret)
				self.constraints = parse_constraints(ret)
		self.check_errors()

	def check_errors(self):
		"""Report constraints left without a version (unsatisfiable) and abort if any."""
		errors = False
		for c in self.constraints:
			# an empty required_version marks a package that could not be resolved
			if not c.required_version:
				errors = True

				reasons = c.why()
				if len(reasons) == 1:
					Logs.error('%s but no matching package could be found in this repository', reasons[0])
				else:
					Logs.error('Conflicts on package %r:', c.pkgname)
					for r in reasons:
						Logs.error(' %s', r)
		if errors:
			self.fatal('The package requirements cannot be satisfied!')

	def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
		"""Return the parsed requirements of the cached package *pkgname*-*pkgver*, memoized."""
		try:
			return self.cache_constraints[(pkgname, pkgver)]
		except KeyError:
			text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
			ret = parse_constraints(text)
			self.cache_constraints[(pkgname, pkgver)] = ret
			return ret

	def apply_constraint(self, domain, constraint):
		"""Filter *domain* down to the versions matching the constraint.

		The required version is used as a regex with '*' expanded to '.*'.
		"""
		vname = constraint.required_version.replace('*', '.*')
		rev = re.compile(vname, re.M)
		ret = [x for x in domain if rev.match(x)]
		return ret

	def trace(self, *k):
		# debug output, enabled only when self.debug is set
		if getattr(self, 'debug', None):
			Logs.error(*k)

	def solve(self, packages_to_versions={}, packages_to_constraints={}, pkgname='', pkgver='', todo=[], done=[]):
		"""Recursively narrow the version domains of all transitive dependencies.

		Returns a tuple (packages_to_versions, packages_to_constraints); a package
		mapped to an empty version list means its constraints conflict.

		NOTE(review): the default arguments are mutable but are never mutated here
		(only copied or rebound via concatenation), so sharing them is harmless.
		"""
		# breadth first search
		n_packages_to_versions = dict(packages_to_versions)
		n_packages_to_constraints = dict(packages_to_constraints)

		self.trace("calling solve with %r %r %r" % (packages_to_versions, todo, done))
		done = done + [pkgname]

		constraints = self.load_constraints(pkgname, pkgver)
		self.trace("constraints %r" % constraints)

		for k in constraints:
			try:
				domain = n_packages_to_versions[k.pkgname]
			except KeyError:
				# first time we see this package: start from all cached versions
				domain = list_package_versions(get_distnet_cache(), k.pkgname)


			self.trace("constraints?")
			if not k.pkgname in done:
				todo = todo + [k.pkgname]

			self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain))

			# apply the constraint
			domain = self.apply_constraint(domain, k)

			self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain))
			n_packages_to_versions[k.pkgname] = domain

			# then store the constraint applied
			constraints = list(packages_to_constraints.get(k.pkgname, []))
			constraints.append((pkgname, pkgver, k))
			n_packages_to_constraints[k.pkgname] = constraints

			if not domain:
				# conflict: no version satisfies all constraints, stop here
				self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver))
				return (n_packages_to_versions, n_packages_to_constraints)

		# next package on the todo list
		if not todo:
			return (n_packages_to_versions, n_packages_to_constraints)

		# fix the next package to its newest admissible version and recurse
		n_pkgname = todo[0]
		n_pkgver = n_packages_to_versions[n_pkgname][0]
		tmp = dict(n_packages_to_versions)
		tmp[n_pkgname] = [n_pkgver]

		self.trace("fixed point %s" % n_pkgname)

		return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done)

	def get_results(self):
		"""Render the resolved constraints, one per line."""
		return '\n'.join([str(c) for c in self.constraints])

	def solution_to_constraints(self, versions, constraints):
		"""Convert the solver output into a list of constraint objects.

		Packages with an empty domain get required_version == '' so that
		check_errors() reports them as unsatisfiable.
		"""
		solution = []
		for p in versions:
			c = constraint()
			solution.append(c)

			c.pkgname = p
			if versions[p]:
				c.required_version = versions[p][0]
			else:
				c.required_version = ''
			for (from_pkgname, from_pkgver, c2) in constraints.get(p, ''):
				c.add_reason(c2.human_display(from_pkgname, from_pkgver))
		return solution

	def local_resolve(self, text):
		"""Resolve the requirements *text* using only the local package cache."""
		self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
		p2v = OrderedDict({self.myproject: [self.myversion]})
		(versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
		return self.solution_to_constraints(versions, constraints)

	def download_to_file(self, pkgname, pkgver, subdir, tmp):
		"""Stream the package file *subdir* of *pkgname*-*pkgver* from the server into *tmp*."""
		data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
		req = urlopen(get_download_url(), data, timeout=TIMEOUT)
		with open(tmp, 'wb') as f:
			while True:
				buf = req.read(8192)
				if not buf:
					break
				f.write(buf)

	def extract_tar(self, subdir, pkgdir, tmpfile):
		"""Extract *tmpfile* into a temporary directory, then atomically rename it to *subdir*.

		NOTE(review): extractall() on a downloaded archive trusts the server;
		a malicious tarball could write outside the target directory.
		"""
		with tarfile.open(tmpfile) as f:
			temp = tempfile.mkdtemp(dir=pkgdir)
			try:
				f.extractall(temp)
				os.rename(temp, os.path.join(pkgdir, subdir))
			finally:
				# rmtree fails with ENOENT after a successful rename; ignore it
				try:
					shutil.rmtree(temp)
				except Exception:
					pass

	def get_pkg_dir(self, pkgname, pkgver, subdir):
		"""Return the cache path of *subdir* for *pkgname*-*pkgver*, downloading it on a cache miss."""
		pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
		if not os.path.isdir(pkgdir):
			os.makedirs(pkgdir)

		target = os.path.join(pkgdir, subdir)

		if os.path.exists(target):
			return target

		# download to a temporary file first so the cache never holds partial data
		(fd, tmp) = tempfile.mkstemp(dir=pkgdir)
		try:
			os.close(fd)
			self.download_to_file(pkgname, pkgver, subdir, tmp)
			if subdir == REQUIRES:
				os.rename(tmp, target)
			else:
				self.extract_tar(subdir, pkgdir, tmp)
		finally:
			# tmp is gone after a successful rename; ignore the failure
			try:
				os.remove(tmp)
			except OSError:
				pass

		return target

	def __iter__(self):
		"""Iterate over the resolved dependencies, skipping the project itself."""
		if not self.constraints:
			self.compute_dependencies()
		for x in self.constraints:
			if x.pkgname == self.myproject:
				continue
			yield x

	def execute(self):
		# entry point of the 'solver' command
		self.compute_dependencies()
411 packages = package_reader()
def load_tools(ctx, extra):
	"""Fetch each resolved dependency and load any waf tools from its 'noarch' subpackage.

	:param ctx: the waf context (options/configure/build) on which to load the tools
	:param extra: name of the additional subpackage to fetch for each dependency
	"""
	global packages
	for c in packages:
		packages.get_pkg_dir(c.pkgname, c.required_version, extra)
		noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
		for x in os.listdir(noarchdir):
			if x.startswith('waf_') and x.endswith('.py'):
				# strip the '.py' extension; rstrip('.py') would also strip
				# trailing 'p'/'y'/'.' characters from the tool name itself
				# (e.g. 'waf_copy.py' -> 'waf_co')
				ctx.load([x[:-3]], tooldir=[noarchdir])
def options(opt):
	"""Register the --offline flag, resolve dependencies and load remote tools."""
	opt.add_option('--offline', action='store_true')
	packages.execute()
	load_tools(opt, REQUIRES)
def configure(conf):
	"""Load variant-specific remote tools at configuration time."""
	load_tools(conf, conf.variant)
def build(bld):
	"""Load variant-specific remote tools at build time."""
	load_tools(bld, bld.variant)