5 waf-powered distributed network builds, with a network cache.
7 Caching files from a server has advantages over a NFS/Samba shared folder:
9 - builds are much faster because they use local files
10 - builds just continue to work in case of a network glitch
11 - permissions are much simpler to manage
14 import os
, urllib
, tarfile
, re
, shutil
, tempfile
, sys
15 from collections
import OrderedDict
16 from waflib
import Context
, Utils
, Logs
19 from urllib
.parse
import urlencode
21 urlencode
= urllib
.urlencode
23 def safe_urlencode(data
):
32 from urllib
.error
import URLError
34 from urllib2
import URLError
37 from urllib
.request
import Request
, urlopen
39 from urllib2
import Request
, urlopen
# Local cache directory and server endpoint; both overridable through the
# environment, and again per-project via Context.g_module (see the getters below).
DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache')
DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/')

# NOTE(review): these two constants are referenced later in this file
# (make_tarfile and the urlopen calls) but their definitions were lost in the
# extraction (original lines 43-46); values restored from the upstream waf
# distnet tool - confirm.
TARFORMAT = 'w:bz2'
TIMEOUT = 60

# Name of the requirements file shipped with each package
REQUIRES = 'requires.txt'

# Matches '#'-style comments so they can be stripped from requirements text
re_com = re.compile(r'\s*#.*', re.M)
def total_version_order(num):
	"""
	Build a fixed-width sort key for a dotted version string so that
	'1.10' orders after '1.9' (each component is right-aligned in a
	10-character field).

	:param num: version string such as '1.2.3'
	:return: padded string usable as a sort key
	"""
	# NOTE(review): the split and return statements (original lines 50 and 53)
	# were lost in the extraction and are restored here - confirm.
	lst = num.split('.')
	template = '%10s' * len(lst)
	ret = template % tuple(lst)
	return ret
def get_distnet_cache():
	"""Return the local package cache directory, overridable from the project wscript."""
	cache = getattr(Context.g_module, 'DISTNETCACHE', DISTNETCACHE)
	return cache
def get_server_url():
	"""
	Return the distnet server URL, overridable from the project wscript.

	NOTE(review): the 'def' header (original line 58) was lost in the
	extraction; reconstructed from the callers below - confirm.
	"""
	return getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER)
def get_download_url():
	"""Return the URL of the server-side download script."""
	base = get_server_url()
	return '%s/download.py' % base
def get_upload_url():
	"""
	Return the URL of the server-side upload script.

	NOTE(review): the 'def' header (original line 64) was lost in the
	extraction; reconstructed by analogy with get_download_url - confirm.
	"""
	return '%s/upload.py' % get_server_url()
def get_resolve_url():
	"""Return the URL of the server-side dependency resolver script."""
	base = get_server_url()
	return '%s/resolve.py' % base
def send_package_name():
	"""
	Return the path of the archive that the 'package' command creates in
	the project build directory.
	"""
	out = getattr(Context.g_module, 'out', 'build')
	pkgfile = '%s/package_to_upload.tarfile' % out
	# NOTE(review): the return statement (original line 73) was lost in the
	# extraction and is restored here - confirm.
	return pkgfile
class package(Context.Context):
	"""
	Create a package archive to upload to the distnet server.
	"""
	# NOTE(review): the class docstring and cmd/fun attributes (original
	# lines 76-81) were lost in the extraction; restored from the upstream
	# waf distnet tool - confirm.
	cmd = 'package'
	fun = 'pkg'

	def execute(self):
		# the project wscript appends the file names to self.files
		try:
			files = self.files
		except AttributeError:
			files = self.files = []

		Context.Context.execute(self)
		pkgfile = send_package_name()
		if pkgfile not in files:
			if REQUIRES not in files:
				files.append(REQUIRES)
			self.make_tarfile(pkgfile, files, add_to_package=False)

	def make_tarfile(self, filename, files, **kw):
		"""
		Create a tar archive with normalized metadata (uid/gid 0, root
		ownership, optional SOURCE_DATE_EPOCH mtime) for reproducibility.

		:param filename: path of the archive to create
		:param files: file names to store in the archive
		:param kw: 'add_to_package' (default True) also records the archive
			in self.files; 'bare' (default True) stores files without a
			directory prefix
		"""
		if kw.get('add_to_package', True):
			self.files.append(filename)

		with tarfile.open(filename, TARFORMAT) as tar:
			endname = os.path.split(filename)[-1]
			endname = endname.split('.')[0] + '/'
			# hoisted out of the loop: the environment does not change per file
			epoch = os.environ.get('SOURCE_DATE_EPOCH')
			for x in files:
				tarinfo = tar.gettarinfo(x, x)
				tarinfo.uid = tarinfo.gid = 0
				tarinfo.uname = tarinfo.gname = 'root'
				tarinfo.size = os.stat(x).st_size
				if epoch:
					tarinfo.mtime = int(epoch)

				# TODO - more archive creation options?
				if kw.get('bare', True):
					tarinfo.name = os.path.split(x)[1]
				else:
					tarinfo.name = endname + x # todo, if tuple, then..
				Logs.debug('distnet: adding %r to %s', tarinfo.name, filename)
				with open(x, 'rb') as f:
					tar.addfile(tarinfo, f)
		Logs.info('Created %s', filename)
class publish(Context.Context):
	"""
	Upload the last built package archive to the distnet server.
	"""
	# NOTE(review): the class docstring and cmd/fun attributes (original
	# lines 118-120) were lost in the extraction; restored from the upstream
	# waf distnet tool - confirm.
	cmd = 'publish'
	fun = 'pkg'

	def execute(self):
		if hasattr(Context.g_module, 'publish'):
			Context.Context.execute(self)
		mod = Context.g_module

		rfile = getattr(self, 'rfile', send_package_name())
		if not os.path.isfile(rfile):
			self.fatal('Create the release file with "waf release" first! %r' % rfile)

		fdata = Utils.readf(rfile, m='rb')
		data = safe_urlencode([('pkgdata', fdata), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)])

		req = Request(get_upload_url(), data)
		response = urlopen(req, timeout=TIMEOUT)
		data = response.read().strip()

		# server responses are bytes on python 3
		if sys.hexversion>0x300000f:
			data = data.decode('utf-8')

		# NOTE(review): the success check (original line 139) was lost in the
		# extraction; the 'ok' comparison is restored from upstream - confirm.
		if data != 'ok':
			self.fatal('Could not publish the package %r' % data)
class constraint(object):
	"""
	A single dependency constraint parsed from one line of a requirements
	file, in the form ``pkgname,version[,key=value...]``.
	"""
	def __init__(self, line=''):
		# keep the raw text around for round-tripping/debugging
		self.required_line = line
		# list of (key, value) tuples parsed from 'key=value' fields
		self.info = []

		line = line.strip()
		if not line:
			# default-constructed instances are filled in later
			# (see package_reader.solution_to_constraints)
			return

		lst = line.split(',')
		if lst:
			self.pkgname = lst[0]
			# robustness fix: a line with no comma used to raise IndexError;
			# an empty required_version is reported cleanly by check_errors()
			self.required_version = lst[1] if len(lst) > 1 else ''
			for k in lst:
				a, _, c = k.partition('=')
				if a and c:
					self.info.append((a, c))

	def __str__(self):
		"""Serialize back to the 'pkgname,version,key=value' form."""
		buf = []
		buf.append(self.pkgname)
		buf.append(self.required_version)
		for k in self.info:
			buf.append('%s=%s' % k)
		return ','.join(buf)

	def __repr__(self):
		return "requires %s-%s" % (self.pkgname, self.required_version)

	def human_display(self, pkgname, pkgver):
		"""Return a readable 'who requires what' description for error reports."""
		return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version)

	def why(self):
		"""Return the list of 'reason' values recorded through add_reason()."""
		# NOTE(review): this method (original lines ~172-178) was lost in the
		# extraction; reconstructed from its use in check_errors - confirm.
		return [x[1] for x in self.info if x[0] == 'reason']

	def add_reason(self, reason):
		"""Record why this constraint exists (used in error reports)."""
		self.info.append(('reason', reason))
def parse_constraints(text):
	"""
	Parse a requirements text into constraint objects, stripping comments
	and skipping blank lines.

	:param text: contents of a requires.txt file (must not be None)
	:return: list of :py:class:`constraint`
	"""
	assert(text is not None)
	constraints = []
	text = re.sub(re_com, '', text)
	lines = text.splitlines()
	# NOTE(review): the loop body (original lines 188-191) was lost in the
	# extraction; reconstructed from the visible append on line 192 - confirm.
	for line in lines:
		line = line.strip()
		if not line:
			continue
		constraints.append(constraint(line))
	return constraints
def list_package_versions(cachedir, pkgname):
	"""
	List the version folders present in the cache for a package,
	newest first (see total_version_order).

	:param cachedir: cache root directory
	:param pkgname: package name (one sub-folder per version)
	:return: version strings, most recent first
	"""
	pkgdir = os.path.join(cachedir, pkgname)
	# NOTE(review): the error handling and the trailing statements (original
	# lines 197, 199-200, 202-203) were lost in the extraction; restored
	# from the upstream tool - confirm.
	try:
		versions = os.listdir(pkgdir)
	except OSError:
		return []
	versions.sort(key=total_version_order)
	versions.reverse()
	return versions
class package_reader(Context.Context):
	"""
	Reads the project requirements (requires.txt), resolves them against
	the distnet server (or the local cache when offline) and downloads the
	package files into the cache directory.
	"""
	# NOTE(review): the class docstring and cmd attribute (original lines
	# 206-208) were lost in the extraction; a cmd is required for waf
	# context registration - confirm the exact value against upstream.
	cmd = 'package_reader'
	fun = ''

	def __init__(self, **kw):
		Context.Context.__init__(self, **kw)

		self.myproject = getattr(Context.g_module, 'APPNAME', 'project')
		self.myversion = getattr(Context.g_module, 'VERSION', '1.0')
		# (pkgname, pkgver) -> parsed constraint list, avoids re-reading files
		self.cache_constraints = {}
		# flat list of resolved constraints for the current project
		self.constraints = []

	def compute_dependencies(self, filename=REQUIRES):
		"""Resolve the constraints in *filename*, remotely when possible."""
		text = Utils.readf(filename)
		data = safe_urlencode([('text', text)])

		if '--offline' in sys.argv:
			self.constraints = self.local_resolve(text)
		else:
			req = Request(get_resolve_url(), data)
			try:
				response = urlopen(req, timeout=TIMEOUT)
			except URLError as e:
				# fall back to the local cache when the server is unreachable
				Logs.warn('The package server is down! %r', e)
				self.constraints = self.local_resolve(text)
			else:
				ret = response.read()
				try:
					# bytes on python 3
					ret = ret.decode('utf-8')
				except Exception:
					pass
				self.trace(ret)
				self.constraints = parse_constraints(ret)
		self.check_errors()

	def check_errors(self):
		"""Report unsatisfiable constraints and abort if any were found."""
		errors = False
		for c in self.constraints:
			if not c.required_version:
				errors = True
				reasons = c.why()
				if len(reasons) == 1:
					Logs.error('%s but no matching package could be found in this repository', reasons[0])
				else:
					Logs.error('Conflicts on package %r:', c.pkgname)
					for r in reasons:
						Logs.error('  %s', r)
		if errors:
			self.fatal('The package requirements cannot be satisfied!')

	def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
		"""Read (and memoize) the constraints of a cached package version."""
		try:
			return self.cache_constraints[(pkgname, pkgver)]
		except KeyError:
			text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
			ret = parse_constraints(text)
			self.cache_constraints[(pkgname, pkgver)] = ret
			return ret

	def apply_constraint(self, domain, constraint):
		"""Keep only the versions of *domain* matching the constraint ('*' is a wildcard)."""
		vname = constraint.required_version.replace('*', '.*')
		rev = re.compile(vname, re.M)
		ret = [x for x in domain if rev.match(x)]
		return ret

	def trace(self, *k):
		# debug output, enabled by setting self.debug
		if getattr(self, 'debug', None):
			Logs.error(*k)

	def solve(self, packages_to_versions={}, packages_to_constraints={}, pkgname='', pkgver='', todo=[], done=[]):
		# breadth first search
		# NOTE: the mutable default arguments are never mutated in place
		# (only rebound with '+'), so they are safe here
		n_packages_to_versions = dict(packages_to_versions)
		n_packages_to_constraints = dict(packages_to_constraints)

		self.trace("calling solve with %r %r %r" % (packages_to_versions, todo, done))
		done = done + [pkgname]

		constraints = self.load_constraints(pkgname, pkgver)
		self.trace("constraints %r" % constraints)

		for k in constraints:
			try:
				domain = n_packages_to_versions[k.pkgname]
			except KeyError:
				domain = list_package_versions(get_distnet_cache(), k.pkgname)

			self.trace("constraints?")
			if k.pkgname not in done:
				todo = todo + [k.pkgname]

			self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain))

			# apply the constraint
			domain = self.apply_constraint(domain, k)

			self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain))

			n_packages_to_versions[k.pkgname] = domain

			# then store the constraint applied
			constraints = list(packages_to_constraints.get(k.pkgname, []))
			constraints.append((pkgname, pkgver, k))
			n_packages_to_constraints[k.pkgname] = constraints

			if not domain:
				self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver))
				return (n_packages_to_versions, n_packages_to_constraints)

		# next package on the todo list
		if not todo:
			return (n_packages_to_versions, n_packages_to_constraints)

		n_pkgname = todo[0]
		n_pkgver = n_packages_to_versions[n_pkgname][0]
		tmp = dict(n_packages_to_versions)
		tmp[n_pkgname] = [n_pkgver]

		self.trace("fixed point %s" % n_pkgname)

		return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done)

	def get_results(self):
		"""Return the resolved constraints, one per line."""
		return '\n'.join([str(c) for c in self.constraints])

	def solution_to_constraints(self, versions, constraints):
		"""
		Convert a solver solution into a list of constraint objects;
		unsatisfiable packages get an empty required_version plus the
		reasons that led to the conflict.
		"""
		solution = []
		for p in versions.keys():
			c = constraint()
			solution.append(c)

			c.pkgname = p
			if versions[p]:
				c.required_version = versions[p][0]
			else:
				c.required_version = ''
			for (from_pkgname, from_pkgver, c2) in constraints.get(p, ''):
				c.add_reason(c2.human_display(from_pkgname, from_pkgver))
		return solution

	def local_resolve(self, text):
		"""Resolve the constraints using only the local package cache."""
		self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
		p2v = OrderedDict({self.myproject: [self.myversion]})
		(versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
		return self.solution_to_constraints(versions, constraints)

	def download_to_file(self, pkgname, pkgver, subdir, tmp):
		"""Download one package file from the server into the file *tmp*."""
		data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
		req = urlopen(get_download_url(), data, timeout=TIMEOUT)
		with open(tmp, 'wb') as f:
			# NOTE(review): the download loop (original lines 356-361) was
			# lost in the extraction; a chunked copy is restored - confirm.
			while True:
				buf = req.read(8192)
				if not buf:
					break
				f.write(buf)

	def extract_tar(self, subdir, pkgdir, tmpfile):
		"""Unpack *tmpfile* and move the result to pkgdir/subdir atomically."""
		with tarfile.open(tmpfile) as f:
			temp = tempfile.mkdtemp(dir=pkgdir)
			try:
				f.extractall(temp)
				os.rename(temp, os.path.join(pkgdir, subdir))
			finally:
				# remove the temporary folder if the rename failed
				try:
					shutil.rmtree(temp)
				except OSError:
					pass

	def get_pkg_dir(self, pkgname, pkgver, subdir):
		"""Return pkgdir/subdir from the cache, downloading it when missing."""
		pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
		if not os.path.isdir(pkgdir):
			os.makedirs(pkgdir)

		target = os.path.join(pkgdir, subdir)

		if os.path.exists(target):
			return target

		(fd, tmp) = tempfile.mkstemp(dir=pkgdir)
		try:
			os.close(fd)
			self.download_to_file(pkgname, pkgver, subdir, tmp)
			if subdir == REQUIRES:
				os.rename(tmp, target)
			else:
				self.extract_tar(subdir, pkgdir, tmp)
		finally:
			# in the REQUIRES case the file was renamed away already
			try:
				os.remove(tmp)
			except OSError:
				pass

		return target

	def __iter__(self):
		# resolve lazily on first iteration; skip the project itself
		if not self.constraints:
			self.compute_dependencies()
		for x in self.constraints:
			if x.pkgname == self.myproject:
				continue
			yield x

	def execute(self):
		self.compute_dependencies()
# Module-level package_reader instance shared by load_tools() and the
# command hooks (options/configure/build) below.
packages = package_reader()
def load_tools(ctx, extra):
	"""
	Download the dependency packages plus their 'noarch' folders and load
	any waf tools (waf_*.py files) found there into *ctx*.

	:param ctx: context object providing a load() method
	:param extra: extra package sub-folder to fetch (e.g. REQUIRES or a variant)
	"""
	# NOTE(review): the loop header (original lines 414-415) was lost in the
	# extraction; reconstructed as iteration over the module-level
	# package_reader instance - confirm.
	for c in packages:
		packages.get_pkg_dir(c.pkgname, c.required_version, extra)
		noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
		for x in os.listdir(noarchdir):
			if x.startswith('waf_') and x.endswith('.py'):
				# bug fix: rstrip('.py') strips a character SET, so names
				# ending in 'p'/'y' were mangled (waf_happy.py -> waf_happ);
				# slice off the '.py' extension instead
				ctx.load([x[:-3]], tooldir=[noarchdir])
def options(opt):
	"""
	Command-line hook: register --offline and load the remote waf tools.

	NOTE(review): the 'def' header (original line 422) and one statement
	(original line 424) were lost in the extraction; only the visible
	statements are restored - confirm against upstream.
	"""
	opt.add_option('--offline', action='store_true')
	load_tools(opt, REQUIRES)
def configure(conf):
	"""
	Configuration hook: load the downloaded waf tools for the active variant.

	NOTE(review): the 'def' header (original line 427) was lost in the
	extraction; reconstructed from the call argument names - confirm.
	"""
	load_tools(conf, conf.variant)
def build(bld):
	"""
	Build hook: load the downloaded waf tools for the active variant.

	NOTE(review): the 'def' header (original line 430) was lost in the
	extraction; reconstructed from the call argument names - confirm.
	"""
	load_tools(bld, bld.variant)
)