# Used by pkgs/applications/editors/vim/plugins/update.py and pkgs/applications/editors/kakoune/plugins/update.py

# format:
# $ nix run nixpkgs.python3Packages.black -c black update.py
# type-check:
# $ nix run nixpkgs.python3Packages.mypy -c mypy update.py
# linted:
# $ nix run nixpkgs.python3Packages.flake8 -c flake8 --ignore E501,E265 update.py
import argparse
import csv
import functools
import http
import json
import os
import subprocess
import logging
import sys
import time
import traceback
import urllib.error
import urllib.parse
import urllib.request
import xml.etree.ElementTree as ET
from datetime import datetime
from functools import wraps
from multiprocessing.dummy import Pool
from pathlib import Path
from typing import Dict, List, Optional, Tuple, Union, Any, Callable
from urllib.parse import urljoin, urlparse
from tempfile import NamedTemporaryFile
from dataclasses import dataclass, asdict

import git
ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "

LOG_LEVELS = {
    logging.getLevelName(level): level for level in [
        logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR]
}

log = logging.getLogger()
def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: float = 2):
    """Retry calling the decorated function using an exponential backoff.
    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
    (BSD licensed)
    :param ExceptionToCheck: the exception on which to retry
    :param tries: number of times to try (not retry) before giving up
    :param delay: initial delay between retries in seconds
    :param backoff: backoff multiplier e.g. value of 2 will double the delay
        each retry
    """

    def deco_retry(f: Callable) -> Callable:
        @wraps(f)
        def f_retry(*args: Any, **kwargs: Any) -> Any:
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except ExceptionToCheck as e:
                    print(f"{str(e)}, Retrying in {mdelay} seconds...")
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            return f(*args, **kwargs)

        return f_retry  # true decorator

    return deco_retry
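
# Illustrative use of the retry decorator above; the Repo classes below wrap
# their network-facing methods in exactly this way (the URL is a placeholder,
# not one the script actually fetches):
#
#   @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
#   def fetch_feed() -> bytes:
#       return urllib.request.urlopen("https://example.org/feed.atom", timeout=10).read()
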
@dataclass
class FetchConfig:
    proc: int
    github_token: str


def make_request(url: str, token=None) -> urllib.request.Request:
    headers = {}
    if token is not None:
        headers["Authorization"] = f"token {token}"
    return urllib.request.Request(url, headers=headers)


# a dictionary of plugins and their new repositories
Redirects = Dict['PluginDesc', 'Repo']
class Repo:
    def __init__(
        self, uri: str, branch: str
    ) -> None:
        self.uri = uri
        '''Url to the repo'''
        self._branch = branch
        # Redirect is the new Repo to use
        self.redirect: Optional['Repo'] = None
        self.token = "dummy_token"

    @property
    def name(self):
        return self.uri.split('/')[-1]

    @property
    def branch(self):
        return self._branch or "HEAD"

    def __str__(self) -> str:
        return f"{self.uri}"

    def __repr__(self) -> str:
        return f"Repo({self.name}, {self.uri})"

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        log.debug("Latest commit")
        loaded = self._prefetch(None)
        updated = datetime.strptime(loaded['date'], "%Y-%m-%dT%H:%M:%S%z")

        return loaded['rev'], updated

    def _prefetch(self, ref: Optional[str]):
        cmd = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
        if ref is not None:
            cmd.append(ref)
        log.debug(cmd)
        data = subprocess.check_output(cmd)
        loaded = json.loads(data)
        return loaded

    def prefetch(self, ref: Optional[str]) -> str:
        print("Prefetching")
        loaded = self._prefetch(ref)
        return loaded["sha256"]

    def as_nix(self, plugin: "Plugin") -> str:
        return f'''fetchgit {{
      url = "{self.uri}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";
    }}'''
class RepoGitHub(Repo):
    def __init__(
        self, owner: str, repo: str, branch: str
    ) -> None:
        self.owner = owner
        self.repo = repo
        self.token = None
        '''Url to the repo'''
        super().__init__(self.url(""), branch)
        log.debug("Instantiating github repo owner=%s and repo=%s", self.owner, self.repo)

    @property
    def name(self):
        return self.repo

    def url(self, path: str) -> str:
        res = urljoin(f"https://github.com/{self.owner}/{self.repo}/", path)
        return res

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        try:
            req = make_request(self.url(f"blob/{self.branch}/.gitmodules"), self.token)
            urllib.request.urlopen(req, timeout=10).close()
        except urllib.error.HTTPError as e:
            if e.code == 404:
                return False
            else:
                raise
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        commit_url = self.url(f"commits/{self.branch}.atom")
        log.debug("Sending request to %s", commit_url)
        commit_req = make_request(commit_url, self.token)
        with urllib.request.urlopen(commit_req, timeout=10) as req:
            self._check_for_redirect(commit_url, req)
            xml = req.read()
            root = ET.fromstring(xml)
            latest_entry = root.find(ATOM_ENTRY)
            assert latest_entry is not None, f"No commits found in repository {self}"
            commit_link = latest_entry.find(ATOM_LINK)
            assert commit_link is not None, f"No link tag found in feed entry {xml}"
            url = urlparse(commit_link.get("href"))
            updated_tag = latest_entry.find(ATOM_UPDATED)
            assert (
                updated_tag is not None and updated_tag.text is not None
            ), f"No updated tag found in feed entry {xml}"
            updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
            return Path(str(url.path)).name, updated

    def _check_for_redirect(self, url: str, req: http.client.HTTPResponse):
        response_url = req.geturl()
        if url != response_url:
            new_owner, new_name = (
                urllib.parse.urlsplit(response_url).path.strip("/").split("/")[:2]
            )

            new_repo = RepoGitHub(owner=new_owner, repo=new_name, branch=self.branch)
            self.redirect = new_repo

    def prefetch(self, commit: str) -> str:
        if self.has_submodules():
            sha256 = super().prefetch(commit)
        else:
            sha256 = self.prefetch_github(commit)
        return sha256

    def prefetch_github(self, ref: str) -> str:
        cmd = ["nix-prefetch-url", "--unpack", self.url(f"archive/{ref}.tar.gz")]
        log.debug("Running %s", cmd)
        data = subprocess.check_output(cmd)
        return data.strip().decode("utf-8")

    def as_nix(self, plugin: "Plugin") -> str:
        if plugin.has_submodules:
            submodule_attr = "\n      fetchSubmodules = true;"
        else:
            submodule_attr = ""

        return f'''fetchFromGitHub {{
      owner = "{self.owner}";
      repo = "{self.repo}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";{submodule_attr}
    }}'''
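
# For reference, as_nix above renders roughly the following Nix snippet; the
# owner, repo and hash values here are illustrative placeholders:
#
#   fetchFromGitHub {
#     owner = "someowner";
#     repo = "somerepo";
#     rev = "<commit sha>";
#     sha256 = "<nix sha256>";
#     fetchSubmodules = true;  # only emitted when the plugin has submodules
#   }
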
@dataclass(frozen=True)
class PluginDesc:
    repo: Repo
    branch: str
    alias: Optional[str]

    @property
    def name(self):
        if self.alias is None:
            return self.repo.name
        else:
            return self.alias

    def __lt__(self, other):
        return self.repo.name < other.repo.name

    @staticmethod
    def load_from_csv(config: FetchConfig, row: Dict[str, str]) -> 'PluginDesc':
        branch = row["branch"]
        repo = make_repo(row['repo'], branch.strip())
        repo.token = config.github_token
        return PluginDesc(repo, branch.strip(), row["alias"])

    @staticmethod
    def load_from_string(config: FetchConfig, line: str) -> 'PluginDesc':
        branch = "HEAD"
        alias = None
        uri = line
        if " as " in uri:
            uri, alias = uri.split(" as ")
            alias = alias.strip()
        if "@" in uri:
            uri, branch = uri.split("@")
        repo = make_repo(uri.strip(), branch.strip())
        repo.token = config.github_token
        return PluginDesc(repo, branch.strip(), alias)
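
# load_from_string parses the shorthand used on the command line with --add;
# based on the parsing above, all of the following forms are accepted (names
# are illustrative):
#
#   "owner/repo"                     -> GitHub repo, default branch (HEAD)
#   "owner/repo@develop"             -> GitHub repo, branch "develop"
#   "owner/repo as my-alias"         -> override the generated plugin name
#   "https://git.example.org/x.git"  -> non-GitHub repo, fetched via nix-prefetch-git
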
@dataclass
class Plugin:
    name: str
    commit: str
    has_submodules: bool
    sha256: str
    date: Optional[datetime] = None

    @property
    def normalized_name(self) -> str:
        return self.name.replace(".", "-")

    @property
    def version(self) -> str:
        assert self.date is not None
        return self.date.strftime("%Y-%m-%d")

    def as_json(self) -> Dict[str, str]:
        copy = self.__dict__.copy()
        del copy["date"]
        return copy
def load_plugins_from_csv(config: FetchConfig, input_file: Path,) -> List[PluginDesc]:
    log.debug("Load plugins from csv %s", input_file)
    plugins = []
    with open(input_file, newline='') as csvfile:
        log.debug("Reading from %s", input_file)
        reader = csv.DictReader(csvfile,)
        for line in reader:
            plugin = PluginDesc.load_from_csv(config, line)
            plugins.append(plugin)

    return plugins
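
# The CSV file read above (and rewritten by rewrite_input further down) uses a
# repo,branch,alias header; the rows below are illustrative:
#
#   repo,branch,alias
#   https://github.com/someowner/somerepo/,HEAD,
#   https://github.com/otherowner/other.nvim/,develop,other-alias
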
def run_nix_expr(expr):
    with CleanEnvironment():
        cmd = ["nix", "eval", "--extra-experimental-features",
               "nix-command", "--impure", "--json", "--expr", expr]
        log.debug("Running command %s", cmd)
        out = subprocess.check_output(cmd)
        data = json.loads(out)
        return data
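
# The expression passed in is the editor-specific `get_plugins` snippet; judging
# from Editor.get_current_plugins below, it is expected to evaluate to a JSON
# object mapping plugin names to their current state, e.g. (illustrative):
#
#   { "some-plugin": { "rev": "<commit sha>", "submodules": false, "sha256": "<hash>" } }
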
class Editor:
    """The configuration of the update script."""

    def __init__(
        self,
        name: str,
        root: Path,
        get_plugins: str,
        default_in: Optional[Path] = None,
        default_out: Optional[Path] = None,
        deprecated: Optional[Path] = None,
        cache_file: Optional[str] = None,
    ):
        log.debug("get_plugins: %s", get_plugins)
        self.name = name
        self.root = root
        self.get_plugins = get_plugins
        self.default_in = default_in or root.joinpath(f"{name}-plugin-names")
        self.default_out = default_out or root.joinpath("generated.nix")
        self.deprecated = deprecated or root.joinpath("deprecated.json")
        self.cache_file = cache_file or f"{name}-plugin-cache.json"
        self.nixpkgs_repo = None

    def get_current_plugins(self) -> List[Plugin]:
        """To fill the cache"""
        data = run_nix_expr(self.get_plugins)
        plugins = []
        for name, attr in data.items():
            print("get_current_plugins: name %s" % name)
            p = Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
            plugins.append(p)
        return plugins

    def load_plugin_spec(self, config: FetchConfig, plugin_file) -> List[PluginDesc]:
        '''CSV spec'''
        return load_plugins_from_csv(config, plugin_file)

    def generate_nix(self, plugins, outfile: str):
        '''Returns nothing for now, writes directly to outfile'''
        raise NotImplementedError()

    def get_update(self, input_file: str, outfile: str, config: FetchConfig):
        cache: Cache = Cache(self.get_current_plugins(), self.cache_file)
        _prefetch = functools.partial(prefetch, cache=cache)

        def update() -> dict:
            plugins = self.load_plugin_spec(config, input_file)

            try:
                pool = Pool(processes=config.proc)
                results = pool.map(_prefetch, plugins)
            finally:
                cache.store()

            plugins, redirects = check_results(results)

            self.generate_nix(plugins, outfile)

            return redirects

        return update
    @property
    def attr_path(self):
        return self.name + "Plugins"

    def get_drv_name(self, name: str):
        return self.attr_path + "." + name

    def rewrite_input(self, *args, **kwargs):
        return rewrite_input(*args, **kwargs)

    def create_parser(self):
        parser = argparse.ArgumentParser(
            description=(f"""
                Updates nix derivations for {self.name} plugins.\n
                By default from {self.default_in} to {self.default_out}"""
            )
        )
        parser.add_argument(
            "--add",
            dest="add_plugins",
            default=[],
            action="append",
            help=f"Plugin to add to {self.attr_path} from GitHub in the form owner/repo",
        )
        parser.add_argument(
            "--input-names",
            "-i",
            dest="input_file",
            default=self.default_in,
            help="A list of plugins in the form owner/repo",
        )
        parser.add_argument(
            "--out",
            "-o",
            dest="outfile",
            default=self.default_out,
            help="Filename to save generated nix code",
        )
        parser.add_argument(
            "--proc",
            "-p",
            dest="proc",
            type=int,
            default=30,
            help="Number of concurrent processes to spawn. Setting --github-token allows higher values.",
        )
        parser.add_argument(
            "--github-token",
            "-t",
            type=str,
            default=os.getenv("GITHUB_API_TOKEN"),
            help="""Allows setting --proc to higher values.
            Uses the GITHUB_API_TOKEN environment variable as the default value.""",
        )
        parser.add_argument(
            "--no-commit", "-n", action="store_true", default=False,
            help="Do not commit changes automatically",
        )
        parser.add_argument(
            "--debug", "-d", choices=LOG_LEVELS.keys(),
            default=logging.getLevelName(logging.WARN),
            help="Adjust log level",
        )
        return parser
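
# Typical invocations of an update script built on this parser (illustrative;
# the flags below are defined above, the plugin name is a placeholder):
#
#   ./pkgs/applications/editors/vim/plugins/update.py --proc 5
#   ./pkgs/applications/editors/vim/plugins/update.py --add "owner/repo@branch as alias" --no-commit
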
class CleanEnvironment(object):
    def __enter__(self) -> None:
        self.old_environ = os.environ.copy()
        local_pkgs = str(Path(__file__).parent.parent.parent)
        os.environ["NIX_PATH"] = f"localpkgs={local_pkgs}"
        self.empty_config = NamedTemporaryFile()
        self.empty_config.write(b"{}")
        self.empty_config.flush()
        os.environ["NIXPKGS_CONFIG"] = self.empty_config.name

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        os.environ.update(self.old_environ)
        self.empty_config.close()
def prefetch_plugin(
    p: PluginDesc,
    cache: "Optional[Cache]" = None,
) -> Tuple[Plugin, Optional[Repo]]:
    repo, branch, alias = p.repo, p.branch, p.alias
    name = alias or p.repo.name
    commit = None
    log.info(f"Fetching last commit for plugin {name} from {repo.uri}@{branch}")
    commit, date = repo.latest_commit()
    cached_plugin = cache[commit] if cache else None
    if cached_plugin is not None:
        log.debug("Cache hit!")
        cached_plugin.name = name
        cached_plugin.date = date
        return cached_plugin, repo.redirect

    has_submodules = repo.has_submodules()
    log.debug(f"prefetch {name}")
    sha256 = repo.prefetch(commit)

    return (
        Plugin(name, commit, has_submodules, sha256, date=date),
        repo.redirect,
    )
def print_download_error(plugin: PluginDesc, ex: Exception):
    print(f"{plugin}: {ex}", file=sys.stderr)
    ex_traceback = ex.__traceback__
    tb_lines = [
        line.rstrip("\n")
        for line in traceback.format_exception(ex.__class__, ex, ex_traceback)
    ]
    print("\n".join(tb_lines))
def check_results(
    results: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]]
) -> Tuple[List[Tuple[PluginDesc, Plugin]], Redirects]:
    '''Split prefetch results into fetched plugins and failures, recording any
    repository redirects encountered along the way.'''
    failures: List[Tuple[PluginDesc, Exception]] = []
    plugins = []
    redirects: Redirects = {}
    for (pdesc, result, redirect) in results:
        if isinstance(result, Exception):
            failures.append((pdesc, result))
        else:
            new_pdesc = pdesc
            if redirect is not None:
                redirects.update({pdesc: redirect})
                new_pdesc = PluginDesc(redirect, pdesc.branch, pdesc.alias)
            plugins.append((new_pdesc, result))

    print(f"{len(results) - len(failures)} plugins were checked", end="")
    if len(failures) == 0:
        print()
        return plugins, redirects
    else:
        print(f", {len(failures)} plugin(s) could not be downloaded:\n")

        for (plugin, exception) in failures:
            print_download_error(plugin, exception)

        sys.exit(1)
def make_repo(uri: str, branch) -> Repo:
    '''Instantiate a Repo with the correct specialization depending on the server (GitHub or generic git).'''
    # dumb check to see if it's of the form owner/repo (=> github) or https://...
    res = urlparse(uri)
    if res.netloc in ["github.com", ""]:
        res = res.path.strip('/').split('/')
        repo = RepoGitHub(res[0], res[1], branch)
    else:
        repo = Repo(uri.strip(), branch)
    return repo
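
# Examples of the dispatch above (illustrative names):
#
#   make_repo("someowner/somerepo", "HEAD")                  -> RepoGitHub
#   make_repo("https://github.com/someowner/somerepo/", "")  -> RepoGitHub
#   make_repo("https://gitlab.com/group/project", "main")    -> plain Repo (nix-prefetch-git)
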
def get_cache_path(cache_file_name: str) -> Optional[Path]:
    xdg_cache = os.environ.get("XDG_CACHE_HOME", None)
    if xdg_cache is None:
        home = os.environ.get("HOME", None)
        if home is None:
            return None
        xdg_cache = str(Path(home, ".cache"))

    return Path(xdg_cache, cache_file_name)
class Cache:
    def __init__(self, initial_plugins: List[Plugin], cache_file_name: str) -> None:
        self.cache_file = get_cache_path(cache_file_name)

        downloads = {}
        for plugin in initial_plugins:
            downloads[plugin.commit] = plugin
        downloads.update(self.load())
        self.downloads = downloads

    def load(self) -> Dict[str, Plugin]:
        if self.cache_file is None or not self.cache_file.exists():
            return {}

        downloads: Dict[str, Plugin] = {}
        with open(self.cache_file) as f:
            data = json.load(f)
            for attr in data.values():
                p = Plugin(
                    attr["name"], attr["commit"], attr["has_submodules"], attr["sha256"]
                )
                downloads[attr["commit"]] = p
        return downloads

    def store(self) -> None:
        if self.cache_file is None:
            return

        os.makedirs(self.cache_file.parent, exist_ok=True)
        with open(self.cache_file, "w+") as f:
            data = {}
            for name, attr in self.downloads.items():
                data[name] = attr.as_json()
            json.dump(data, f, indent=4, sort_keys=True)

    def __getitem__(self, key: str) -> Optional[Plugin]:
        return self.downloads.get(key, None)

    def __setitem__(self, key: str, value: Plugin) -> None:
        self.downloads[key] = value
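
# The on-disk cache written by Cache.store is a JSON object keyed by commit,
# with values matching Plugin.as_json; roughly (illustrative values):
#
#   {
#       "<commit sha>": {
#           "name": "some-plugin",
#           "commit": "<commit sha>",
#           "has_submodules": false,
#           "sha256": "<nix sha256>"
#       }
#   }
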
def prefetch(
    pluginDesc: PluginDesc, cache: Cache
) -> Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]:
    try:
        plugin, redirect = prefetch_plugin(pluginDesc, cache)
        cache[plugin.commit] = plugin
        return (pluginDesc, plugin, redirect)
    except Exception as e:
        return (pluginDesc, e, None)
def rewrite_input(
    config: FetchConfig,
    input_file: Path,
    deprecated: Path,
    # old pluginDesc and the new
    redirects: Redirects = {},
    append: List[PluginDesc] = [],
):
    plugins = load_plugins_from_csv(config, input_file,)

    plugins.extend(append)

    if redirects:
        cur_date_iso = datetime.now().strftime("%Y-%m-%d")
        with open(deprecated, "r") as f:
            deprecations = json.load(f)
        for pdesc, new_repo in redirects.items():
            new_pdesc = PluginDesc(new_repo, pdesc.branch, pdesc.alias)
            old_plugin, _ = prefetch_plugin(pdesc)
            new_plugin, _ = prefetch_plugin(new_pdesc)
            if old_plugin.normalized_name != new_plugin.normalized_name:
                deprecations[old_plugin.normalized_name] = {
                    "new": new_plugin.normalized_name,
                    "date": cur_date_iso,
                }
        with open(deprecated, "w") as f:
            json.dump(deprecations, f, indent=4, sort_keys=True)
            f.write("\n")

    with open(input_file, "w") as f:
        log.debug("Writing into %s", input_file)
        # fields = dataclasses.fields(PluginDesc)
        fieldnames = ['repo', 'branch', 'alias']
        writer = csv.DictWriter(f, fieldnames, dialect='unix', quoting=csv.QUOTE_NONE)
        writer.writeheader()
        for plugin in sorted(plugins):
            writer.writerow(asdict(plugin))
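
# deprecated.json, updated above whenever a repository redirect renames a
# plugin, maps old derivation names to their replacement; an entry looks
# roughly like this (illustrative names and date):
#
#   {
#       "old-plugin-name": {
#           "new": "new-plugin-name",
#           "date": "2023-01-01"
#       }
#   }
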
def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
    repo.index.add([str(f.resolve()) for f in files])

    if repo.index.diff("HEAD"):
        print(f'committing to nixpkgs "{message}"')
        repo.index.commit(message)
    else:
        print("no changes in working tree to commit")
def update_plugins(editor: Editor, args):
    """The main entry function of this module. All input arguments are grouped in the `Editor`."""

    log.setLevel(LOG_LEVELS[args.debug])
    log.info("Start updating plugins")
    fetch_config = FetchConfig(args.proc, args.github_token)
    update = editor.get_update(args.input_file, args.outfile, fetch_config)

    redirects = update()
    editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, redirects)

    autocommit = not args.no_commit

    if autocommit:
        editor.nixpkgs_repo = git.Repo(editor.root, search_parent_directories=True)
        commit(editor.nixpkgs_repo, f"{editor.attr_path}: update", [args.outfile])

    if redirects:
        update()
        if autocommit:
            commit(
                editor.nixpkgs_repo,
                f"{editor.attr_path}: resolve github repository redirects",
                [args.outfile, args.input_file, editor.deprecated],
            )

    for plugin_line in args.add_plugins:
        pdesc = PluginDesc.load_from_string(fetch_config, plugin_line)
        append = [pdesc]
        editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, append=append)
        update()
        plugin, _ = prefetch_plugin(pdesc)
        if autocommit:
            commit(
                editor.nixpkgs_repo,
                "{drv_name}: init at {version}".format(
                    drv_name=editor.get_drv_name(plugin.normalized_name),
                    version=plugin.version,
                ),
                [args.outfile, args.input_file],
            )