1 # python library used to update plugins:
2 # - pkgs/applications/editors/vim/plugins/update.py
3 # - pkgs/applications/editors/kakoune/plugins/update.py
4 # - maintainers/scripts/update-luarocks-packages
7 # $ nix run nixpkgs.python3Packages.black -c black update.py
9 # $ nix run nixpkgs.python3Packages.mypy -c mypy update.py
11 # $ nix run nixpkgs.python3Packages.flake8 -c flake8 --ignore E501,E265 update.py
27 import xml
.etree
.ElementTree
as ET
28 from datetime
import datetime
29 from functools
import wraps
30 from multiprocessing
.dummy
import Pool
31 from pathlib
import Path
32 from typing
import Dict
, List
, Optional
, Tuple
, Union
, Any
, Callable
33 from urllib
.parse
import urljoin
, urlparse
34 from tempfile
import NamedTemporaryFile
35 from dataclasses
import dataclass
, asdict
# XML tag names (with Atom namespace) used when parsing GitHub commit feeds.
ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "
44 logging
.getLevelName(level
): level
for level
in [
45 logging
.DEBUG
, logging
.INFO
, logging
.WARN
, logging
.ERROR
]
# Root logger shared by the whole script; its level is set later from the
# --debug CLI option (see the LOG_LEVELS lookup in the command handler).
log = logging.getLogger()
50 def retry(ExceptionToCheck
: Any
, tries
: int = 4, delay
: float = 3, backoff
: float = 2):
51 """Retry calling the decorated function using an exponential backoff.
52 http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
53 original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
55 :param ExceptionToCheck: the exception on which to retry
56 :param tries: number of times to try (not retry) before giving up
57 :param delay: initial delay between retries in seconds
58 :param backoff: backoff multiplier e.g. value of 2 will double the delay
62 def deco_retry(f
: Callable
) -> Callable
:
64 def f_retry(*args
: Any
, **kwargs
: Any
) -> Any
:
65 mtries
, mdelay
= tries
, delay
68 return f(*args
, **kwargs
)
69 except ExceptionToCheck
as e
:
70 print(f
"{str(e)}, Retrying in {mdelay} seconds...")
74 return f(*args
, **kwargs
)
76 return f_retry
# true decorator
86 def make_request(url
: str, token
=None) -> urllib
.request
.Request
:
89 headers
["Authorization"] = f
"token {token}"
90 return urllib
.request
.Request(url
, headers
=headers
)
# a dictionary of plugins and their new repositories
# Maps an old PluginDesc to the Repo it was redirected/renamed to; filled in
# while prefetching and used to rewrite the plugin list afterwards.
Redirects = Dict['PluginDesc', 'Repo']
98 self
, uri
: str, branch
: str
101 '''Url to the repo'''
102 self
._branch
= branch
103 # Redirect is the new Repo to use
104 self
.redirect
: Optional
['Repo'] = None
105 self
.token
= "dummy_token"
109 return self
.uri
.split('/')[-1]
113 return self
._branch
or "HEAD"
115 def __str__(self
) -> str:
117 def __repr__(self
) -> str:
118 return f
"Repo({self.name}, {self.uri})"
120 @retry(urllib
.error
.URLError
, tries
=4, delay
=3, backoff
=2)
121 def has_submodules(self
) -> bool:
124 @retry(urllib
.error
.URLError
, tries
=4, delay
=3, backoff
=2)
125 def latest_commit(self
) -> Tuple
[str, datetime
]:
126 log
.debug("Latest commit")
127 loaded
= self
._prefetch
(None)
128 updated
= datetime
.strptime(loaded
['date'], "%Y-%m-%dT%H:%M:%S%z")
130 return loaded
['rev'], updated
132 def _prefetch(self
, ref
: Optional
[str]):
133 cmd
= ["nix-prefetch-git", "--quiet", "--fetch-submodules", self
.uri
]
137 data
= subprocess
.check_output(cmd
)
138 loaded
= json
.loads(data
)
141 def prefetch(self
, ref
: Optional
[str]) -> str:
143 loaded
= self
._prefetch
(ref
)
144 return loaded
["sha256"]
146 def as_nix(self
, plugin
: "Plugin") -> str:
147 return f
'''fetchgit {{
149 rev = "{plugin.commit}";
150 sha256 = "{plugin.sha256}";
154 class RepoGitHub(Repo
):
156 self
, owner
: str, repo
: str, branch
: str
161 '''Url to the repo'''
162 super().__init
__(self
.url(""), branch
)
163 log
.debug("Instantiating github repo owner=%s and repo=%s", self
.owner
, self
.repo
)
169 def url(self
, path
: str) -> str:
170 res
= urljoin(f
"https://github.com/{self.owner}/{self.repo}/", path
)
173 @retry(urllib
.error
.URLError
, tries
=4, delay
=3, backoff
=2)
174 def has_submodules(self
) -> bool:
176 req
= make_request(self
.url(f
"blob/{self.branch}/.gitmodules"), self
.token
)
177 urllib
.request
.urlopen(req
, timeout
=10).close()
178 except urllib
.error
.HTTPError
as e
:
185 @retry(urllib
.error
.URLError
, tries
=4, delay
=3, backoff
=2)
186 def latest_commit(self
) -> Tuple
[str, datetime
]:
187 commit_url
= self
.url(f
"commits/{self.branch}.atom")
188 log
.debug("Sending request to %s", commit_url
)
189 commit_req
= make_request(commit_url
, self
.token
)
190 with urllib
.request
.urlopen(commit_req
, timeout
=10) as req
:
191 self
._check
_for
_redirect
(commit_url
, req
)
193 root
= ET
.fromstring(xml
)
194 latest_entry
= root
.find(ATOM_ENTRY
)
195 assert latest_entry
is not None, f
"No commits found in repository {self}"
196 commit_link
= latest_entry
.find(ATOM_LINK
)
197 assert commit_link
is not None, f
"No link tag found feed entry {xml}"
198 url
= urlparse(commit_link
.get("href"))
199 updated_tag
= latest_entry
.find(ATOM_UPDATED
)
201 updated_tag
is not None and updated_tag
.text
is not None
202 ), f
"No updated tag found feed entry {xml}"
203 updated
= datetime
.strptime(updated_tag
.text
, "%Y-%m-%dT%H:%M:%SZ")
204 return Path(str(url
.path
)).name
, updated
206 def _check_for_redirect(self
, url
: str, req
: http
.client
.HTTPResponse
):
207 response_url
= req
.geturl()
208 if url
!= response_url
:
209 new_owner
, new_name
= (
210 urllib
.parse
.urlsplit(response_url
).path
.strip("/").split("/")[:2]
213 new_repo
= RepoGitHub(owner
=new_owner
, repo
=new_name
, branch
=self
.branch
)
214 self
.redirect
= new_repo
217 def prefetch(self
, commit
: str) -> str:
218 if self
.has_submodules():
219 sha256
= super().prefetch(commit
)
221 sha256
= self
.prefetch_github(commit
)
224 def prefetch_github(self
, ref
: str) -> str:
225 cmd
= ["nix-prefetch-url", "--unpack", self
.url(f
"archive/{ref}.tar.gz")]
226 log
.debug("Running %s", cmd
)
227 data
= subprocess
.check_output(cmd
)
228 return data
.strip().decode("utf-8")
230 def as_nix(self
, plugin
: "Plugin") -> str:
231 if plugin
.has_submodules
:
232 submodule_attr
= "\n fetchSubmodules = true;"
236 return f
'''fetchFromGitHub {{
237 owner = "{self.owner}";
238 repo = "{self.repo}";
239 rev = "{plugin.commit}";
240 sha256 = "{plugin.sha256}";{submodule_attr}
244 @dataclass(frozen
=True)
252 if self
.alias
is None:
253 return self
.repo
.name
257 def __lt__(self
, other
):
258 return self
.repo
.name
< other
.repo
.name
261 def load_from_csv(config
: FetchConfig
, row
: Dict
[str, str]) -> 'PluginDesc':
262 branch
= row
["branch"]
263 repo
= make_repo(row
['repo'], branch
.strip())
264 repo
.token
= config
.github_token
265 return PluginDesc(repo
, branch
.strip(), row
["alias"])
269 def load_from_string(config
: FetchConfig
, line
: str) -> 'PluginDesc':
274 uri
, alias
= uri
.split(" as ")
275 alias
= alias
.strip()
277 uri
, branch
= uri
.split("@")
278 repo
= make_repo(uri
.strip(), branch
.strip())
279 repo
.token
= config
.github_token
280 return PluginDesc(repo
, branch
.strip(), alias
)
288 date
: Optional
[datetime
] = None
291 def normalized_name(self
) -> str:
292 return self
.name
.replace(".", "-")
295 def version(self
) -> str:
296 assert self
.date
is not None
297 return self
.date
.strftime("%Y-%m-%d")
299 def as_json(self
) -> Dict
[str, str]:
300 copy
= self
.__dict
__.copy()
305 def load_plugins_from_csv(config
: FetchConfig
, input_file
: Path
,) -> List
[PluginDesc
]:
306 log
.debug("Load plugins from csv %s", input_file
)
308 with
open(input_file
, newline
='') as csvfile
:
309 log
.debug("Writing into %s", input_file
)
310 reader
= csv
.DictReader(csvfile
,)
312 plugin
= PluginDesc
.load_from_csv(config
, line
)
313 plugins
.append(plugin
)
317 def run_nix_expr(expr
):
318 with
CleanEnvironment():
319 cmd
= ["nix", "eval", "--extra-experimental-features",
320 "nix-command", "--impure", "--json", "--expr", expr
]
321 log
.debug("Running command %s", " ".join(cmd
))
322 out
= subprocess
.check_output(cmd
)
323 data
= json
.loads(out
)
328 """The configuration of the update script."""
335 default_in
: Optional
[Path
] = None,
336 default_out
: Optional
[Path
] = None,
337 deprecated
: Optional
[Path
] = None,
338 cache_file
: Optional
[str] = None,
340 log
.debug("get_plugins:", get_plugins
)
343 self
.get_plugins
= get_plugins
344 self
.default_in
= default_in
or root
.joinpath(f
"{name}-plugin-names")
345 self
.default_out
= default_out
or root
.joinpath("generated.nix")
346 self
.deprecated
= deprecated
or root
.joinpath("deprecated.json")
347 self
.cache_file
= cache_file
or f
"{name}-plugin-cache.json"
348 self
.nixpkgs_repo
= None
352 log
.debug("called the 'add' command")
353 fetch_config
= FetchConfig(args
.proc
, args
.github_token
)
355 for plugin_line
in args
.add_plugins
:
356 log
.debug("using plugin_line", plugin_line
)
357 pdesc
= PluginDesc
.load_from_string(fetch_config
, plugin_line
)
358 log
.debug("loaded as pdesc", pdesc
)
360 editor
.rewrite_input(fetch_config
, args
.input_file
, editor
.deprecated
, append
=append
)
361 plugin
, _
= prefetch_plugin(pdesc
, )
362 autocommit
= not args
.no_commit
366 "{drv_name}: init at {version}".format(
367 drv_name
=editor
.get_drv_name(plugin
.normalized_name
),
368 version
=plugin
.version
370 [args
.outfile
, args
.input_file
],
373 # Expects arguments generated by 'update' subparser
374 def update(self
, args
):
376 print("the update member function should be overriden in subclasses")
378 def get_current_plugins(self
) -> List
[Plugin
]:
379 """To fill the cache"""
380 data
= run_nix_expr(self
.get_plugins
)
382 for name
, attr
in data
.items():
383 p
= Plugin(name
, attr
["rev"], attr
["submodules"], attr
["sha256"])
387 def load_plugin_spec(self
, config
: FetchConfig
, plugin_file
) -> List
[PluginDesc
]:
389 return load_plugins_from_csv(config
, plugin_file
)
391 def generate_nix(self
, _plugins
, _outfile
: str):
392 '''Returns nothing for now, writes directly to outfile'''
393 raise NotImplementedError()
395 def get_update(self
, input_file
: str, outfile
: str, config
: FetchConfig
):
396 cache
: Cache
= Cache(self
.get_current_plugins(), self
.cache_file
)
397 _prefetch
= functools
.partial(prefetch
, cache
=cache
)
399 def update() -> dict:
400 plugins
= self
.load_plugin_spec(config
, input_file
)
403 pool
= Pool(processes
=config
.proc
)
404 results
= pool
.map(_prefetch
, plugins
)
408 plugins
, redirects
= check_results(results
)
410 self
.generate_nix(plugins
, outfile
)
419 return self
.name
+ "Plugins"
421 def get_drv_name(self
, name
: str):
422 return self
.attr_path
+ "." + name
    def rewrite_input(self, *args, **kwargs):
        """Thin delegation to the module-level ``rewrite_input`` helper.

        NOTE(review): presumably kept as a method so subclasses can override
        how the plugin list file is rewritten -- confirm.
        """
        return rewrite_input(*args, **kwargs)
427 def create_parser(self
):
428 common
= argparse
.ArgumentParser(
431 Updates nix derivations for {self.name} plugins.\n
432 By default from {self.default_in} to {self.default_out}"""
439 default
=self
.default_in
,
440 help="A list of plugins in the form owner/repo",
446 default
=self
.default_out
,
447 help="Filename to save generated nix code",
455 help="Number of concurrent processes to spawn. Setting --github-token allows higher values.",
461 default
=os
.getenv("GITHUB_API_TOKEN"),
462 help="""Allows to set --proc to higher values.
463 Uses GITHUB_API_TOKEN environment variables as the default value.""",
466 "--no-commit", "-n", action
="store_true", default
=False,
467 help="Whether to autocommit changes"
470 "--debug", "-d", choices
=LOG_LEVELS
.keys(),
471 default
=logging
.getLevelName(logging
.WARN
),
472 help="Adjust log level"
475 main
= argparse
.ArgumentParser(
478 Updates nix derivations for {self.name} plugins.\n
479 By default from {self.default_in} to {self.default_out}"""
483 subparsers
= main
.add_subparsers(dest
="command", required
=False)
484 padd
= subparsers
.add_parser(
486 description
="Add new plugin",
489 padd
.set_defaults(func
=self
.add
)
494 help=f
"Plugin to add to {self.attr_path} from Github in the form owner/repo",
497 pupdate
= subparsers
.add_parser(
499 description
="Update all or a subset of existing plugins",
502 pupdate
.set_defaults(func
=self
.update
)
509 parser
= self
.create_parser()
510 args
= parser
.parse_args()
511 command
= args
.command
or "update"
512 log
.setLevel(LOG_LEVELS
[args
.debug
])
513 log
.info("Chose to run command: %s", command
)
515 if not args
.no_commit
:
516 self
.nixpkgs_repo
= git
.Repo(self
.root
, search_parent_directories
=True)
518 getattr(self
, command
)(args
)
class CleanEnvironment(object):
    """Context manager providing a minimal, self-contained nix environment.

    On entry it points NIX_PATH at the local checkout (three directories up
    from this file) and NIXPKGS_CONFIG at an empty temporary config file.
    On exit it restores the previous environment exactly and deletes the
    temporary file.
    """

    def __enter__(self) -> None:
        # Snapshot the whole environment so __exit__ can restore it.
        self.old_environ = os.environ.copy()
        local_pkgs = str(Path(__file__).parent.parent.parent)
        os.environ["NIX_PATH"] = f"localpkgs={local_pkgs}"
        self.empty_config = NamedTemporaryFile()
        self.empty_config.write(b"{}")
        self.empty_config.flush()
        os.environ["NIXPKGS_CONFIG"] = self.empty_config.name

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        # Bug fix: update() alone only restores values of pre-existing keys;
        # it would leave NIX_PATH/NIXPKGS_CONFIG set if they were absent
        # before entry. Clear first so added keys are removed as well.
        os.environ.clear()
        os.environ.update(self.old_environ)
        # Closing the NamedTemporaryFile also deletes it from disk.
        self.empty_config.close()
540 cache
: "Optional[Cache]" = None,
541 ) -> Tuple
[Plugin
, Optional
[Repo
]]:
542 repo
, branch
, alias
= p
.repo
, p
.branch
, p
.alias
543 name
= alias
or p
.repo
.name
545 log
.info(f
"Fetching last commit for plugin {name} from {repo.uri}@{branch}")
546 commit
, date
= repo
.latest_commit()
547 cached_plugin
= cache
[commit
] if cache
else None
548 if cached_plugin
is not None:
549 log
.debug("Cache hit !")
550 cached_plugin
.name
= name
551 cached_plugin
.date
= date
552 return cached_plugin
, repo
.redirect
554 has_submodules
= repo
.has_submodules()
555 log
.debug(f
"prefetch {name}")
556 sha256
= repo
.prefetch(commit
)
559 Plugin(name
, commit
, has_submodules
, sha256
, date
=date
),
564 def print_download_error(plugin
: PluginDesc
, ex
: Exception):
565 print(f
"{plugin}: {ex}", file=sys
.stderr
)
566 ex_traceback
= ex
.__traceback
__
569 for line
in traceback
.format_exception(ex
.__class
__, ex
, ex_traceback
)
571 print("\n".join(tb_lines
))
574 results
: List
[Tuple
[PluginDesc
, Union
[Exception, Plugin
], Optional
[Repo
]]]
575 ) -> Tuple
[List
[Tuple
[PluginDesc
, Plugin
]], Redirects
]:
577 failures
: List
[Tuple
[PluginDesc
, Exception]] = []
579 redirects
: Redirects
= {}
580 for (pdesc
, result
, redirect
) in results
:
581 if isinstance(result
, Exception):
582 failures
.append((pdesc
, result
))
585 if redirect
is not None:
586 redirects
.update({pdesc
: redirect
})
587 new_pdesc
= PluginDesc(redirect
, pdesc
.branch
, pdesc
.alias
)
588 plugins
.append((new_pdesc
, result
))
590 print(f
"{len(results) - len(failures)} plugins were checked", end
="")
591 if len(failures
) == 0:
593 return plugins
, redirects
595 print(f
", {len(failures)} plugin(s) could not be downloaded:\n")
597 for (plugin
, exception
) in failures
:
598 print_download_error(plugin
, exception
)
602 def make_repo(uri
: str, branch
) -> Repo
:
603 '''Instantiate a Repo with the correct specialization depending on server (gitub spec)'''
604 # dumb check to see if it's of the form owner/repo (=> github) or https://...
606 if res
.netloc
in [ "github.com", ""]:
607 res
= res
.path
.strip('/').split('/')
608 repo
= RepoGitHub(res
[0], res
[1], branch
)
610 repo
= Repo(uri
.strip(), branch
)
614 def get_cache_path(cache_file_name
: str) -> Optional
[Path
]:
615 xdg_cache
= os
.environ
.get("XDG_CACHE_HOME", None)
616 if xdg_cache
is None:
617 home
= os
.environ
.get("HOME", None)
620 xdg_cache
= str(Path(home
, ".cache"))
622 return Path(xdg_cache
, cache_file_name
)
626 def __init__(self
, initial_plugins
: List
[Plugin
], cache_file_name
: str) -> None:
627 self
.cache_file
= get_cache_path(cache_file_name
)
630 for plugin
in initial_plugins
:
631 downloads
[plugin
.commit
] = plugin
632 downloads
.update(self
.load())
633 self
.downloads
= downloads
635 def load(self
) -> Dict
[str, Plugin
]:
636 if self
.cache_file
is None or not self
.cache_file
.exists():
639 downloads
: Dict
[str, Plugin
] = {}
640 with
open(self
.cache_file
) as f
:
642 for attr
in data
.values():
644 attr
["name"], attr
["commit"], attr
["has_submodules"], attr
["sha256"]
646 downloads
[attr
["commit"]] = p
649 def store(self
) -> None:
650 if self
.cache_file
is None:
653 os
.makedirs(self
.cache_file
.parent
, exist_ok
=True)
654 with
open(self
.cache_file
, "w+") as f
:
656 for name
, attr
in self
.downloads
.items():
657 data
[name
] = attr
.as_json()
658 json
.dump(data
, f
, indent
=4, sort_keys
=True)
660 def __getitem__(self
, key
: str) -> Optional
[Plugin
]:
661 return self
.downloads
.get(key
, None)
663 def __setitem__(self
, key
: str, value
: Plugin
) -> None:
664 self
.downloads
[key
] = value
668 pluginDesc
: PluginDesc
, cache
: Cache
669 ) -> Tuple
[PluginDesc
, Union
[Exception, Plugin
], Optional
[Repo
]]:
671 plugin
, redirect
= prefetch_plugin(pluginDesc
, cache
)
672 cache
[plugin
.commit
] = plugin
673 return (pluginDesc
, plugin
, redirect
)
674 except Exception as e
:
675 return (pluginDesc
, e
, None)
683 # old pluginDesc and the new
684 redirects
: Redirects
= {},
685 append
: List
[PluginDesc
] = [],
687 plugins
= load_plugins_from_csv(config
, input_file
,)
689 plugins
.extend(append
)
693 cur_date_iso
= datetime
.now().strftime("%Y-%m-%d")
694 with
open(deprecated
, "r") as f
:
695 deprecations
= json
.load(f
)
696 for pdesc
, new_repo
in redirects
.items():
697 new_pdesc
= PluginDesc(new_repo
, pdesc
.branch
, pdesc
.alias
)
698 old_plugin
, _
= prefetch_plugin(pdesc
)
699 new_plugin
, _
= prefetch_plugin(new_pdesc
)
700 if old_plugin
.normalized_name
!= new_plugin
.normalized_name
:
701 deprecations
[old_plugin
.normalized_name
] = {
702 "new": new_plugin
.normalized_name
,
703 "date": cur_date_iso
,
705 with
open(deprecated
, "w") as f
:
706 json
.dump(deprecations
, f
, indent
=4, sort_keys
=True)
709 with
open(input_file
, "w") as f
:
710 log
.debug("Writing into %s", input_file
)
711 # fields = dataclasses.fields(PluginDesc)
712 fieldnames
= ['repo', 'branch', 'alias']
713 writer
= csv
.DictWriter(f
, fieldnames
, dialect
='unix', quoting
=csv
.QUOTE_NONE
)
715 for plugin
in sorted(plugins
):
716 writer
.writerow(asdict(plugin
))
719 def commit(repo
: git
.Repo
, message
: str, files
: List
[Path
]) -> None:
720 repo
.index
.add([str(f
.resolve()) for f
in files
])
722 if repo
.index
.diff("HEAD"):
723 print(f
'committing to nixpkgs "{message}"')
724 repo
.index
.commit(message
)
726 print("no changes in working tree to commit")
730 def update_plugins(editor
: Editor
, args
):
731 """The main entry function of this module. All input arguments are grouped in the `Editor`."""
733 log
.info("Start updating plugins")
734 fetch_config
= FetchConfig(args
.proc
, args
.github_token
)
735 update
= editor
.get_update(args
.input_file
, args
.outfile
, fetch_config
)
738 editor
.rewrite_input(fetch_config
, args
.input_file
, editor
.deprecated
, redirects
)
740 autocommit
= not args
.no_commit
743 editor
.nixpkgs_repo
= git
.Repo(editor
.root
, search_parent_directories
=True)
744 commit(editor
.nixpkgs_repo
, f
"{editor.attr_path}: update", [args
.outfile
])
751 f
"{editor.attr_path}: resolve github repository redirects",
752 [args
.outfile
, args
.input_file
, editor
.deprecated
],