# Used by pkgs/applications/editors/vim/plugins/update.py and pkgs/applications/editors/kakoune/plugins/update.py
# $ nix run nixpkgs.python3Packages.black -c black update.py
# $ nix run nixpkgs.python3Packages.mypy -c mypy update.py
# $ nix run nixpkgs.python3Packages.flake8 -c flake8 --ignore E501,E265 update.py
import argparse
import csv
import functools
import http.client
import json
import logging
import os
import subprocess
import sys
import time
import traceback
import urllib.error
import urllib.parse
import urllib.request
import xml.etree.ElementTree as ET
from datetime import datetime
from functools import wraps
from multiprocessing.dummy import Pool
from pathlib import Path
from typing import Dict, List, Optional, Tuple, Union, Any, Callable
from urllib.parse import urljoin, urlparse
from tempfile import NamedTemporaryFile
from dataclasses import dataclass, asdict

import git
ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "
LOG_LEVELS = {
    logging.getLevelName(level): level
    for level in [logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR]
}
log = logging.getLogger()
def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: float = 2):
    """Retry calling the decorated function using an exponential backoff.

    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry

    :param ExceptionToCheck: the exception on which to retry
    :param tries: number of times to try (not retry) before giving up
    :param delay: initial delay between retries in seconds
    :param backoff: backoff multiplier, e.g. a value of 2 will double the delay on each retry
    """
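    # Illustrative sketch (not from the original file): with the defaults
    # tries=4, delay=3, backoff=2, a persistently failing call is attempted
    # four times, sleeping 3s, 6s and 12s between attempts, e.g.
    #
    #   @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    #   def fetch() -> bytes:
    #       return urllib.request.urlopen("https://example.com", timeout=10).read()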
    def deco_retry(f: Callable) -> Callable:
        @wraps(f)
        def f_retry(*args: Any, **kwargs: Any) -> Any:
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except ExceptionToCheck as e:
                    print(f"{str(e)}, Retrying in {mdelay} seconds...")
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            return f(*args, **kwargs)

        return f_retry  # true decorator

    return deco_retry
def make_request(url: str, token=None) -> urllib.request.Request:
    headers = {}
    if token is not None:
        headers["Authorization"] = f"token {token}"
    return urllib.request.Request(url, headers=headers)
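# Illustrative (not from the original file): when a token is supplied the
# request carries a GitHub-style header, e.g.
#   make_request("https://github.com/owner/repo/commits/HEAD.atom", "s3cret")
# yields a Request whose headers include {"Authorization": "token s3cret"}.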
# a dictionary of plugins and their new repositories
Redirects = Dict['PluginDesc', 'Repo']


@dataclass
class FetchConfig:
    proc: int
    github_token: str
class Repo:
    def __init__(
        self, uri: str, branch: str
    ) -> None:
        self.uri = uri
        self._branch = branch
        # Redirect is the new Repo to use
        self.redirect: Optional['Repo'] = None
        self.token = "dummy_token"

    @property
    def name(self):
        return self.uri.split('/')[-1]

    @property
    def branch(self):
        return self._branch or "HEAD"

    def __str__(self) -> str:
        return f"{self.uri}"

    def __repr__(self) -> str:
        return f"Repo({self.name}, {self.uri})"
    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        return True
    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        log.debug("Latest commit")
        loaded = self._prefetch(None)
        updated = datetime.strptime(loaded['date'], "%Y-%m-%dT%H:%M:%S%z")

        return loaded['rev'], updated
    def _prefetch(self, ref: Optional[str]):
        cmd = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
        if ref is not None:
            cmd.append(ref)
        data = subprocess.check_output(cmd)
        loaded = json.loads(data)
        return loaded

    def prefetch(self, ref: Optional[str]) -> str:
        loaded = self._prefetch(ref)
        return loaded["sha256"]
    def as_nix(self, plugin: "Plugin") -> str:
        return f'''fetchgit {{
      url = "{self.uri}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";
    }}'''
class RepoGitHub(Repo):
    def __init__(
        self, owner: str, repo: str, branch: str
    ) -> None:
        self.owner = owner
        self.repo = repo
        self.token = None
        '''Url to the repo'''
        super().__init__(self.url(""), branch)
        log.debug("Instantiating github repo owner=%s and repo=%s", self.owner, self.repo)
    @property
    def name(self):
        return self.repo

    def url(self, path: str) -> str:
        res = urljoin(f"https://github.com/{self.owner}/{self.repo}/", path)
        return res
    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        try:
            req = make_request(self.url(f"blob/{self.branch}/.gitmodules"), self.token)
            urllib.request.urlopen(req, timeout=10).close()
        except urllib.error.HTTPError as e:
            if e.code == 404:
                return False
            else:
                raise
        return True
    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        commit_url = self.url(f"commits/{self.branch}.atom")
        log.debug("Sending request to %s", commit_url)
        commit_req = make_request(commit_url, self.token)
        with urllib.request.urlopen(commit_req, timeout=10) as req:
            self._check_for_redirect(commit_url, req)
            xml = req.read()
            root = ET.fromstring(xml)
            latest_entry = root.find(ATOM_ENTRY)
            assert latest_entry is not None, f"No commits found in repository {self}"
            commit_link = latest_entry.find(ATOM_LINK)
            assert commit_link is not None, f"No link tag found in feed entry {xml}"
            url = urlparse(commit_link.get("href"))
            updated_tag = latest_entry.find(ATOM_UPDATED)
            assert (
                updated_tag is not None and updated_tag.text is not None
            ), f"No updated tag found in feed entry {xml}"
            updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
            return Path(str(url.path)).name, updated
    def _check_for_redirect(self, url: str, req: http.client.HTTPResponse):
        response_url = req.geturl()
        if url != response_url:
            new_owner, new_name = (
                urllib.parse.urlsplit(response_url).path.strip("/").split("/")[:2]
            )

            new_repo = RepoGitHub(owner=new_owner, repo=new_name, branch=self.branch)
            self.redirect = new_repo
    def prefetch(self, commit: str) -> str:
        if self.has_submodules():
            sha256 = super().prefetch(commit)
        else:
            sha256 = self.prefetch_github(commit)
        return sha256
    def prefetch_github(self, ref: str) -> str:
        cmd = ["nix-prefetch-url", "--unpack", self.url(f"archive/{ref}.tar.gz")]
        log.debug("Running %s", cmd)
        data = subprocess.check_output(cmd)
        return data.strip().decode("utf-8")
    def as_nix(self, plugin: "Plugin") -> str:
        if plugin.has_submodules:
            submodule_attr = "\n      fetchSubmodules = true;"
        else:
            submodule_attr = ""

        return f'''fetchFromGitHub {{
      owner = "{self.owner}";
      repo = "{self.repo}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";{submodule_attr}
    }}'''
@dataclass(frozen=True)
class PluginDesc:
    repo: Repo
    branch: str
    alias: Optional[str]

    @property
    def name(self):
        if self.alias is None:
            return self.repo.name
        else:
            return self.alias

    def __lt__(self, other):
        return self.repo.name < other.repo.name
    @staticmethod
    def load_from_csv(config: FetchConfig, row: Dict[str, str]) -> 'PluginDesc':
        branch = row["branch"]
        repo = make_repo(row['repo'], branch.strip())
        repo.token = config.github_token
        return PluginDesc(repo, branch.strip(), row["alias"])
    @staticmethod
    def load_from_string(config: FetchConfig, line: str) -> 'PluginDesc':
        branch = "HEAD"
        alias = None
        uri = line
        if " as " in uri:
            uri, alias = uri.split(" as ")
            alias = alias.strip()
        if "@" in uri:
            uri, branch = uri.split("@")
        repo = make_repo(uri.strip(), branch.strip())
        repo.token = config.github_token
        return PluginDesc(repo, branch.strip(), alias)
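    # Illustrative spec lines accepted by load_from_string (examples, not from
    # the original file):
    #   "owner/repo"           -> github repo on its default branch (HEAD)
    #   "owner/repo@develop"   -> github repo pinned to branch develop
    #   "https://host/x as y"  -> arbitrary git repo aliased to y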
@dataclass
class Plugin:
    name: str
    commit: str
    has_submodules: bool
    sha256: str
    date: Optional[datetime] = None

    @property
    def normalized_name(self) -> str:
        return self.name.replace(".", "-")

    @property
    def version(self) -> str:
        assert self.date is not None
        return self.date.strftime("%Y-%m-%d")

    def as_json(self) -> Dict[str, str]:
        copy = self.__dict__.copy()
        del copy["date"]
        return copy
def load_plugins_from_csv(config: FetchConfig, input_file: Path,) -> List[PluginDesc]:
    log.debug("Load plugins from csv %s", input_file)
    plugins = []
    with open(input_file, newline='') as csvfile:
        log.debug("Reading from %s", input_file)
        reader = csv.DictReader(csvfile,)
        for line in reader:
            plugin = PluginDesc.load_from_csv(config, line)
            plugins.append(plugin)

    return plugins
def run_nix_expr(expr):
    with CleanEnvironment():
        cmd = ["nix", "eval", "--extra-experimental-features",
               "nix-command", "--impure", "--json", "--expr", expr]
        log.debug("Running command %s", cmd)
        out = subprocess.check_output(cmd)
        data = json.loads(out)
        return data
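# Illustrative (not from the original file): the expression is evaluated via
# `nix eval --json`, so e.g. run_nix_expr("1 + 1") returns the Python int 2,
# and run_nix_expr('{ a = "b"; }') returns the dict {"a": "b"}.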
325 """The configuration of the update script."""
    def __init__(
        self,
        name: str,
        root: Path,
        get_plugins: str,
        default_in: Optional[Path] = None,
        default_out: Optional[Path] = None,
        deprecated: Optional[Path] = None,
        cache_file: Optional[str] = None,
    ):
        log.debug("get_plugins: %s", get_plugins)
        self.name = name
        self.root = root
        self.get_plugins = get_plugins
        self.default_in = default_in or root.joinpath(f"{name}-plugin-names")
        self.default_out = default_out or root.joinpath("generated.nix")
        self.deprecated = deprecated or root.joinpath("deprecated.json")
        self.cache_file = cache_file or f"{name}-plugin-cache.json"
        self.nixpkgs_repo = None
    def get_current_plugins(self) -> List[Plugin]:
        """To fill the cache"""
        data = run_nix_expr(self.get_plugins)
        plugins = []
        for name, attr in data.items():
            print("get_current_plugins: name %s" % name)
            p = Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
            plugins.append(p)
        return plugins
    def load_plugin_spec(self, config: FetchConfig, plugin_file) -> List[PluginDesc]:
        return load_plugins_from_csv(config, plugin_file)

    def generate_nix(self, plugins, outfile: str):
        '''Returns nothing for now, writes directly to outfile'''
        raise NotImplementedError()
    def get_update(self, input_file: str, outfile: str, config: FetchConfig):
        cache: Cache = Cache(self.get_current_plugins(), self.cache_file)
        _prefetch = functools.partial(prefetch, cache=cache)

        def update() -> dict:
            plugins = self.load_plugin_spec(config, input_file)

            try:
                pool = Pool(processes=config.proc)
                results = pool.map(_prefetch, plugins)
            finally:
                cache.store()

            plugins, redirects = check_results(results)

            self.generate_nix(plugins, outfile)

            return redirects

        return update
    @property
    def attr_path(self):
        return self.name + "Plugins"

    def get_drv_name(self, name: str):
        return self.attr_path + "." + name

    def rewrite_input(self, *args, **kwargs):
        return rewrite_input(*args, **kwargs)
    def create_parser(self):
        parser = argparse.ArgumentParser(
            description=(f"""
                Updates nix derivations for {self.name} plugins.\n
                By default from {self.default_in} to {self.default_out}"""
            )
        )
        parser.add_argument(
            "--add",
            dest="add_plugins",
            default=[],
            action="append",
            help=f"Plugin to add to {self.attr_path} from Github in the form owner/repo",
        )
        parser.add_argument(
            "--input-names", "-i",
            dest="input_file",
            default=self.default_in,
            help="A list of plugins in the form owner/repo",
        )
        parser.add_argument(
            "--out", "-o",
            dest="outfile",
            default=self.default_out,
            help="Filename to save generated nix code",
        )
        parser.add_argument(
            "--proc", "-p",
            dest="proc",
            type=int,
            default=30,
            help="Number of concurrent processes to spawn. Setting --github-token allows higher values.",
        )
        parser.add_argument(
            "--github-token", "-t",
            type=str,
            default=os.getenv("GITHUB_API_TOKEN"),
            help="""Allows to set --proc to higher values.
            Uses the GITHUB_API_TOKEN environment variable as the default value.""",
        )
        parser.add_argument(
            "--no-commit", "-n", action="store_true", default=False,
            help="Whether to autocommit changes"
        )
        parser.add_argument(
            "--debug", "-d", choices=LOG_LEVELS.keys(),
            default=logging.getLevelName(logging.WARN),
            help="Adjust log level"
        )
        return parser
class CleanEnvironment(object):
    def __enter__(self) -> None:
        self.old_environ = os.environ.copy()
        local_pkgs = str(Path(__file__).parent.parent.parent)
        os.environ["NIX_PATH"] = f"localpkgs={local_pkgs}"
        self.empty_config = NamedTemporaryFile()
        self.empty_config.write(b"{}")
        self.empty_config.flush()
        os.environ["NIXPKGS_CONFIG"] = self.empty_config.name

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        os.environ.update(self.old_environ)
        self.empty_config.close()
def prefetch_plugin(
    p: PluginDesc,
    cache: "Optional[Cache]" = None,
) -> Tuple[Plugin, Optional[Repo]]:
    repo, branch, alias = p.repo, p.branch, p.alias
    name = alias or p.repo.name
    log.info(f"Fetching last commit for plugin {name} from {repo.uri}@{branch}")
    commit, date = repo.latest_commit()
    cached_plugin = cache[commit] if cache else None
    if cached_plugin is not None:
        log.debug("Cache hit!")
        cached_plugin.name = name
        cached_plugin.date = date
        return cached_plugin, repo.redirect

    has_submodules = repo.has_submodules()
    log.debug(f"prefetch {name}")
    sha256 = repo.prefetch(commit)

    return (
        Plugin(name, commit, has_submodules, sha256, date=date),
        repo.redirect,
    )
def print_download_error(plugin: PluginDesc, ex: Exception):
    print(f"{plugin}: {ex}", file=sys.stderr)
    ex_traceback = ex.__traceback__
    tb_lines = [
        line.rstrip("\n")
        for line in traceback.format_exception(ex.__class__, ex, ex_traceback)
    ]
    print("\n".join(tb_lines))
def check_results(
    results: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]]
) -> Tuple[List[Tuple[PluginDesc, Plugin]], Redirects]:
    failures: List[Tuple[PluginDesc, Exception]] = []
    plugins = []
    redirects: Redirects = {}
    for (pdesc, result, redirect) in results:
        if isinstance(result, Exception):
            failures.append((pdesc, result))
        else:
            new_pdesc = pdesc
            if redirect is not None:
                redirects.update({pdesc: redirect})
                new_pdesc = PluginDesc(redirect, pdesc.branch, pdesc.alias)
            plugins.append((new_pdesc, result))

    print(f"{len(results) - len(failures)} plugins were checked", end="")
    if len(failures) == 0:
        print()
        return plugins, redirects
    else:
        print(f", {len(failures)} plugin(s) could not be downloaded:\n")

        for (plugin, exception) in failures:
            print_download_error(plugin, exception)

        sys.exit(1)
def make_repo(uri: str, branch) -> Repo:
    '''Instantiate a Repo with the correct specialization depending on server (github spec)'''
    # dumb check to see if it's of the form owner/repo (=> github) or https://...
    res = urlparse(uri)
    if res.netloc in ["github.com", ""]:
        res = res.path.strip('/').split('/')
        repo = RepoGitHub(res[0], res[1], branch)
    else:
        repo = Repo(uri.strip(), branch)
    return repo
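# Illustrative behavior (examples, not from the original file):
#   make_repo("owner/repo", "HEAD")              -> RepoGitHub("owner", "repo", "HEAD")
#   make_repo("https://gitlab.com/a/b", "HEAD")  -> plain Repo fetched via nix-prefetch-git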
def get_cache_path(cache_file_name: str) -> Optional[Path]:
    xdg_cache = os.environ.get("XDG_CACHE_HOME", None)
    if xdg_cache is None:
        home = os.environ.get("HOME", None)
        if home is None:
            return None
        xdg_cache = str(Path(home, ".cache"))

    return Path(xdg_cache, cache_file_name)
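# Illustrative (not from the original file): with XDG_CACHE_HOME unset and
# HOME=/home/alice, get_cache_path("vim-plugin-cache.json") returns
# Path("/home/alice/.cache/vim-plugin-cache.json").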
class Cache:
    def __init__(self, initial_plugins: List[Plugin], cache_file_name: str) -> None:
        self.cache_file = get_cache_path(cache_file_name)

        downloads: Dict[str, Plugin] = {}
        for plugin in initial_plugins:
            downloads[plugin.commit] = plugin
        downloads.update(self.load())
        self.downloads = downloads
    def load(self) -> Dict[str, Plugin]:
        if self.cache_file is None or not self.cache_file.exists():
            return {}

        downloads: Dict[str, Plugin] = {}
        with open(self.cache_file) as f:
            data = json.load(f)
            for attr in data.values():
                p = Plugin(
                    attr["name"], attr["commit"], attr["has_submodules"], attr["sha256"]
                )
                downloads[attr["commit"]] = p
        return downloads
) -> None:
581 if self
.cache_file
is None:
584 os
.makedirs(self
.cache_file
.parent
, exist_ok
=True)
585 with
open(self
.cache_file
, "w+") as f
:
587 for name
, attr
in self
.downloads
.items():
588 data
[name
] = attr
.as_json()
589 json
.dump(data
, f
, indent
=4, sort_keys
=True)
    def __getitem__(self, key: str) -> Optional[Plugin]:
        return self.downloads.get(key, None)

    def __setitem__(self, key: str, value: Plugin) -> None:
        self.downloads[key] = value
: PluginDesc
, cache
: Cache
600 ) -> Tuple
[PluginDesc
, Union
[Exception, Plugin
], Optional
[Repo
]]:
602 plugin
, redirect
= prefetch_plugin(pluginDesc
, cache
)
603 cache
[plugin
.commit
] = plugin
604 return (pluginDesc
, plugin
, redirect
)
605 except Exception as e
:
606 return (pluginDesc
, e
, None)
def rewrite_input(
    config: FetchConfig,
    input_file: Path,
    deprecated: Path,
    # old pluginDesc and the new
    redirects: Redirects = {},
    append: List[PluginDesc] = [],
):
    plugins = load_plugins_from_csv(config, input_file,)

    plugins.extend(append)

    if redirects:
        cur_date_iso = datetime.now().strftime("%Y-%m-%d")
        with open(deprecated, "r") as f:
            deprecations = json.load(f)
        for pdesc, new_repo in redirects.items():
            new_pdesc = PluginDesc(new_repo, pdesc.branch, pdesc.alias)
            old_plugin, _ = prefetch_plugin(pdesc)
            new_plugin, _ = prefetch_plugin(new_pdesc)
            if old_plugin.normalized_name != new_plugin.normalized_name:
                deprecations[old_plugin.normalized_name] = {
                    "new": new_plugin.normalized_name,
                    "date": cur_date_iso,
                }
        with open(deprecated, "w") as f:
            json.dump(deprecations, f, indent=4, sort_keys=True)

    with open(input_file, "w") as f:
        log.debug("Writing into %s", input_file)
        # fields = dataclasses.fields(PluginDesc)
        fieldnames = ['repo', 'branch', 'alias']
        writer = csv.DictWriter(f, fieldnames, dialect='unix', quoting=csv.QUOTE_NONE)
        writer.writeheader()
        for plugin in sorted(plugins):
            writer.writerow(asdict(plugin))
def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
    repo.index.add([str(f.resolve()) for f in files])

    if repo.index.diff("HEAD"):
        print(f'committing to nixpkgs "{message}"')
        repo.index.commit(message)
    else:
        print("no changes in working tree to commit")
def update_plugins(editor: Editor, args):
    """The main entry function of this module. All input arguments are grouped in the `Editor`."""

    log.setLevel(LOG_LEVELS[args.debug])
    log.info("Start updating plugins")
    fetch_config = FetchConfig(args.proc, args.github_token)
    update = editor.get_update(args.input_file, args.outfile, fetch_config)

    redirects = update()
    editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, redirects)

    autocommit = not args.no_commit

    if autocommit:
        editor.nixpkgs_repo = git.Repo(editor.root, search_parent_directories=True)
        commit(editor.nixpkgs_repo, f"{editor.attr_path}: update", [args.outfile])

    if redirects:
        update()
        if autocommit:
            commit(
                editor.nixpkgs_repo,
                f"{editor.attr_path}: resolve github repository redirects",
                [args.outfile, args.input_file, editor.deprecated],
            )

    for plugin_line in args.add_plugins:
        pdesc = PluginDesc.load_from_string(fetch_config, plugin_line)
        append = [pdesc]
        editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, append=append)
        update()
        plugin, _ = prefetch_plugin(pdesc, )
        if autocommit:
            commit(
                editor.nixpkgs_repo,
                "{drv_name}: init at {version}".format(
                    drv_name=editor.get_drv_name(plugin.normalized_name),
                    version=plugin.version
                ),
                [args.outfile, args.input_file],
            )