#! /usr/bin/env nix-shell
#! nix-shell -i python -p python3.pkgs.joblib python3.pkgs.click python3.pkgs.click-log nix nix-prefetch-git nix-universal-prefetch prefetch-yarn-deps prefetch-npm-deps
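
# Updater for the Electron source pins in nixpkgs: it walks the gclient DEPS
# tree of the newest release of an Electron major version, prefetches every
# dependency, and records the fetcher arguments and hashes in info.json.
#
# Example invocations (the eval/update/update-all commands are defined at the
# bottom of this file):
#
#   ./update.py eval --version 28      # print the flattened dependency tree
#   ./update.py update --version 28    # refresh one major version in info.json
#   ./update.py update-all             # refresh all supported major versions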
import logging
import click_log
import click
import random
import traceback
import csv
import base64
import os
import re
import tempfile
import subprocess
import json
import sys
from joblib import Parallel, delayed, Memory
from codecs import iterdecode
from datetime import datetime
from urllib.request import urlopen

os.chdir(os.path.dirname(__file__))
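
# Fetch a pinned depot_tools checkout and put it on sys.path so we can reuse
# Chromium's own gclient DEPS parser (gclient_eval, gclient_utils) instead of
# reimplementing the DEPS format here.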
depot_tools_checkout = tempfile.TemporaryDirectory()
subprocess.check_call([
    "nix-prefetch-git",
    "--builder", "--quiet",
    "--url", "https://chromium.googlesource.com/chromium/tools/depot_tools",
    "--out", depot_tools_checkout.name,
    "--rev", "7a69b031d58081d51c9e8e89557b343bba8518b1"])
sys.path.append(depot_tools_checkout.name)

import gclient_eval
import gclient_utils
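
# joblib on-disk cache (./cache, relative to this script): re-running the
# updater skips prefetches whose inputs have not changed.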
memory = Memory("cache", verbose=0)

@memory.cache
def get_repo_hash(fetcher, args):
    # Shell out to nix-universal-prefetch to compute the fixed-output hash
    # for the given fetcher and its arguments.
    cmd = ['nix-universal-prefetch', fetcher]
    for arg_name, arg in args.items():
        cmd.append(f'--{arg_name}')
        cmd.append(arg)

    print(" ".join(cmd), file=sys.stderr)
    out = subprocess.check_output(cmd)
    return out.decode('utf-8').strip()
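
# prefetch-yarn-deps / prefetch-npm-deps compute the fixed-output hash of the
# dependencies in a yarn.lock / package-lock.json. Both tools want a file on
# disk, so the lockfile fetched from the repo is written to a temporary
# directory first.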
@memory.cache
def _get_yarn_hash(file):
    print('prefetch-yarn-deps', file=sys.stderr)
    with tempfile.TemporaryDirectory() as tmp_dir:
        with open(tmp_dir + '/yarn.lock', 'w') as f:
            f.write(file)
        return subprocess.check_output(['prefetch-yarn-deps', tmp_dir + '/yarn.lock']).decode('utf-8').strip()

def get_yarn_hash(repo, yarn_lock_path='yarn.lock'):
    return _get_yarn_hash(repo.get_file(yarn_lock_path))

@memory.cache
def _get_npm_hash(file):
    print('prefetch-npm-deps', file=sys.stderr)
    with tempfile.TemporaryDirectory() as tmp_dir:
        with open(tmp_dir + '/package-lock.json', 'w') as f:
            f.write(file)
        return subprocess.check_output(['prefetch-npm-deps', tmp_dir + '/package-lock.json']).decode('utf-8').strip()

def get_npm_hash(repo, package_lock_path='package-lock.json'):
    return _get_npm_hash(repo.get_file(package_lock_path))
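
# A Repo is one fetchable source in the DEPS tree. Subclasses choose the
# nixpkgs fetcher (fetchgit, fetchFromGitHub, fetchFromGitiles) and know how
# to retrieve single files, so nested DEPS files can be parsed without doing
# a full checkout.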
class Repo:
    def __init__(self):
        self.deps = {}
        # Placeholder hash, replaced by prefetch().
        self.hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="

    def get_deps(self, repo_vars, path):
        print("evaluating " + json.dumps(self, default=vars), file=sys.stderr)

        deps_file = self.get_file("DEPS")
        evaluated = gclient_eval.Parse(deps_file, filename='DEPS')

        repo_vars = dict(evaluated["vars"]) | repo_vars

        prefix = f"{path}/" if evaluated.get("use_relative_paths", False) else ""

        self.deps = {
            prefix + dep_name: repo_from_dep(dep)
            for dep_name, dep in evaluated["deps"].items()
            if (gclient_eval.EvaluateCondition(dep["condition"], repo_vars) if "condition" in dep else True) and repo_from_dep(dep) is not None
        }

        for key in evaluated.get("recursedeps", []):
            dep_path = prefix + key
            if dep_path in self.deps and dep_path != "src/third_party/squirrel.mac":
                self.deps[dep_path].get_deps(repo_vars, dep_path)

    def prefetch(self):
        self.hash = get_repo_hash(self.fetcher, self.args)

    def prefetch_all(self):
        # Collect a flat list of delayed prefetch tasks for this repo and all
        # of its transitive dependencies.
        return sum([dep.prefetch_all() for dep in self.deps.values()], [delayed(self.prefetch)()])

    def flatten_repr(self):
        return {
            "fetcher": self.fetcher,
            "hash": self.hash,
            **self.args
        }

    def flatten(self, path):
        out = {
            path: self.flatten_repr()
        }
        for dep_path, dep in self.deps.items():
            out |= dep.flatten(dep_path)
        return out

class GitRepo(Repo):
    def __init__(self, url, rev):
        super().__init__()
        self.fetcher = 'fetchgit'
        self.args = {
            "url": url,
            "rev": rev,
        }

class GitHubRepo(Repo):
    def __init__(self, owner, repo, rev):
        super().__init__()
        self.fetcher = 'fetchFromGitHub'
        self.args = {
            "owner": owner,
            "repo": repo,
            "rev": rev,
        }

    def get_file(self, filepath):
        # Fetch single files via raw.githubusercontent.com instead of cloning.
        return urlopen(f"https://raw.githubusercontent.com/{self.args['owner']}/{self.args['repo']}/{self.args['rev']}/{filepath}").read().decode('utf-8')

class GitilesRepo(Repo):
    def __init__(self, url, rev):
        super().__init__()
        self.fetcher = 'fetchFromGitiles'
        #self.fetcher = 'fetchgit'
        self.args = {
            "url": url,
            "rev": rev,
            #"fetchSubmodules": "false",
        }

        # Strip large test fixtures from the main Chromium checkout to keep
        # the fixed-output derivation smaller.
        if url == "https://chromium.googlesource.com/chromium/src.git":
            self.args['postFetch'] = "rm -r $out/third_party/blink/web_tests; "
            self.args['postFetch'] += "rm -r $out/third_party/hunspell/tests; "
            self.args['postFetch'] += "rm -r $out/content/test/data; "
            self.args['postFetch'] += "rm -r $out/courgette/testdata; "
            self.args['postFetch'] += "rm -r $out/extensions/test/data; "
            self.args['postFetch'] += "rm -r $out/media/test/data; "

    def get_file(self, filepath):
        # Gitiles serves file contents base64-encoded when ?format=TEXT is used.
        return base64.b64decode(urlopen(f"{self.args['url']}/+/{self.args['rev']}/{filepath}?format=TEXT").read()).decode('utf-8')
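
# Map a gclient dependency to the most specific fetcher: GitHub URLs use
# fetchFromGitHub, googlesource.com URLs use fetchFromGitiles, and anything
# else falls back to plain fetchgit.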
def repo_from_dep(dep):
    if "url" in dep:
        url, rev = gclient_utils.SplitUrlRevision(dep["url"])

        search_object = re.search(r'https://github.com/(.+)/(.+?)(\.git)?$', url)
        if search_object:
            return GitHubRepo(search_object.group(1), search_object.group(2), rev)

        if re.match(r'https://.+\.googlesource\.com', url):
            return GitilesRepo(url, rev)

        return GitRepo(url, rev)
    else:
        # Not a git dependency (e.g. a cipd package); skip it.
        return None
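
# Chromium pins its gn build tool to a git revision in its DEPS file; extract
# that commit and prefetch it with nix-prefetch-git so the same version can be
# pinned in nixpkgs.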
def get_gn_source(repo):
    gn_pattern = r"'gn_version': 'git_revision:([0-9a-f]{40})'"
    gn_commit = re.search(gn_pattern, repo.get_file("DEPS")).group(1)
    gn = subprocess.check_output([
        "nix-prefetch-git",
        "--quiet",
        "https://gn.googlesource.com/gn",
        "--rev", gn_commit
    ])
    gn = json.loads(gn)
    return {
        "gn": {
            "version": datetime.fromisoformat(gn["date"]).date().isoformat(),
            "url": gn["url"],
            "rev": gn["rev"],
            "hash": gn["hash"]
        }
    }
def get_electron_info(major_version):
    electron_releases = json.loads(urlopen("https://releases.electronjs.org/releases.json").read())
    major_version_releases = filter(lambda item: item["version"].startswith(f"{major_version}."), electron_releases)
    m = max(major_version_releases, key=lambda item: item["date"])

    rev = f"v{m['version']}"

    electron_repo = GitHubRepo("electron", "electron", rev)
    electron_repo.recurse = True

    electron_repo.get_deps({
        f"checkout_{platform}": platform == "linux"
        for platform in ["ios", "chromeos", "android", "mac", "win", "linux"]
    }, "src/electron")

    return (major_version, m, electron_repo)

logger = logging.getLogger(__name__)
click_log.basic_config(logger)

@click.group()
def cli():
    pass

@cli.command("eval")
@click.option("--version", help="The major version, e.g. '23'")
def eval(version):
    (_, _, electron_repo) = get_electron_info(version)
    tree = electron_repo.flatten("src/electron")
    print(json.dumps(tree, indent=4, default=vars))
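
# Prefetch all dependencies of a release in parallel and assemble its
# info.json entry: the flattened source tree, upstream version metadata, the
# pinned gn, and the yarn/npm lockfile hashes.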
def get_update(repo):
    (major_version, m, electron_repo) = repo

    tasks = electron_repo.prefetch_all()
    a = lambda: (
        ("electron_yarn_hash", get_yarn_hash(electron_repo))
    )
    tasks.append(delayed(a)())
    a = lambda: (
        ("chromium_npm_hash", get_npm_hash(electron_repo.deps["src"], "third_party/node/package-lock.json"))
    )
    tasks.append(delayed(a)())
    # Randomize task order, which helps spread the large downloads across workers.
    random.shuffle(tasks)

    # prefetch() returns None; only the two lockfile-hash tasks yield (key, value) pairs.
    task_results = {n[0]: n[1] for n in Parallel(n_jobs=3, require='sharedmem', return_as="generator")(tasks) if n is not None}

    tree = electron_repo.flatten("src/electron")

    return (f"{major_version}", {
        "deps": tree,
        **{key: m[key] for key in ["version", "modules", "chrome", "node"]},
        "chromium": {
            "version": m['chrome'],
            "deps": get_gn_source(electron_repo.deps["src"])
        },
        **task_results
    })

@cli.command("update")
@click.option("--version", help="The major version, e.g. '23'")
def update(version):
    try:
        with open('info.json', 'r') as f:
            old_info = json.loads(f.read())
    except (FileNotFoundError, json.JSONDecodeError):
        # Start from scratch if info.json is missing or unreadable.
        old_info = {}
    repo = get_electron_info(version)
    (version_key, version_info) = get_update(repo)
    out = old_info | {version_key: version_info}
    with open('info.json', 'w') as f:
        f.write(json.dumps(out, indent=4, default=vars))
        f.write('\n')
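
# Refresh every currently supported major version (28 down to 25) in one run.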
@cli.command("update-all")
def update_all():
    repos = Parallel(n_jobs=2, require='sharedmem')(delayed(get_electron_info)(major_version) for major_version in range(28, 24, -1))
    out = {n[0]: n[1] for n in Parallel(n_jobs=2, require='sharedmem')(delayed(get_update)(repo) for repo in repos)}

    with open('info.json', 'w') as f:
        f.write(json.dumps(out, indent=4, default=vars))
        f.write('\n')

if __name__ == "__main__":
    cli()