1 #! /usr/bin/env nix-shell
2 #! nix-shell -i python -p python3.pkgs.joblib python3.pkgs.click python3.pkgs.click-log nix nix-prefetch-git nix-universal-prefetch prefetch-yarn-deps prefetch-npm-deps
17 from joblib
import Parallel
, delayed
, Memory
18 from codecs
import iterdecode
19 from datetime
import datetime
20 from urllib
.request
import urlopen
# Run from the directory containing this script so relative paths
# ("cache", "info.json") resolve the same regardless of invocation cwd.
os.chdir(os.path.dirname(__file__))

# Check out a pinned depot_tools: its gclient modules are needed to parse
# chromium/electron DEPS files.
depot_tools_checkout = tempfile.TemporaryDirectory()
subprocess.check_call([
    # NOTE(review): executable name reconstructed — the visible argv began
    # with flags only, which check_call cannot execute.
    "nix-prefetch-git",
    "--builder", "--quiet",
    "--url", "https://chromium.googlesource.com/chromium/tools/depot_tools",
    "--out", depot_tools_checkout.name,
    "--rev", "7a69b031d58081d51c9e8e89557b343bba8518b1"])
sys.path.append(depot_tools_checkout.name)

# Importable only after depot_tools is on sys.path; used by Repo.get_deps
# and repo_from_dep. NOTE(review): these import lines are missing from the
# visible text but both modules are referenced later in this file.
import gclient_eval  # noqa: E402
import gclient_utils  # noqa: E402

# Disk-backed memoization ("cache" directory) for the expensive prefetch
# helpers below.
memory = Memory("cache", verbose=0)
def get_repo_hash(fetcher, args):
    """Prefetch one repository with nix-universal-prefetch and return its
    store hash string (e.g. "sha256-...").

    fetcher: nix fetcher name, e.g. 'fetchgit' or 'fetchFromGitHub'.
    args:    mapping of fetcher argument name -> value, passed on the
             command line as `--<name> <value>` pairs.
    """
    cmd = ['nix-universal-prefetch', fetcher]
    for arg_name, arg in args.items():
        cmd.append(f'--{arg_name}')
        # The value must follow its flag; this append was missing from the
        # visible text, which would have produced flags with no values.
        cmd.append(arg)
    print(" ".join(cmd), file=sys.stderr)
    out = subprocess.check_output(cmd)
    return out.decode('utf-8').strip()
def _get_yarn_hash(file):
    """Return the fixed-output hash for the yarn dependencies described by
    *file* (the full text of a yarn.lock), via prefetch-yarn-deps."""
    # Plain string: the original used an f-string with no placeholders.
    print('prefetch-yarn-deps', file=sys.stderr)
    with tempfile.TemporaryDirectory() as tmp_dir:
        lock_path = tmp_dir + '/yarn.lock'
        with open(lock_path, 'w') as f:
            # Restored: without this write the lock file handed to
            # prefetch-yarn-deps would be empty.
            f.write(file)
        return subprocess.check_output(['prefetch-yarn-deps', lock_path]).decode('utf-8').strip()
def get_yarn_hash(repo, yarn_lock_path='yarn.lock'):
    """Fetch *yarn_lock_path* from *repo* and hash its yarn dependencies."""
    lock_text = repo.get_file(yarn_lock_path)
    return _get_yarn_hash(lock_text)
def _get_npm_hash(file):
    """Return the fixed-output hash for the npm dependencies described by
    *file* (the full text of a package-lock.json), via prefetch-npm-deps."""
    # Plain string: the original used an f-string with no placeholders.
    print('prefetch-npm-deps', file=sys.stderr)
    with tempfile.TemporaryDirectory() as tmp_dir:
        lock_path = tmp_dir + '/package-lock.json'
        with open(lock_path, 'w') as f:
            # Restored: without this write the lock file handed to
            # prefetch-npm-deps would be empty.
            f.write(file)
        return subprocess.check_output(['prefetch-npm-deps', lock_path]).decode('utf-8').strip()
def get_npm_hash(repo, package_lock_path='package-lock.json'):
    """Fetch *package_lock_path* from *repo* and hash its npm dependencies."""
    lock_text = repo.get_file(package_lock_path)
    return _get_npm_hash(lock_text)
def __init__(self):
    # NOTE(review): signature and deps-init reconstructed — only the hash
    # assignment is visible, but get_deps/flatten iterate self.deps, so it
    # must start as an empty dict. Confirm against upstream.
    self.deps = {}
    # Placeholder hash, overwritten by prefetch(); keeps flatten() output
    # well-formed even before prefetching runs.
    self.hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
def get_deps(self, repo_vars, path):
    """Evaluate this repo's DEPS file and populate self.deps with the
    sub-repositories enabled under *repo_vars*, recursing into entries
    listed in 'recursedeps'.

    repo_vars: gclient variable overrides, merged over the DEPS vars.
    path:      checkout path of this repo, used to prefix relative deps.
    """
    print("evaluating " + json.dumps(self, default=vars), file=sys.stderr)

    deps_file = self.get_file("DEPS")
    evaluated = gclient_eval.Parse(deps_file, filename='DEPS')

    # DEPS-declared vars, with caller-supplied overrides taking precedence.
    repo_vars = dict(evaluated["vars"]) | repo_vars

    prefix = f"{path}/" if evaluated.get("use_relative_paths", False) else ""

    # Keep only deps whose gclient condition holds and that map to a git
    # repository. The walrus binding evaluates repo_from_dep once per dep;
    # the original called it twice (once in the filter, once for the value).
    self.deps = {
        prefix + dep_name: repo
        for dep_name, dep in evaluated["deps"].items()
        if (gclient_eval.EvaluateCondition(dep["condition"], repo_vars)
            if "condition" in dep else True)
        and (repo := repo_from_dep(dep)) is not None
    }

    for key in evaluated.get("recursedeps", []):
        dep_path = prefix + key
        # NOTE(review): squirrel.mac is deliberately excluded from
        # recursion; the reason is not visible here — confirm upstream.
        if dep_path in self.deps and dep_path != "src/third_party/squirrel.mac":
            self.deps[dep_path].get_deps(repo_vars, dep_path)
def prefetch(self):
    # Resolve this repository's fixed-output hash and record it.
    # NOTE(review): signature reconstructed — delayed(self.prefetch)() in
    # prefetch_all implies a no-argument method; confirm upstream.
    self.hash = get_repo_hash(self.fetcher, self.args)
def prefetch_all(self):
    """Return joblib task objects that prefetch this repo and, recursively,
    every one of its dependencies (this repo's task first)."""
    tasks = [delayed(self.prefetch)()]
    # Iterate values directly (keys were unused), and extend instead of the
    # original sum() over lists, which is quadratic in the task count.
    for dep in self.deps.values():
        tasks.extend(dep.prefetch_all())
    return tasks
def flatten_repr(self):
    # Serializable summary of this repo: fetcher name, resolved hash and
    # the fetcher arguments, merged into one flat dict.
    # NOTE(review): the 'hash' entry and the **self.args spread are
    # reconstructed — only the 'fetcher' entry is visible; confirm upstream.
    return {
        "fetcher": self.fetcher,
        "hash": self.hash,
        **self.args,
    }
def flatten(self, path):
    """Flatten this repo and all transitive dependencies into a single
    dict keyed by checkout path."""
    out = {path: self.flatten_repr()}
    for dep_path, dep in self.deps.items():
        out |= dep.flatten(dep_path)
    return out
def __init__(self, url, rev):
    # Plain git remote, fetched with nix's 'fetchgit'.
    super().__init__()
    self.fetcher = 'fetchgit'
    # NOTE(review): args dict reconstructed to mirror GitilesRepo — only
    # the fetcher assignment is visible; confirm upstream.
    self.args = {
        "url": url,
        "rev": rev,
    }
class GitHubRepo(Repo):
    """Repository hosted on github.com, fetched with 'fetchFromGitHub'."""

    def __init__(self, owner, repo, rev):
        super().__init__()
        self.fetcher = 'fetchFromGitHub'
        # NOTE(review): args dict reconstructed from get_file's usage of
        # self.args below; confirm upstream.
        self.args = {
            "owner": owner,
            "repo": repo,
            "rev": rev,
        }

    def get_file(self, filepath):
        """Return the decoded text of *filepath* at the pinned rev, fetched
        from raw.githubusercontent.com."""
        return urlopen(f"https://raw.githubusercontent.com/{self.args['owner']}/{self.args['repo']}/{self.args['rev']}/{filepath}").read().decode('utf-8')
class GitilesRepo(Repo):
    """Repository served by a *.googlesource.com gitiles instance, fetched
    with 'fetchFromGitiles'."""

    def __init__(self, url, rev):
        super().__init__()
        self.fetcher = 'fetchFromGitiles'
        #self.fetcher = 'fetchgit'
        # NOTE(review): args dict reconstructed from get_file's usage of
        # self.args below; confirm upstream.
        self.args = {
            "url": url,
            "rev": rev,
            #"fetchSubmodules": "false",
        }

        # The chromium tree carries huge test corpora that are irrelevant
        # for building; delete them after fetching so the fixed-output
        # derivation stays smaller.
        if url == "https://chromium.googlesource.com/chromium/src.git":
            self.args['postFetch'] = "".join(
                f"rm -r $out/{pruned}; "
                for pruned in (
                    "third_party/blink/web_tests",
                    "third_party/hunspell/tests",
                    "content/test/data",
                    "courgette/testdata",
                    "extensions/test/data",
                    "media/test/data",
                ))

    def get_file(self, filepath):
        """Return the decoded text of *filepath* at the pinned rev; gitiles
        serves file contents base64-encoded when queried with ?format=TEXT."""
        return base64.b64decode(urlopen(f"{self.args['url']}/+/{self.args['rev']}/{filepath}?format=TEXT").read()).decode('utf-8')
def repo_from_dep(dep):
    """Map one gclient DEPS entry to a Repo instance, or None for non-git
    (e.g. cipd package) dependencies."""
    if "url" in dep:
        url, rev = gclient_utils.SplitUrlRevision(dep["url"])

        search_object = re.search(r'https://github.com/(.+)/(.+?)(\.git)?$', url)
        if search_object:
            return GitHubRepo(search_object.group(1), search_object.group(2), rev)

        # Dots escaped: the original pattern's bare '.' matched any
        # character, so hosts like 'evilgooglesource.com' would also match.
        if re.match(r'https://.+\.googlesource\.com', url):
            return GitilesRepo(url, rev)

        return GitRepo(url, rev)
    else:
        # Not a git dependency; skip
        return None
def get_gn_source(repo):
    """Extract the pinned gn revision from *repo*'s DEPS file, prefetch it
    and return a {"gn": {...}} dict for info.json.

    NOTE(review): the nix-prefetch-git invocation and the returned dict are
    partially reconstructed — only the URL and the 'version' member are
    visible; confirm against upstream.
    """
    gn_pattern = r"'gn_version': 'git_revision:([0-9a-f]{40})'"
    gn_commit = re.search(gn_pattern, repo.get_file("DEPS")).group(1)
    gn = subprocess.check_output([
        "nix-prefetch-git",
        "--quiet",
        "https://gn.googlesource.com/gn",
        "--rev", gn_commit,
    ])
    gn = json.loads(gn)
    return {
        "gn": {
            # nix-prefetch-git reports an ISO timestamp; keep only the date.
            "version": datetime.fromisoformat(gn["date"]).date().isoformat(),
            "url": gn["url"],
            "rev": gn["rev"],
            "hash": gn["hash"],
        }
    }
def get_electron_info(major_version):
    """Find the newest published release of the given electron major
    version and evaluate its dependency tree (without prefetching).

    Returns a (major_version, release_metadata_dict, electron_repo) triple.
    """
    releases = json.loads(urlopen("https://releases.electronjs.org/releases.json").read())
    candidates = (item for item in releases if item["version"].startswith(f"{major_version}."))
    m = max(candidates, key=lambda item: item["date"])

    rev = f"v{m['version']}"

    electron_repo = GitHubRepo("electron", "electron", rev)
    electron_repo.recurse = True

    # Evaluate DEPS as a linux-only checkout: every checkout_<platform>
    # variable is False except checkout_linux.
    # NOTE(review): the closing arguments of this call are reconstructed;
    # "src/electron" matches the flatten() calls elsewhere in this file.
    electron_repo.get_deps({
        f"checkout_{platform}": platform == "linux"
        for platform in ["ios", "chromeos", "android", "mac", "win", "linux"]
    }, "src/electron")

    return (major_version, m, electron_repo)
logger = logging.getLogger(__name__)
click_log.basic_config(logger)


# NOTE(review): the click group is referenced by the @cli.command(...)
# decorators later in this file but its definition is missing from the
# visible text; restored here.
@click.group()
def cli():
    """Manage the pinned electron sources (eval / update / update-all)."""
    pass
@cli.command("eval")
@click.option("--version", help="The major version, e.g. '23'")
def eval(version):
    """Print the evaluated (unprefetched) dependency tree as JSON."""
    # Unpack the repo from the (major_version, release, repo) triple. The
    # garbled original bound the whole tuple to electron_repo and then
    # called .flatten() on it, which cannot work.
    (_, _, electron_repo) = get_electron_info(version)
    tree = electron_repo.flatten("src/electron")
    print(json.dumps(tree, indent=4, default=vars))
def get_update(repo):
    """Prefetch everything for one (major_version, release, repo) triple
    and return a (version_key, info_dict) pair for info.json."""
    (major_version, m, electron_repo) = repo

    # Hash-prefetch tasks for the whole dependency tree, plus the yarn and
    # npm lockfile hashes, all run in one joblib pool below.
    tasks = electron_repo.prefetch_all()

    # Named closures instead of the original's reused name 'a', which made
    # the two tasks easy to confuse.
    def yarn_task():
        return ("electron_yarn_hash", get_yarn_hash(electron_repo))
    tasks.append(delayed(yarn_task)())

    def npm_task():
        return ("chromium_npm_hash", get_npm_hash(electron_repo.deps["src"], "third_party/node/package-lock.json"))
    tasks.append(delayed(npm_task)())
    random.shuffle(tasks)  # spread the expensive tasks across workers

    # prefetch() tasks yield None; keep only the (key, value) results.
    # 'is not None' replaces the original '!= None'.
    task_results = {n[0]: n[1] for n in Parallel(n_jobs=3, require='sharedmem', return_as="generator")(tasks) if n is not None}

    tree = electron_repo.flatten("src/electron")

    return (f"{major_version}", {
        # NOTE(review): the 'deps', 'chromium' and **task_results members
        # are partially reconstructed — the visible text only shows the
        # release-metadata spread and chromium's 'version'/'deps' entries.
        "deps": tree,
        **{key: m[key] for key in ["version", "modules", "chrome", "node"]},
        "chromium": {
            "version": m['chrome'],
            "deps": get_gn_source(electron_repo.deps["src"]),
        },
        **task_results,
    })
@cli.command("update")
@click.option("--version", help="The major version, e.g. '23'")
def update(version):
    """Refresh info.json with the latest release of one major version.

    NOTE(review): the function signature is reconstructed — only the
    decorators and body statements are visible; confirm upstream.
    """
    with open('info.json', 'r') as f:
        old_info = json.loads(f.read())

    repo = get_electron_info(version)
    new_entry = get_update(repo)  # renamed local: it shadowed this function
    out = old_info | {new_entry[0]: new_entry[1]}
    with open('info.json', 'w') as f:
        f.write(json.dumps(out, indent=4, default=vars))
@cli.command("update-all")
def update_all():
    """Regenerate info.json for every supported major version (28 down
    to 25).

    NOTE(review): the function signature is reconstructed — only the
    decorator and body statements are visible; confirm upstream.
    """
    majors = range(28, 24, -1)
    repos = Parallel(n_jobs=2, require='sharedmem')(
        delayed(get_electron_info)(major) for major in majors)
    updates = Parallel(n_jobs=2, require='sharedmem')(
        delayed(get_update)(repo) for repo in repos)
    out = {version_key: info for version_key, info in updates}

    with open('info.json', 'w') as f:
        f.write(json.dumps(out, indent=4, default=vars))
278 if __name__
== "__main__":