1 #!/usr/bin/env nix-shell
2 #! nix-shell -i python -p "python3.withPackages (ps: with ps; [ ps.absl-py ps.requests ])" nix
import copy
import json
import os.path
import subprocess
from collections import defaultdict
from dataclasses import dataclass
from typing import Callable, Dict

import requests
from absl import app, flags, logging
# Factorio's JSON API listing the latest stable/experimental releases.
FACTORIO_API = "https://factorio.com/api/latest-releases"

# NOTE(review): restored — later code reads FLAGS.username, FLAGS.token,
# FLAGS.out, FLAGS.release_type and FLAGS.release_channel, which requires
# this module-level binding.
FLAGS = flags.FLAGS

flags.DEFINE_string('username', '', 'Factorio username for retrieving binaries.')
flags.DEFINE_string('token', '', 'Factorio token for retrieving binaries.')
flags.DEFINE_string('out', '', 'Output path for versions.json.')
flags.DEFINE_list('release_type', '', 'If non-empty, a comma-separated list of release types to update (e.g. alpha).')
flags.DEFINE_list('release_channel', '', 'If non-empty, a comma-separated list of release channels to update (e.g. experimental).')
40 needs_auth
: bool = False
# Shape of the factorio.com latest-releases API response:
# release-channel name -> (release-type name -> version string).
FactorioVersionsJSON = Dict[str, Dict[str, str]]
# Shape of our versions.json:
# nix system name -> release type -> release channel -> release-info fields
# (name/url/needsAuth/tarDirectory/... as built in generate_our_versions).
OurVersionJSON = Dict[str, Dict[str, Dict[str, Dict[str, str]]]]
53 System(nix_name
="x86_64-linux", url_name
="linux64", tar_name
="x64"),
57 ReleaseType("alpha", needs_auth
=True),
59 ReleaseType("headless"),
63 ReleaseChannel("experimental"),
64 ReleaseChannel("stable"),
def find_versions_json() -> str:
    """Locate the versions.json file to read/update.

    Honors --out when set; otherwise probes known relative paths so the
    script works both from the repo root and from the factorio directory.

    Raises:
        Exception: if no candidate path exists and --out was not given.
    """
    if FLAGS.out:
        return FLAGS.out
    try_paths = ["pkgs/games/factorio/versions.json", "versions.json"]
    for path in try_paths:
        if os.path.exists(path):
            # Restored: the exists-check branch must yield the found path.
            return path
    raise Exception("Couldn't figure out where to write versions.json; try specifying --out")
def fetch_versions() -> FactorioVersionsJSON:
    """Fetch the latest release numbers from the Factorio API.

    Uses the FACTORIO_API constant instead of duplicating the URL literal
    (same URL, single source of truth).
    """
    return json.loads(requests.get(FACTORIO_API).text)
def generate_our_versions(factorio_versions: FactorioVersionsJSON) -> OurVersionJSON:
    """Transform the Factorio API response into our versions.json structure.

    Produces system -> release-type -> release-channel -> release-info,
    where release-info carries everything the nix expression needs to
    fetch and unpack the tarball (sha256 is filled in later).
    """
    # PEP8: a named def instead of a lambda assignment (E731).
    def rec_dd():
        # Arbitrarily-nested defaultdict so leaves can be assigned directly.
        return defaultdict(rec_dd)
    output = rec_dd()

    # Deal with times where there's no experimental version
    for rc in RELEASE_CHANNELS:
        if not factorio_versions[rc.name]:
            factorio_versions[rc.name] = factorio_versions['stable']

    for system in SYSTEMS:
        for release_type in RELEASE_TYPES:
            for release_channel in RELEASE_CHANNELS:
                version = factorio_versions[release_channel.name].get(release_type.name)
                if version is None:
                    # Not every release type exists in every channel.
                    continue
                this_release = {
                    "name": f"factorio_{release_type.name}_{system.tar_name}-{version}.tar.xz",
                    "url": f"https://factorio.com/get-download/{version}/{release_type.name}/{system.url_name}",
                    # "version" is required: merge_versions compares it when
                    # deciding whether a cached sha256 is still valid.
                    "version": version,
                    "needsAuth": release_type.needs_auth,
                    "tarDirectory": system.tar_name,
                }
                output[system.nix_name][release_type.name][release_channel.name] = this_release
    return output
108 def iter_version(versions
: OurVersionJSON
, it
: Callable
[[str, str, str, Dict
[str, str]], Dict
[str, str]]) -> OurVersionJSON
:
109 versions
= copy
.deepcopy(versions
)
110 for system_name
, system
in versions
.items():
111 for release_type_name
, release_type
in system
.items():
112 for release_channel_name
, release
in release_type
.items():
113 release_type
[release_channel_name
] = it(system_name
, release_type_name
, release_channel_name
, dict(release
))
def merge_versions(old: OurVersionJSON, new: OurVersionJSON) -> OurVersionJSON:
    """Copies already-known hashes from version.json to avoid having to re-fetch."""
    def _merge_version(system_name: str, release_type_name: str, release_channel_name: str, release: Dict[str, str]) -> Dict[str, str]:
        old_system = old.get(system_name, {})
        old_release_type = old_system.get(release_type_name, {})
        old_release = old_release_type.get(release_channel_name, {})
        if FLAGS.release_type and release_type_name not in FLAGS.release_type:
            logging.info("%s/%s/%s: not in --release_type, not updating", system_name, release_type_name, release_channel_name)
            # "not updating": keep the previously-recorded entry as-is.
            return old_release
        if FLAGS.release_channel and release_channel_name not in FLAGS.release_channel:
            logging.info("%s/%s/%s: not in --release_channel, not updating", system_name, release_type_name, release_channel_name)
            return old_release
        if not "sha256" in old_release:
            logging.info("%s/%s/%s: not copying sha256 since it's missing", system_name, release_type_name, release_channel_name)
            # Nothing cached to copy; fall through to a fresh fetch later.
            return release
        if not all(old_release.get(k, None) == release[k] for k in ['name', 'version', 'url']):
            logging.info("%s/%s/%s: not copying sha256 due to mismatch", system_name, release_type_name, release_channel_name)
            # The release changed; the old hash no longer applies.
            return release
        release["sha256"] = old_release["sha256"]
        return release
    return iter_version(new, _merge_version)
def nix_prefetch_url(name: str, url: str, algo: str = 'sha256') -> str:
    """Prefetch `url` into the nix store and return its hash.

    Shells out to nix-prefetch-url, naming the store entry `name` and
    hashing with `algo` (sha256 by default).
    """
    command = ['nix-prefetch-url', '--type', algo, '--name', name, url]
    logging.info('running %s', command)
    raw_output = subprocess.check_output(command)
    return raw_output.decode('utf-8').strip()
def fill_in_hash(versions: OurVersionJSON) -> OurVersionJSON:
    """Fill in sha256 hashes for anything missing them."""
    # Cache of url -> sha256, so identical URLs (e.g. stable == experimental
    # after the fallback in generate_our_versions) are fetched only once.
    urls_to_hash = {}
    def _fill_in_hash(system_name: str, release_type_name: str, release_channel_name: str, release: Dict[str, str]) -> Dict[str, str]:
        if "sha256" in release:
            logging.info("%s/%s/%s: skipping fetch, sha256 already present", system_name, release_type_name, release_channel_name)
            return release
        url = release["url"]
        if url in urls_to_hash:
            logging.info("%s/%s/%s: found url %s in cache", system_name, release_type_name, release_channel_name, url)
            release["sha256"] = urls_to_hash[url]
            return release
        logging.info("%s/%s/%s: fetching %s", system_name, release_type_name, release_channel_name, url)
        if release["needsAuth"]:
            if not FLAGS.username or not FLAGS.token:
                raise Exception("fetching %s/%s/%s from %s requires --username and --token" % (system_name, release_type_name, release_channel_name, url))
            url += f"?username={FLAGS.username}&token={FLAGS.token}"
        release["sha256"] = nix_prefetch_url(release["name"], url)
        urls_to_hash[url] = release["sha256"]
        return release
    return iter_version(versions, _fill_in_hash)
def main(argv):
    """Entry point: fetch releases, merge cached hashes, fill gaps, write JSON.

    NOTE(review): the `def main(argv):` header, the `if old_our_versions:`
    guard, and `app.run(main)` were restored around the visible statements —
    the absl flags used throughout require an app.run()-style entry point.
    """
    factorio_versions = fetch_versions()
    new_our_versions = generate_our_versions(factorio_versions)
    old_our_versions = None
    our_versions_path = find_versions_json()
    if our_versions_path:
        logging.info('Loading old versions.json from %s', our_versions_path)
        with open(our_versions_path, 'r') as f:
            old_our_versions = json.load(f)
    if old_our_versions:
        logging.info('Merging in old hashes')
        new_our_versions = merge_versions(old_our_versions, new_our_versions)
    logging.info('Fetching necessary tars to get hashes')
    new_our_versions = fill_in_hash(new_our_versions)
    with open(our_versions_path, 'w') as f:
        logging.info('Writing versions.json to %s', our_versions_path)
        json.dump(new_our_versions, f, sort_keys=True, indent=2)


if __name__ == '__main__':
    app.run(main)