# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# Note: This script was written to follow the same business logic
# as make_incremental_update.sh and funsize logic. There are many
# opportunities for refactoring and improving how this works.
# Some improvement ideas:
# - The script diffs xz-compressed files. This is not optimal:
#   if we change XZ compression options, this will cause the
#   partial to have unnecessary updates.
# - Only decompress the target complete mar once
# - Separate this script into a python module with multiple files (ie: download, validation, diffing)
# - Implement caching of diffs. (Keeping in mind SCCACHE needs to
#   https://bugzilla.mozilla.org/show_bug.cgi?id=1842209)
# - Writing of the manifest file could be done at the very end
#   instead of multiple writes
# - Check file signature
# - Check ALLOWED_URL_PREFIXES
# - Check mar channel ids
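
# High-level flow: download the "from" complete mar, extract both the "from" and "to"
# mars, diff matching files with zucchini (shipping the whole file when the patch would
# be larger than the file itself), write an updatev3.manifest of add/patch/remove
# instructions, and pack everything into a partial mar.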

import argparse
import configparser
import functools
import glob
import hashlib
import json
import logging
import os
import re
import shutil
import subprocess
import traceback
import urllib.error
import urllib.request
from concurrent.futures import ProcessPoolExecutor
from tempfile import NamedTemporaryFile

# import multiprocessing

# Additional flags for XZ compression
BCJ_OPTIONS = {
    "macos-x86_64-aarch64": [],
}
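
# BCJ filters are xz's architecture-specific branch/call/jump converters; choosing the
# right filter for the target architecture improves compression of executable code.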

logging.basicConfig(level=logging.INFO)


# TODO: use logging context instead of this
# https://docs.python.org/3/howto/logging-cookbook.html#context-info
def log(msg, func=""):
    logging.info(f"[pid: {os.getpid()}] {func}: {msg}")


def xz_compression_options(arch):
    """Return the xz flags used to compress files for this architecture."""
    return (
        *BCJ_OPTIONS.get(arch, []),
    )


# Copied from scriptworker
def get_hash(path):
    h = hashlib.new("sha512")
    with open(path, "rb") as f:
        for chunk in iter(functools.partial(f.read, 4096), b""):
            h.update(chunk)
    return h.hexdigest()


# The thread-safety of this function should be ok, given that each thread only reads its own from_mar
# and the main thread reads the to_mar
def get_text_from_compressed(path):
    """Decompress an XZ file with `xz -d -c` and return its text contents."""
    proc = subprocess.run(
        ("xz", "-d", "-c", path),
        capture_output=True,
        text=True,
        check=True,
    )
    return proc.stdout


def get_option_from_compressed(directory, filename, section, option):
    """Gets an option from an XZ compressed config file"""
    log(
        f"Extracting [{section}]: {option} from {directory}/**/{filename}",
        "get_option_from_compressed",
    )
    files = list(glob.glob(f"{directory}/**/{filename}", recursive=True))
    if not files:
        raise Exception(f"Could not find {filename} in {directory}")
    f = files[0]
    contents = get_text_from_compressed(f)
    config = configparser.ConfigParser()
    config.read_string(contents)
    rv = config.get(section, option)
    log(f"Found {section}.{option}: {rv}", "get_option_from_compressed")
    return rv


def check_for_forced_update(force_list, file_path):
    """Check for files that are forced to update. Note: .chk files are always force updated"""
    # List of files that are always force updated
    always_force_updated = (
        "Contents/Resources/precomplete",
        "Contents/Resources/removed-files",
        "Contents/CodeResources",
    )
    return (
        file_path in always_force_updated
        or file_path.endswith(".chk")
        or file_path in force_list
    )
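
# Example: check_for_forced_update(force_list, "firefox.chk") is always True, since
# .chk files (and the always_force_updated paths above) are shipped whole rather than patched.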


def list_files_and_dirs(dir_path):
    """Return (files, dirs) found under dir_path, as paths relative to dir_path."""
    files = []
    dirs = []
    for root, directories, filenames in os.walk(dir_path):
        for directory in directories:
            dirs.append(os.path.relpath(os.path.join(root, directory), dir_path))
        for filename in filenames:
            files.append(os.path.relpath(os.path.join(root, filename), dir_path))
    return files, dirs
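
# Both lists are relative to dir_path, so entries from the old and new mar trees can be
# compared directly by path.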


def make_add_instruction(filename, manifest):
    """Adds an instruction to the update manifest file."""
    # Check if the path is an extension directory
    is_extension = re.search(r"distribution/extensions/.*/", filename) is not None
    if is_extension:
        # Extract the subdirectory to test before adding
        testdir = re.sub(r"(.*distribution/extensions/[^/]*)/.*", r"\1", filename)
        with open(manifest, "a") as file:
            file.write(f'add-if "{testdir}" "{filename}"\n')
    else:
        with open(manifest, "a") as file:
            file.write(f'add "{filename}"\n')
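
# For a file under distribution/extensions/, make_add_instruction() emits, e.g.:
#   add-if "distribution/extensions/some@extension" "distribution/extensions/some@extension/file"
# so the extension is only updated if its directory still exists; any other path gets a
# plain add instruction.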


def check_for_add_if_not_update(filename):
    basename = os.path.basename(filename)
    return (
        basename in {"channel-prefs.js", "update-settings.ini"}
        or re.search(r"(^|/)ChannelPrefs\.framework/", filename)
        or re.search(r"(^|/)UpdateSettings\.framework/", filename)
    )
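
# Files matched here are written with "add-if-not", so an existing copy in the
# installation is left untouched instead of being overwritten by the update.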


def make_patch_instruction(filename, manifest):
    with open(manifest, "a") as manifest_file:
        manifest_file.write(f'patch "{filename}"\n')


def add_remove_instructions(remove_array, manifest):
    with open(manifest, "a") as manifest_file:
        for file in remove_array:
            manifest_file.write(f'remove "{file}"\n')


def make_add_if_not_instruction(filename, manifest):
    with open(manifest, "a") as manifest_file:
        manifest_file.write(f'add-if-not "{filename}" "{filename}"\n')


def append_remove_instructions(newdir, manifest):
    removed_files_path = os.path.join(newdir, "removed-files")
    if os.path.exists(removed_files_path):
        with NamedTemporaryFile() as rmv, open(rmv.name, "r") as f:
            xz_cmd(("--decompress",), removed_files_path, rmv.name)
            removed_files = f.readlines()
        with open(manifest, "a") as manifest_file:
            for file in removed_files:
                manifest_file.write(f'remove "{file.strip()}"\n')
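
# Note: removed-files inside the extracted mar is itself xz-compressed, which is why it
# is decompressed through xz_cmd before being read line by line.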


def mar_extract(source_mar, destination):
    os.makedirs(destination, exist_ok=True)
    cmd = ("mar", "-C", os.path.abspath(destination), "-x", os.path.abspath(source_mar))
    log(f"Running mar extract command: {cmd}", "mar_extract")
    try:
        subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError as e:
        log(f"Error extracting mar: {e.stderr}", "mar_extract")
        raise Exception(f"Mar failed with code {e.returncode}")


def xz_cmd(cmd, source_file, destination_file):
    """Run xz command via pipes to avoid file extension checks."""
    os.makedirs(os.path.dirname(destination_file), exist_ok=True)
    with open(destination_file, "wb") as dest_fd, open(source_file, "rb") as source_fd:
        try:
            subprocess.run(("xz", *cmd), stdin=source_fd, stdout=dest_fd, check=True)
        except subprocess.CalledProcessError as e:
            log(
                f"XZ Failure running xz {cmd} on {source_file} to {destination_file}: {e.stderr}",
                "xz_cmd",
            )
            raise Exception(f"XZ exited with code {e.returncode}")


def create_patch(from_file, to_file, destination_patch):
    """Create a patch between 2 xz compressed files"""
    log(f"{from_file} -> {destination_patch}", "create_patch")

    with (
        NamedTemporaryFile() as from_fd,
        NamedTemporaryFile() as to_fd,
        NamedTemporaryFile() as patch_fd,
    ):
        xz_cmd(("--decompress",), from_file, from_fd.name)

        # TODO: Potentially don't decompress to_mar files once per thread?
        xz_cmd(("--decompress",), to_file, to_fd.name)

        # args = f"zucchini -gen '{from_fd.name}' '{to_fd.name}' '{patch_fd.name}'"
        args = ["zucchini", "-gen", from_fd.name, to_fd.name, patch_fd.name]
        try:
            subprocess.run(args, check=True)
        except subprocess.CalledProcessError as e:
            log(f"Zucchini failed to create patch:\n{e.stderr}", "create_patch")
            raise Exception(f"Zucchini exited with code: {e.returncode}")

        xz_cmd(("--compress", "-9", "-e", "-c"), patch_fd.name, destination_patch)
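
# create_patch flow: decompress both xz inputs to temporary files, run `zucchini -gen`
# to produce a raw patch, then recompress the patch with xz so it can be shipped inside
# the partial mar.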


def make_partial(from_mar_url, to_mar_dir, target_mar, workdir, arch="", force=None):
    from_mar = os.path.join(workdir, "from.mar")
    download_file(from_mar_url, from_mar)

    requested_forced_updates = force or []
    # MacOS firefox binary is always forced update
    requested_forced_updates.append("Contents/MacOS/firefox")
    manifest_file = os.path.join(workdir, "updatev3.manifest")

    # Holds the relative path to all archive files to be added to the partial
    archivefiles = []
    # Holds the relative path of old files that no longer exist and must be removed
    remove_array = []

    from_mar_dir = os.path.join(workdir, "from_mar")
    mar_extract(from_mar, from_mar_dir)

    # Log current version for easier referencing
    from_version = get_option_from_compressed(
        from_mar_dir, "application.ini", "App", "Version"
    )
    log(f"Processing from_mar: {from_version}", "make_partial")

    partials_dir = os.path.abspath(os.path.join(workdir, "partials"))
    os.makedirs(partials_dir, exist_ok=True)

    # List files and directories
    oldfiles, olddirs = list_files_and_dirs(from_mar_dir)
    newfiles, newdirs = list_files_and_dirs(to_mar_dir)

    for newdir in newdirs:
        os.makedirs(os.path.join(partials_dir, newdir), exist_ok=True)

    # Check if precomplete file exists in the new directory
    if not os.path.exists(
        os.path.join(to_mar_dir, "precomplete")
    ) and not os.path.exists(
        os.path.join(to_mar_dir, "Contents/Resources/precomplete")
    ):
        log("precomplete file is missing!", "make_partial")

    # Create update manifest
    with open(manifest_file, "w") as manifest_fd:
        manifest_fd.write('type "partial"\n')

    # Process files for patching
    # Note: these files are already XZ compressed
    for rel_path in oldfiles:
        new_file_abs = os.path.join(to_mar_dir, rel_path)
        old_file_abs = os.path.join(from_mar_dir, rel_path)

        if os.path.exists(new_file_abs):
            patch_file = os.path.join(partials_dir, rel_path)
            if check_for_add_if_not_update(old_file_abs):
                make_add_if_not_instruction(rel_path, manifest_file)
                shutil.copy2(new_file_abs, patch_file)
                archivefiles.append(rel_path)
            elif check_for_forced_update(requested_forced_updates, rel_path):
                make_add_instruction(rel_path, manifest_file)
                shutil.copy2(new_file_abs, patch_file)
                archivefiles.append(rel_path)
            # TODO: !!! This check will always trigger if we switch XZ options!
            elif (
                subprocess.run(
                    ("diff", old_file_abs, new_file_abs),
                    capture_output=True,
                ).returncode
                != 0
            ):
                # Check for smaller patch or full file size and choose the smaller of the two to package
                create_patch(old_file_abs, new_file_abs, f"{patch_file}.patch")
                if (
                    os.stat(f"{patch_file}.patch").st_size
                    > os.stat(new_file_abs).st_size
                ):
                    make_add_instruction(rel_path, manifest_file)
                    os.unlink(f"{patch_file}.patch")
                    shutil.copy2(new_file_abs, patch_file)
                    archivefiles.append(rel_path)
                else:
                    make_patch_instruction(patch_file, manifest_file)
                    path_relpath = os.path.relpath(patch_file, partials_dir)
                    archivefiles.append(f"{path_relpath}.patch")
        else:
            remove_array.append(rel_path)

    for newfile_rel in newfiles:
        new_file_abs = os.path.join(to_mar_dir, newfile_rel)
        if newfile_rel not in oldfiles:
            patch_file = os.path.join(partials_dir, newfile_rel)
            make_add_instruction(newfile_rel, manifest_file)
            archivefiles.append(newfile_rel)
            shutil.copy2(new_file_abs, patch_file)

    add_remove_instructions(remove_array, manifest_file)

    # Add directory removal instructions from removed-files
    append_remove_instructions(to_mar_dir, manifest_file)

    # Compress manifest file and add to list of archived files
    compressed_manifest = os.path.join(partials_dir, "updatev3.manifest")
    xz_cmd(xz_compression_options(arch), manifest_file, compressed_manifest)
    archivefiles.append("updatev3.manifest")

    mar_channel_id = os.environ.get("MAR_CHANNEL_ID", "unknown")
    version = get_option_from_compressed(
        to_mar_dir, "application.ini", "App", "Version"
    )
    # from_version = get_option_from_compressed(from_mar_dir, "application.ini", "App", "Version")

    log(f"Archive files: {' '.join(archivefiles)}", "make_partial")

    # Pack the partial mar; assumed flags: -H/-V embed the channel id and version in the
    # mar header, -c creates the archive from the listed files.
    mar_cmd = (
        "mar",
        "-H",
        mar_channel_id,
        "-V",
        version,
        "-c",
        target_mar,
        *archivefiles,
    )
    log(f"Running mar command with: {' '.join(mar_cmd)}", "make_partial")
    try:
        subprocess.run(mar_cmd, cwd=partials_dir, check=True)
    except subprocess.CalledProcessError as e:
        log(f"Error creating mar:\n{e.stderr}")
        raise Exception(f"Mar exited with code {e.returncode}")

    return {
        "MAR_CHANNEL_ID": mar_channel_id,
        "appName": get_option_from_compressed(
            from_mar_dir, filename="application.ini", section="App", option="Name"
        ),
        "from_size": os.path.getsize(from_mar),
        "from_hash": get_hash(from_mar),
        "from_buildid": get_option_from_compressed(
            from_mar_dir, filename="application.ini", section="App", option="BuildID"
        ),
        "mar": os.path.basename(target_mar),
        "size": os.path.getsize(target_mar),
        "from_mar": from_mar_url,
    }
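
# The metadata returned above is merged with to_mar_info in main() and written out to
# manifest.json alongside the generated partial mars.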


def download_file(url, save_path):
    """
    Downloads a file from a given URL and saves it to disk.

    Args:
        url (str): The URL to download the file from.
        save_path (str): The path (including filename) where the file should be saved.
    """
    try:
        # Download the file and save it to the specified path
        urllib.request.urlretrieve(url, save_path)
        log(f"File downloaded successfully: {save_path}", "download_file")
    except urllib.error.URLError as e:
        log(f"Error downloading file: {url} -> {e}", "download_file")
        raise Exception("Failed to download file.")
    except Exception as e:
        log(f"An unexpected error occurred: {url} -> {e}", "download_file")
        raise Exception("Failed to download file.")


def process_single(
    from_mar_url, to_mar_dir, target_mar, workdir, update_number, arch="", force=None
):
    """Worker entry point: build one partial mar and return an (error, manifest) pair."""
    try:
        mar_manifest = make_partial(
            from_mar_url, to_mar_dir, target_mar, workdir, arch, force
        )
        mar_manifest["update_number"] = update_number
        return None, mar_manifest
    except Exception as e:
        log(traceback.format_exc(), "process_single")
        return e, None
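

# Illustrative invocation (paths and URLs are made up; the --to_mar_url and --arch flag
# names are inferred from how the parsed arguments are used below):
#   python3 <this script> \
#     --from_url https://example.com/firefox-previous.complete.mar \
#     --to_mar ./firefox-current.complete.mar \
#     --to_mar_url https://example.com/firefox-current.complete.mar \
#     --target ./artifacts \
#     --workdir ./work \
#     --locale en-US \
#     --arch macos-x86_64-aarch64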


def main():
    parser = argparse.ArgumentParser(
        description="Generate incremental update packages with zucchini."
    )
    parser.add_argument(
        "--from_url", help="Complete mar URLs", action="append", required=True
    )
    parser.add_argument("--to_mar", help="To complete mar", required=True)
    parser.add_argument(
        "--to_mar_url",
        help="To mar URL. Only used for filling the manifest.json file.",
    )
    parser.add_argument("--target", help="Target partial mar location", required=True)
    parser.add_argument(
        "--workdir", help="Work directory", action="store", required=True
    )
    parser.add_argument("--locale", help="Build locale", action="store", required=True)
    parser.add_argument(
        "--arch",
        help="Target Architecture",
        choices=BCJ_OPTIONS.keys(),
    )
    parser.add_argument(
        "--force",
        action="append",
        help="Clobber this file in the installation. Must be a path to a file to clobber in the partial update.",
    )
    args = parser.parse_args()

    base_workdir = os.path.abspath(args.workdir)

    # Multithread one partial per CPU
    cpus = os.cpu_count()  # This isn't optimal, but will do for now
    log(f"CPUs available for parallel computing: {cpus}", "main")

    # Create target directory with locale
    target = os.path.abspath(args.target)
    os.makedirs(target, exist_ok=True)

    # Decompress to_mar early
    to_mar_dir = os.path.join(base_workdir, "to_mar")
    mar_extract(args.to_mar, to_mar_dir)

    futures = []
    futures_result = []

    def future_cb(f):
        if not f.cancelled():
            futures_result.append(f.result())
        else:
            futures_result.append(("Cancelled", None))

    with ProcessPoolExecutor(cpus) as executor:
        # TODO: should the update_number come from the task payload?
        for update_number, from_url in enumerate(args.from_url):
            process_workdir = os.path.join(base_workdir, str(update_number))
            os.makedirs(process_workdir, exist_ok=True)
            target_mar = os.path.join(target, f"target.partial-{update_number}.mar")
            # Submit one worker per from_url; the arguments follow process_single's signature.
            future = executor.submit(
                process_single,
                from_url,
                to_mar_dir,
                target_mar,
                process_workdir,
                update_number,
                arch=args.arch,
                force=args.force,
            )
            future.add_done_callback(future_cb)
            futures.append(future)
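
    # Results arrive through future_cb as each worker finishes, so futures_result is in
    # completion order rather than args.from_url order.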
    log("Finished all processes.", "main")

    to_mar_info = {
        "locale": args.locale,
        # Use Gecko repo and rev from platform.ini, not application.ini
        "repo": get_option_from_compressed(
            to_mar_dir,
            filename="platform.ini",
            section="Build",
            option="SourceRepository",
        ),
        "revision": get_option_from_compressed(
            to_mar_dir, filename="platform.ini", section="Build", option="SourceStamp"
        ),
        "version": get_option_from_compressed(
            to_mar_dir, filename="application.ini", section="App", option="Version"
        ),
        "to_buildid": get_option_from_compressed(
            to_mar_dir, filename="application.ini", section="App", option="BuildID"
        ),
        "to_hash": get_hash(args.to_mar),
        "to_size": os.stat(args.to_mar).st_size,
        "to_mar": args.to_mar_url,
    }

    results = []
    for error, manifest in futures_result:
        if manifest:
            manifest.update(to_mar_info)
            results.append(manifest)
        else:
            log("Process raised an exception!", "main")

    # Write final task manifest
    with open(os.path.join(target, "manifest.json"), "w") as fd:
        fd.write(json.dumps(results))

    log("Finished writing final manifest.", "main")


if __name__ == "__main__":
    main()