# chromium,chromedriver: 129.0.6668.91 -> 129.0.6668.100
# [NixPkgs.git] / pkgs / common-updater / scripts / list-directory-versions
# blob c713578beefbeb04078b6f27bacb7a34afbedc62
#!/usr/bin/env python

# List every version of a package that appears on a directory-style
# download page (an HTML index of release files).  Meant to be driven by
# the common-updater scripts: --pname / --attr-path usually arrive via
# the UPDATE_NIX_* environment variables instead of the command line.

import argparse
import requests
import os
import subprocess
import json
import re
from bs4 import BeautifulSoup

parser = argparse.ArgumentParser(
    description="Get all available versions listed for a package in a site."
)
parser.add_argument(
    "--pname",
    # Fall back to the environment set up by the update framework; the
    # flag is only mandatory when that variable is absent.
    default=os.environ.get("UPDATE_NIX_PNAME"),
    required="UPDATE_NIX_PNAME" not in os.environ,
    help="name of the package",
)
parser.add_argument(
    "--attr-path",
    default=os.environ.get("UPDATE_NIX_ATTR_PATH"),
    help="attribute path of the package",
)
parser.add_argument("--url", help="url of the page that lists the package versions")
parser.add_argument("--file", help="file name for writing debugging information")
parser.add_argument("--extra-regex", help="additional regex to filter versions with")
if __name__ == "__main__":
    args = parser.parse_args()

    pname = args.pname

    # The attribute path defaults to the package name.
    attr_path = args.attr_path or pname

    # When --url is not given, derive the listing URL from the package's
    # own src: evaluate the attribute with nix-instantiate and take the
    # directory part of the first source URL.
    url = args.url or json.loads(
        subprocess.check_output(
            [
                "nix-instantiate",
                "--json",
                "--eval",
                "-E",
                f"with import ./. {{}}; dirOf (lib.head {attr_path}.src.urls)",
            ],
            text=True,
        )
    )

    # print a debugging message
    if args.file:
        with open(args.file, "a") as f:
            f.write(f"# Listing versions for {pname} from {url}\n")

    # Fetch the listing page; bound the request so a stalled server does
    # not hang the updater indefinitely.
    page = requests.get(url, timeout=30)
    soup = BeautifulSoup(page.content, "html.parser")

    # Match hrefs shaped like ".../<pname>-<version>[.tar].<ext>": an
    # optional path prefix, the package name, then the version (group 2,
    # digits/dots with an optional suffix such as "-rc1"), an optional
    # ".tar" component and a final extension.  Compiled once, outside the
    # link loop, instead of being rebuilt per href.
    version_pattern = re.compile(
        rf"(.*/)?{args.pname}-([\d.]+?(-[\d\w.-]+?)?)(\.tar)?(\.[^.]*)"
    )

    for link in soup.find_all("a"):
        link_url = link.get("href", None)
        if link_url is not None:
            match = version_pattern.fullmatch(link_url)
            if match:
                version = match.group(2)
                # Optionally restrict output to versions matching
                # --extra-regex (e.g. to filter out pre-releases).
                if (not args.extra_regex) or re.fullmatch(args.extra_regex, version):
                    print(version)