1 #! /usr/bin/env nix-shell
2 #! nix-shell -i bash -p coreutils findutils gnused nix wget
# Byte-wise, locale-independent collation so every `sort` below is reproducible.
export LC_COLLATE=C # fix sort order
# parse files and folders from https://download.kde.org/ and https://download.qt.io/
# you can override this function in fetch.sh
#
# $1: path to a saved index.html
# stdout: sorted, de-duplicated list of *.tar.xz hrefs and subdirectory hrefs
function PARSE_INDEX() {
  grep -o -E -e '\s+href="[^"]+\.tar\.xz"' -e '\s+href="[-_a-zA-Z0-9]+/"' "$1" |
    cut -d'"' -f2 |
    sort -u
}
# Show how to invoke this script from a nixpkgs checkout.
echo "example use:" >&2
echo "cd nixpkgs/" >&2
echo "./maintainers/scripts/fetch-kde-qt.sh pkgs/development/libraries/qt-5/5.12" >&2
# Refuse paths outside pkgs/ — later steps derive nixpkgs-relative names from it.
if ! echo "$1" | grep -q '^pkgs/'; then
  echo "error: path argument must start with pkgs/" >&2
  # NOTE(review): 'exit 1' and 'fi' reconstructed — original lines lost in corruption.
  exit 1
fi
25 # need absolute path for the pushd-popd block
27 echo "ok: using fetchfile $1"
29 fetchfile
="$(readlink -f "$fetchfilerel")" # resolve absolute path
30 basedir
="$(dirname "$fetchfile")"
31 basedirrel
="$(dirname "$fetchfilerel")"
32 elif [ -d "$1" ]; then
33 echo "ok: using basedir $1"
35 basedir
="$(readlink -f "$basedirrel")" # resolve absolute path
36 if ! [ -d "$basedir" ]; then
37 basedir
="$(dirname "$basedir")"
39 fetchfile
="$basedir/fetch.sh"
41 echo 'error: $1 must be file or dir' >&2
# Derived locations: the package name and the srcs.nix file to (re)generate,
# both as absolute ($SRCS) and nixpkgs-relative ($srcsrel) paths.
pkgname=$(basename "$basedir")
SRCS="$basedir/srcs.nix"
srcsrel="$basedirrel/srcs.nix"
# Accept either the old WGET_ARGS array or the new BASE_URL variable
# from the sourced fetch.sh. Note: "$WGET_ARGS" tests only the first
# array element — kept as in the original.
if [ -n "$WGET_ARGS" ]; then # old format
  BASE_URL="${WGET_ARGS[0]}" # convert to new format
  if ! echo "$BASE_URL" | grep -q -E '^(http|https|ftp)://'; then
    printf 'error: from WGET_ARGS, converted invalid BASE_URL: %q\n' "$BASE_URL" >&2
    # NOTE(review): 'exit 1'/'fi' reconstructed — original lines lost.
    exit 1
  fi
  printf 'ok: from WGET_ARGS, converted BASE_URL: %q\n' "$BASE_URL"
elif [ -n "$BASE_URL" ]; then # new format
  : # nothing to convert
else
  echo "error: fetch.sh must set either WGET_ARGS or BASE_URL" >&2
  # NOTE(review): 'exit 1'/'fi' reconstructed — original lines lost.
  exit 1
fi
# Unique temp-dir template; $tmptpl is reused below for the csv temp file.
tmptpl="tmp.fetch-kde-qt.$pkgname.XXXXXXXXXX"
# quote the template so a $pkgname with spaces cannot word-split the mktemp arg
tmp=$(mktemp -d "$tmptpl")
echo "tempdir is $tmp"

# intentionally a plain string: expanded unquoted at the wget call sites
# so it word-splits into two separate options
wgetargs='--quiet --show-progress'

# length of the base url, used to derive server-relative paths below
base_url_len=${#BASE_URL}
# Normalize a URL or path read from stdin: collapse runs of slashes,
# then restore the double slash after the protocol scheme.
# NOTE(review): the function wrapper is reconstructed — the definition line was
# lost, but the name is grounded by the 'clean_urls' call sites below.
function clean_urls() {
  sed -E 's,//+,/,g' |
    sed -E 's,^(http|https|ftp):/,&/,'
}
# Crawl the download server breadth-first: fetch each directory's index,
# collect *.tar.xz links into $filelist and subdirectories into $dirlist
# for the next sweep. $dirlist is assumed initialized from $BASE_URL in a
# line lost before this block — TODO confirm against upstream.
while [ -n "$dirlist" ]; do
  for dirurl in $dirlist; do
    echo "fetching index.html from $dirurl"
    # server-relative path of this directory, mirrored locally under $PWD
    relpath=$(echo "./${dirurl:$base_url_len}" | clean_urls)
    indexfile=$(echo "$relpath/index.html" | clean_urls)
    # NOTE(review): reconstructed — the index file's directory must exist
    # before 'wget -O' can write into it; the original line was lost.
    mkdir -p "$relpath"
    # $wgetargs intentionally unquoted: it holds multiple options
    wget $wgetargs -O "$indexfile" "$dirurl"
    echo "parsing $indexfile"
    filedirlist="$(PARSE_INDEX "$indexfile")"
    # prefix each *.tar.xz entry with its directory url
    filelist_next="$(echo "$filedirlist" | grep '\.tar\.xz$' | while read file; do echo "$dirurl/$file"; done)"
    filelist_next="$(echo "$filelist_next" | clean_urls)"
    [ -n "$filelist" ] && filelist+=$'\n'
    filelist+="$filelist_next"
    # queue subdirectories for the next sweep; '|| true' tolerates "no match"
    # NOTE(review): $dirlist is overwritten for each $dirurl, so with several
    # directories per level only the last one's children survive — this matches
    # the visible original; verify against upstream.
    dirlist="$(echo "$filedirlist" | grep -v '\.tar\.xz$' | while read dir; do echo "$dirurl/$dir"; done || true)"
    dirlist="$(echo "$dirlist" | clean_urls)"
  done
done
# Count collected urls; bail out if the crawl found nothing.
filecount=$(echo "$filelist" | wc -l)

if [ -z "$filelist" ]; then
  echo "error: no files parsed from $tmp/index.html"
  # NOTE(review): 'then'/'exit 1'/'fi' reconstructed — original lines lost.
  exit 1
fi

echo "parsed $filecount tar.xz files:"; echo "$filelist"
# most time is spent here
echo "fetching $filecount sha256 files ..."
urllist="$(echo "$filelist" | while read file; do echo "$file.sha256"; done)"
# wget -r: keep directory structure
# NOTE(review): the '} || {' hinge and closing '}' are reconstructed — the two
# visible branches report success vs. partial failure of the same download.
echo "$urllist" | xargs wget $wgetargs -nH -r -c --no-parent && {
  actual=$(find . -type f -name '*.sha256' | wc -l)
  echo "fetching $filecount sha256 files done: got $actual files"
} || {
  # workaround: in rare cases, the server does not provide the sha256 files
  # for example when the release is just a few hours old
  # and the servers are not yet fully synced
  actual=$(find . -type f -name '*.sha256' | wc -l)
  echo "fetching $filecount sha256 files failed: got only $actual files"
}
# TODO fetch only missing tar.xz files
echo "fetching $filecount tar.xz files ..."
# -nH: no host directory; -r -c: recursive, resume; --no-parent: stay below BASE_URL
echo "$filelist" | xargs wget $wgetargs -nH -r -c --no-parent
# Generate any sha256 files the server did not provide, in the same
# "<hash> <name>" format the server uses.
echo "generating sha256 files ..."
find . -type f -name '*.tar.xz' | while read -r src; do
  name=$(basename "$src")
  sha256=$(sha256sum "$src" | cut -d' ' -f1)
  echo "$sha256 $name" >"$src.sha256"
done
# Build a temporary csv of name,version,src,filename,sha256 — one row per
# downloaded tarball — as input for the srcs.nix generation below.
csv=$(mktemp "$tmptpl.csv")
echo "writing temporary file $csv ..."
find . -type f -name '*.sha256' | while read -r sha256file; do
  src="${sha256file%.*}" # remove extension
  sha256=$(cut -d' ' -f1 "$sha256file") # base16
  sha256=$(nix-hash --type sha256 --to-base32 "$sha256")
  # '@' is not valid in a nix store name
  filename=$(basename "$src" | tr '@' '_')
  nameVersion="${filename%.tar.*}"
  # strip the version and the qt tarball suffixes to get the bare name
  name=$(echo "$nameVersion" |
    sed -e 's,-[[:digit:]].*,,' \
        -e 's,-opensource-src$,,' \
        -e 's,-everywhere-src$,,')
  # version = everything after the leading alphabetic name components
  version=$(echo "$nameVersion" | sed -e 's,^\([[:alpha:]][[:alnum:]]*-\)\+,,')
  echo "$name,$version,$src,$filename,$sha256" >>"$csv"
done
# Count existing entries so we can report before/after at the end.
files_before=$(grep -c 'src = ' "$SRCS")

echo "writing output file $SRCS ..."
# NOTE(review): the 'cat > ... <<EOF' wrapper and the opening attrset brace are
# reconstructed — only the three content lines survived the corruption.
cat >"$SRCS" <<EOF
# DO NOT EDIT! This file is generated automatically.
# Command: ./maintainers/scripts/fetch-kde-qt.sh $@
{ fetchurl, mirror }:

{
EOF
# For every distinct package name in the csv, emit one srcs.nix attribute
# for its latest version (sort -rV = reverse version sort).
gawk -F , "{ print \$1 }" "$csv" | sort | uniq | while read name; do
  versions=$(gawk -F , "/^$name,/ { print \$2 }" "$csv")
  latestVersion=$(echo "$versions" | sort -rV | head -n 1)
  src=$(gawk -F , "/^$name,$latestVersion,/ { print \$3 }" "$csv")
  filename=$(gawk -F , "/^$name,$latestVersion,/ { print \$4 }" "$csv")
  sha256=$(gawk -F , "/^$name,$latestVersion,/ { print \$5 }" "$csv")
  # NOTE(review): the url derivation, the heredoc wrapper and 'done' are
  # reconstructed — only the 'version = ...' and 'url = ...' content lines
  # survived; presumably $src is './relative/path' so the leading './' is
  # stripped to make it mirror-relative — TODO confirm against upstream.
  url="${src:2}"
  cat >>"$SRCS" <<EOF
  $name = {
    version = "$latestVersion";
    src = fetchurl {
      url = "\${mirror}/$url";
      sha256 = "$sha256";
      name = "$filename";
    };
  };
EOF
done

# NOTE(review): reconstructed — close the attrset opened by the header.
echo "}" >>"$SRCS"
# Report how the entry count changed and point the maintainer at the diff.
files_after=$(grep -c 'src = ' "$SRCS")
echo "files before: $files_before"
echo "files after: $files_after"

echo "git diff $srcsrel"

# quote the expansions so 'rm' never sees word-split or empty-glob paths
rm -fr "$tmp" >/dev/null
rm -f "$csv" >/dev/null