#!/usr/bin/env bash

# --- T2-COPYRIGHT-NOTE-BEGIN ---
# T2 SDE: scripts/Download
# Copyright (C) 2004 - 2024 The T2 SDE Project
# Copyright (C) 1998 - 2003 ROCK Linux Project
#
# This Copyright note is generated by scripts/Create-CopyPatch,
# more information can be found in the files COPYING and README.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2.
# --- T2-COPYRIGHT-NOTE-END ---
# Run this command from the T2 directory as scripts/Download [ options ]
#
# It enables you to download source files as described in the package
# definitions (optionally using a mirroring 'cache' server).
#
# This script also allows for checksum display/validation.
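#
# Typical invocations, as a rough sketch (the package name "bash" below is
# only an illustrative example):
#
#   scripts/Download -all             # fetch all files referenced by the package tree
#   scripts/Download -required        # fetch only what the selected config needs
#   scripts/Download bash             # fetch the files of a single package
#   scripts/Download -check -all      # only verify checksums of already present files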
umask 022

. scripts/functions.in
. misc/output/parse-config

eval "$(egrep '^(sdever)=' scripts/parse-config)"
base=$(pwd -P)
if [ "$1" = '--help' ]; then
	{ echo
	echo "Usage:"
	echo
	echo " scripts/Download [options] [ Package(s) ]"
	echo " scripts/Download [options] [ Desc file(s) ]"
	echo " scripts/Download [options] -repository Repositories"
	echo " scripts/Download [options] { -all | -required }"
	echo
	echo " Where [options] is an alias for:"
	echo " [ -cfg <config> ] [ -nock ] [ -alt-dir <AlternativeDirectory> ]"
	echo " [ -mirror <URL> | -check ] [ -try-questionable ] [ -notimeout ]"
	echo " [ -longtimeout ] [ -curl-opt <curl-option>[:<curl-option>[:..]] ]"
	echo " [ -proxy <server>[:<port>] ] [ -proxy-auth <username>[:<password>] ]"
	echo " [ -copy ] [ -move ]"
	echo
	echo " By default, this script auto-detects the best T2 SDE mirror."
	echo
	echo " A mirror can also be a local directory in the form: 'file:///<dir>'"
	echo
	echo " scripts/Download -mk-cksum Filename(s)"
	echo " scripts/Download [ -list | -list-unknown | -list-missing | -list-cksums ]"
	echo; } >&2
	exit 1
fi
# -mk-cksum mode (display T2 type package checksum): it
# displays the checksum T2 validates against.
#
# Commonly known compressed tarballs are unpacked before checksumming.
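#
# For example (file name and digest purely illustrative):
#
#   scripts/Download -mk-cksum download/mirror/b/bash-5.2.21.tar.gz
#   download/mirror/b/bash-5.2.21.tar.gz: <sha224 of the uncompressed tarball>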
if [ "$1" = -mk-cksum ]; then
	shift
	for x; do
		echo -n "$x: "
		ck="sha224"
		cksum=${ck}sum

		if ! type -p $cksum > /dev/null; then
			cksum=${cksum#sha} cksum=${cksum%sum}
			cksum="shasum -a $cksum"
		fi

		compressor="$(get_compressor "$x")"
		if [ ! -f "$x" ]; then
			echo "No such file."
		elif [ "$compressor" ]; then
			$compressor < "$x" | $cksum | cut -f1 -d' '
		else
			$cksum < "$x" | cut -f1 -d' '
		fi
	done
	exit 1
fi
85 # Handle options passed on the command line
87 mkdir -p src/ download/; config=default
88 this_is_the_2nd_run=0
89 mirror='' checkonly=0 altdir=''
90 tryques=0 nocheck=0 options='-this_is_the_2nd_run '
91 notimeout=0 curl_options='-A T2-downloader --disable-epsv --location -f'
92 altcopy=link verbose=1 quietmirror=0
93 downloaderror=0
# load options from the environment variable T2DOWNOPT
# and then clear it to avoid duplicating them in child processes
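#
# For example (illustrative only; any of the regular command line options
# are valid here):
#
#   T2DOWNOPT="-longtimeout -quiet-mirror" scripts/Download -required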
set -- $T2DOWNOPT "$@"
export T2DOWNOPT=
while [ $# -gt 0 ]; do
	case "$1" in
	-this_is_the_2nd_run)
		this_is_the_2nd_run=1 ;;

	-cfg)
		options="$options -cfg $2"
		config="$2"; shift ;;

	-q)
		options="$options -q"
		verbose=0 ;;
	-nock)
		# -nock skips checksum checking (don't use lightly)
		options="$options -nock"
		nocheck=1 ;;

	-mirror)
		# -mirror uses a mirror for finding source files
		if [ "$2" = auto ]; then
			rm -f download/Mirror-Cache
		else
			mkdir -p download
			echo "$2 $sdever" > download/Mirror-Cache
			options="$options -mirror $2"
			mirror="$2"
		fi
		shift ;;
	-quiet-mirror)
		quietmirror=1 ;;

	-check)
		# -check just validates the file using the checksum
		options="$options -check"
		checkonly=1 ;;

	-notimeout)
		# don't add timeout curl options
		options="$options -notimeout"
		notimeout=2 ;;
	-longtimeout)
		# use longer curl timeouts
		options="$options -longtimeout"
		notimeout=1 ;;
	-curl-opt)
		# additional curl options
		options="$options -curl-opt $2"
		curl_options="$curl_options `echo $2 | tr : ' '`"
		shift ;;

	-proxy)
		# proxy option for curl
		mkdir -p download
		echo -n "$2" > download/Proxy
		options="$options -proxy $2"
		shift ;;

	-proxy-auth)
		# proxy authentication for curl - can be seen with ps!
		mkdir -p download
		echo -n "$2" > download/Proxy-auth
		chmod 600 download/Proxy-auth
		options="$options -proxy-auth $2"
		shift ;;

	-alt-dir)
		# check for an alternative directory where to search for
		# package source tarballs
		altdir=$(cd $2; pwd -P)
		options="$options -alt-dir $2"
		shift ;;

	-try-questionable)
		# also try to download questionable URLs
		options="$options -try-questionable"
		tryques=1 ;;

	-move) altcopy=move ;;
	-copy) altcopy=copy ;;

	*) break ;;
	esac
	shift
done
# Read some config values

target=`grep '^export SDECFG_TARGET=' config/$config/config 2>/dev/null |
	cut -f2 -d= | tr -d "'"`
arch=`grep '^export SDECFG_ARCH=' config/$config/config 2>/dev/null |
	cut -f2 -d= | tr -d "'"`
arch="${arch:-none}" target="${target:-none}"
if [ $notimeout -eq 0 ]; then
	curl_options="$curl_options -y 20 -Y 10 --connect-timeout 60"
elif [ $notimeout -eq 1 ]; then
	curl_options="$curl_options -y 60 -Y 1 --connect-timeout 300"
fi

# Disable checking for certificates on https downloads
curl_options="$curl_options -k"
# cksum_chk filename cksum origfile
#
# This function verifies the checksum. If it fails, the file is renamed
# to file.cksum-err and failure is returned.
#
# It seems like the [ ] command has problems comparing high numbers,
# that's why a text comparison is used here.
#
# Nothing is done if the checksum is '0' or a text of 'X'.
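#
# The cksum argument can therefore take several forms, for example
# (digest values purely illustrative):
#
#   0                  no checksum recorded yet
#   X                  checksum checking explicitly disabled
#   <56 hex chars>     bare digest, treated as sha224 by its length
#   {sha256}<64 hex>   digest with an explicit type prefix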
cksum_chk() {
	local y="$2"
	[ $nocheck = 1 -o -z "${2//0/}" -o -z "${2//X/}" ] && return 0

	# determine cksum type
	local ck="${y%\}*}"
	if [ "$ck" != "$y" ]; then
		ck="${ck#\{}"
	else case "${#ck}" in
		56) ck="sha224" ;;
		64) ck="sha256" ;;
		*) ck="ck" ;;
	esac; fi
	y="${y#\{$ck\}}"

	local cksum=${ck}sum
	if ! type -p $cksum > /dev/null; then
		cksum=${cksum#sha} cksum=${cksum%sum}
		cksum="shasum -a $cksum"
	fi

	local x="`$cksum "$1" | cut -f1 -d' '`"
	if [ "$x" != "$y" ]; then
		# Add .cksum-err extension to filename:
		echo "Cksum ERROR: $3.cksum-err ($x)"
		mv "$3" "$3.cksum-err"; return 1
	fi
	return 0
}
# output (multiline) message only if we are not in quiet mode
echo_info() {
	if [ "$verbose" == 1 ]; then
		echo "$@" | sed -e 's,^,INFO: ,'
	fi
}

# output (multiline) message always
echo_warn() {
	echo "$@" | sed -e 's,^,INFO: ,'
}
# Autodetect the best mirror and save its URL in $mirror
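#
# The result is cached in download/Mirror-Cache as a single line of the
# form "<mirror-url or none> <sde-version>", e.g. (values illustrative only):
#
#   https://mirror.example.org/t2/sources 24.6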
info=
detect_mirror() {
	if [ -f download/Mirror-Cache ]; then
		read mirror mirrorver < download/Mirror-Cache
		mirror=${mirror:-none}
		if [ "$mirror" = "none" ]; then
			[ "$quietmirror" != 1 ] &&
				var_append info "
" "Found download/Mirror-Cache: none (using original download locations)"
			return
		elif [ "$mirrorver" != "$sdever" -a "$mirrorver" != "any" ]; then
			echo_warn "Cached mirror URL in download/Mirror-Cache is outdated."
		else
			[ "$quietmirror" != 1 ] && var_append info "
" "Found cached mirror: $mirror"
			return
		fi
	fi

	echo_warn "Auto-detecting best mirror:"

	echo_warn "Downloading mirror-list from: t2sde.org"
	curl -s -S $curl_options -o src/Download-Mirror-List \
		"https://t2sde.org/cgi-bin/t2-mirrors.cgi?$sdever"

	bestval=0 result='No Mirror Found!'
	me=$([ -s download/Me ] && cat download/Me)
	while read mirror_name; do
		if [ "${mirror_name#=}" != "$mirror_name" ]; then
			mirror_name="${mirror_name#= }"
			mirror_name="${mirror_name% =}"
			read mirror_url

			case "$mirror_name" in ($me) continue ;; esac

			echo -n "INFO: Testing <$mirror_name> ..."
			val="$(curl -s $curl_options -m 20 "${mirror_url%/}/DOWNTEST" \
				-w "ok %{speed_download}" -o /dev/null)"
			if [ "$val" = "${val#ok }" -o "$val" = "ok 0.000" ]; then
				echo " error"
			else
				xval=`echo ${val#ok } | tr -d .,`; echo " $val B/s"
				if [ "$xval" -gt "$bestval" ]; then
					bestval=$xval mirror="${mirror_url%/}"
					result="Saving mirror $mirror (download/Mirror-Cache)"
				fi
			fi
		fi
	done < src/Download-Mirror-List
	echo "$mirror $sdever" > download/Mirror-Cache
	echo_warn "$result"
}
# download_file local-filename download-location cksum repo pkg
#
# This function decides whether to download directly or from a mirror,
# validates the checksum, etc.
# It calls download_file_now to do the actual download.
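#
# A typical call, roughly as issued by package() below (file name, URL
# and digest are illustrative only):
#
#   download_file download/mirror/b/bash-5.2.21.tar.gz \
#       https://ftp.example.org/pub/bash/ {sha224}<digest> base bash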
download_file() {
	# Init
	#
	local gzfile="$1" location="$2" cksum="$3" repo="$4" pkg="$5"
	# Make src directory for creating tar balls
	mkdir -p src/
	# Remove optional '-' prefix from $location
	[ "${location:0:1}" == '-' ] && location="${location:1}"
	# Lock file name:
	lkfile="src/down.lockfile.`echo $gzfile | tr / -`"

	# Check if it's already there
	#
	[ -s "$gzfile" -a $checkonly != 1 ] && return 0

	# Make locking
	#
	if [ -s "$lkfile" ]; then
		echo "Found $lkfile -> skip download."
		return 0
	fi
	trap 'local ret=$?; rm -f "$lkfile"; return $ret' INT
	echo $$ > "$lkfile"

	# Check if we only want to test the cksum(s)
	#
	if [ $checkonly = 1 ]; then
		if [ ! -f "$gzfile" ]; then
			echo "File missing: $gzfile"
			rm -f "$lkfile"; trap INT; downloaderror=1; return
		fi
		if [ -z "${cksum##X*}" ]; then
			echo "No checksum (ignore): $gzfile"
			rm -f "$lkfile"; trap INT; return
		fi
		if [ "$cksum" = 0 ]; then
			echo "No checksum (missing): $gzfile"
			rm -f "$lkfile"; trap INT; return
		fi

	elif [ -s "$gzfile" ]; then
		echo; echo "Already downloaded: $pkg:$gzfile"

	else
		[ "$info" ] && echo_info "$info" && info=
		echo; echo "Downloading $pkg:$gzfile"
	fi
	# Existing *.cksum-err
	#
	if [ -s "$gzfile.cksum-err" ]; then
		# cksum-err file already exists:
		echo "ERROR: found: $gzfile.cksum-err"
		echo "ERROR: That means that we downloaded the" \
			"file already and it had an"
		echo "ERROR: incorrect checksum. Remove the" \
			"*.cksum-err file to force a"
		echo "ERROR: new download of that file."
		rm -f "$lkfile"; trap INT; downloaderror=1; return 1
	fi

	# Existing *.extck-err
	#
	if [ -s "$gzfile.extck-err" ]; then
		# extck-err file already exists:
		echo "ERROR: found: $gzfile.extck-err"
		echo "ERROR: That means that we downloaded the" \
			"file already and its content"
		echo "ERROR: did not match its filename extension." \
			"Remove the *.extck-err file"
		echo "ERROR: to force a new download of that file."
		rm -f "$lkfile"; trap INT; downloaderror=1; return 1
	fi
	# Questionable URL
	#
	if [ "$location" != "${location#\?}" ]; then
		if [ "$tryques" = 0 ]; then
			echo "ERROR: URL is marked as questionable." \
				"Not downloading this file."
			rm -f "$lkfile"; trap INT; return 1
		else
			echo "WARNING: URL is marked as questionable." \
				"Downloading it anyway."
			location="${location#\?}"
		fi
	fi

	# Make directory (if required)
	#
	if [ ! -d `dirname "$gzfile"` ]; then
		mkdir -p `dirname "$gzfile"`
	fi

	# Alternative Directory
	#
	if [ "$altdir" ]; then
		altfile=$(find $altdir/ -name `basename $gzfile` | head -n 1)
	else
		altfile=""
	fi
	if [ "$altfile" ]; then

		echo "Found `basename $gzfile` as: $altfile"
		if [ "$altcopy" = 'link' ]; then
			cp -lv $altfile $gzfile
		elif [ "$altcopy" = 'copy' ]; then
			cp -v $altfile $gzfile
		elif [ "$altcopy" = 'move' ]; then
			mv -v $altfile $gzfile
		fi
		gzfile="$gzfile"

	else
		# Mirroring
		#
		read mirror mirrorver < download/Mirror-Cache

		if [ -n "$mirror" -a "$mirror" != "none" -a -z "${gzfile##download/mirror/*}" ]; then
			# try to use the mirror
			if ! download_file_now "!$mirror/${gzfile#download/mirror/}" $gzfile; then
				echo "INFO: Download from mirror failed, trying original URL."
				download_file_now "$location" $gzfile || downloaderror=1
			else
				gzfile="$gzfile"
			fi
		else
			# don't want to use a mirror
			download_file_now "$location" $gzfile || downloaderror=1
		fi
	fi
	if [ ! -s "$gzfile" ]; then
		rm -f "$lkfile"; trap INT; return 1
	fi

	# unsign .gpg file
	if [[ $gzfile = *.gpg ]]; then
		gzfile=${gzfile%.gpg}
		if [ -f $gzfile.gpg ]; then
			echo "Unsigning GnuPG file: $gzfile.gpg"
			gpg $gzfile.gpg
		fi
		if [ ! -f $gzfile ]; then
			echo "Unsigning failed"
			rm -f "$lkfile"; trap INT; return 1
		fi
	fi

	echo "Checksum testing: $gzfile"
	local compressor="$(get_compressor "$gzfile")"
	if [ "$compressor" ]; then
		# TODO: w/o temp file
		$compressor < "$gzfile" > src/down.$$.dat
		cksum_chk src/down.$$.dat $cksum "$gzfile" || downloaderror=1
		rm -f src/down.$$.dat
	else
		cksum_chk "$gzfile" $cksum "$gzfile" || downloaderror=1
	fi

	# Free Lock and finish
	#
	rm -f "$lkfile"; trap INT; return 0
}
# download_file_now location filename
#
# This function executes the actual download using curl.
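#
# The location argument can take several forms (examples illustrative only):
#
#   https://example.org/pub/foo/           directory URL; the file name is appended
#   !https://example.org/foo-1.0.tar.gz    '!' marks an already complete URL used as-is
#   manual://example.org/download.html     file must be fetched manually (see below)
#   cvs/svn/git/hg locations               checked out and re-packed into a tarball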
download_file_now() {
	local location="$1" gzfile="$2" curlret=0

	# Create URL
	#
	case "$location" in
		manual://*) url="$location" ;;
		!*) url="${location#!}" ;;
		*) url="${location%/*}/${gzfile##*/}" ;;
	esac

	# Download
	#
	case "$url" in
	manual://*)
		# Determine if the file has already been downloaded
		# manually. For this we first look in $HOME then in
		# download/manual.
		downloadpath=${altdir:-$HOME}
		downloadfile="${gzfile##*/}"
		if [ -e $downloadpath/$downloadfile ]; then
			location="file://$downloadpath/"
		else
			location="http://${url#manual://}"
			# No manual download has taken place yet.
			# So inform the user to do so.
			cat <<-EOT
				The file $downloadfile can not be fetched automatically
				please visit: $location
				and download it manually into $HOME or somewhere else using -alt-dir
			EOT
			return 1
		fi

		# Re-use this function with a modified download location.
		download_file_now "$location" $gzfile
		return $?
		;;
	http://*|https://*|ftp://*|file://*)
		if [ -s "$gzfile.incomplete" ]; then
			echo "INFO: Trying to resume previous download .."
			resume="-C -"
		else
			resume=""
		fi

		[ -s download/Translations ] && trfile=download/Translations || trfile=misc/share/DownloadTranslations
		trurl="$(echo "$url" | sed -f $trfile)"
		if [ -n "$trurl" -a "$trurl" != "$url" ]; then
			echo "INFO: URL translated."
			url="$trurl"
		fi
		unset trurl trfile

		curl --progress-bar $resume $curl_options "$url" -o "$gzfile.incomplete"
		curlret="$?"

		if [ "$resume" ] &&
		   [ $curlret -eq 33 -o $curlret -eq 36 ]; then
			echo "INFO: Resuming download not possible. -> Overwriting old file."
			rm -f "$gzfile.incomplete"
			curl --progress-bar $curl_options "$url" -o "$gzfile.incomplete"
			curlret="$?"
		fi

		if [ $curlret -ne 0 ]; then
			case "$curlret" in
			18)
				echo "WARNING: Got only some of the file. A re-run of $0"
				echo "WARNING: is required to complete the download." ;;
			22) : ;; # 404 not found
			130)
				echo -e '\rWARNING: CURL got a SIGINT' \
					"(someone pressed Ctrl-C). A re-run of"
				echo "WARNING: $0 is required to complete the download."; sleep 1 ;;
			*)
				echo "$curlret $gzfile $url" \
					>> src/Download-Errors
				echo -e "\rERROR: CURL Returned error: $curlret" ;;
			esac
			return 1
		elif [ ! -s "$gzfile.incomplete" ]; then
			echo "0 $gzfile $url" >> src/Download-Errors
			echo "ERROR: CURL returned success but we have no data!"
			curlret=1
		else
			case "$gzfile" in
			*.br|*.tbr)
				typeexpr="data" ;;
			*.gz|*.tgz)
				typeexpr="gzip compressed data" ;;
			*.bz2|*.tbz2|*.tbz)
				typeexpr="bzip2 compressed data" ;;
			*.lz|*.tlz)
				typeexpr="lzip compressed data" ;;
			*.lzma)
				typeexpr="LZMA compressed data" ;;
			*.zst|*.tzst|*.zstd)
				typeexpr="Zstandard compressed data" ;;
			*.Z|*.tZ)
				typeexpr="compress'd data" ;;
			*.zip)
				typeexpr="Zip archive data" ;;
			*.jar)
				typeexpr="Java archive data (JAR)" ;;
			*.tar)
				typeexpr="tar archive" ;;
			*.xz|*.txz)
				typeexpr="[xX][zZ] compressed data" ;;
			*)
				echo "WARNING: Unknown file extension: $gzfile"
				typeexpr="." ;;
			esac
			case $(file "$gzfile.incomplete") in
			*$typeexpr*)
				mv "$gzfile"{.incomplete,}
				;;
			*)
				echo "ERROR: File type does not match filename ($typeexpr)!"
				mv "$gzfile"{.incomplete,.extck-err}
				;;
			esac
		fi
		;;
	*)
		protocol="${url%%://*}"

		# we need to use $location - $url is already mangled above -ReneR
		# $protocol://$url $options
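		# for instance, a (hypothetical) .desc download location such as
		#   git+https://example.org/foo.git v1.2.3
		# yields protocol=git; the clone URL and the remaining options
		# ("v1.2.3", later passed to 'git checkout') are extracted below.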
		url="`echo "$location" | sed "s,$protocol://\([^ ]*\).*,\1,"`"
		options="`echo "$location" | cut -s -d' ' -f2-`"

		case "$protocol" in
		cvs)
			# the first option is the module name
			module="${options%% *}"
			options="${options#* }"
			cmdline="cvs -z4 -Q -d $url co -P $options $module"

			# sometimes cvs wants to read ~/.cvspass just for fun ..
			touch $HOME/.cvspass
			;;
		svn|svn\+*) # allow any svn+ other transport and strip the svn+ part off
			url="${protocol#svn+}://$url"
			options="${options## *}"
			if [ "$options" == "" -o "${options:0:1}" == "-" ]; then
				# the module is the last dir of $url,
				# w/ or wo/ trailing slash (/)
				module="${url%%/}"
				module="${module##*/}"
			else
				# the first option is the module name
				module="${options%% *}"
				[ "$module" = "$options" ] &&
					options= || options="${options#* }"
			fi
			cmdline="svn export $options $url $module"
			;;
		git|git\+*) # allow any git+ other transport and strip the git+ part off
			url="${protocol#git+}://$url"

			module="${url##*/}"
			cmdline="git clone --recursive $git_options $url $module"
			options="${options#* }"
			[ -n "$options" ] && cmdpp="(cd $module; git checkout $options)"
			;;
		hg|hg\+*) # allow any hg+ other transport and strip the hg+ part off
			url="${protocol#hg+}://$url"

			module="${url##*/}"
			cmdline="hg clone $url $module"
			options="${options#* }"
			[ -n "$options" ] && cmdpp="(cd $module; hg clone $options)"
			;;
		*)
			echo "$protocol unrecognized!"
			return 1
			;;
		esac
		cvsdir="src/down.${protocol}dir.`echo $gzfile | tr / -`"
		saved_pwd=$PWD; mkdir -p $cvsdir; cd $cvsdir

		echo "$cmdline"
		{
			$cmdline || touch .cvs_error
		} &> .cvs_output &

		while fuser .cvs_output &> /dev/null; do
			echo -ne `nice du -sh 2> /dev/null | \
				cut -f1` 'downloaded from archive so far ... \r'
			sleep 3
		done

		if [ -f .cvs_error ]; then
			cd $saved_pwd; rm -rf $cvsdir
			echo -e "\nError during checkout."
			return 1
		fi

		echo `du -sh 2> /dev/null | cut -f1` 'downloaded from archive (download finished).'

		if [ `echo * | wc -w` -gt 1 ]; then
			# multi-module checkout
			echo "Multi-module package detected, relocating ..."
			mkdir t2-module.$$
			for x in *; do
				[ "$x" != "t2-module.$$" ] && mv -f $x t2-module.$$/
			done
			mkdir -p "$module"
			mv -f t2-module.$$/* "$module"
			rm -rf t2-module.$$
		fi

		cd `dirname $module`
		tarname="`basename $gzfile`"
		echo "Preparing files for final tarball."
		[ -n "$cmdpp" ] && eval "$cmdpp"

		if [ `find -type f | wc -l` -gt 4 ]; then
			local compressor="$(get_compressor $tarname)"
			# explicitly stable sort files
			find $module | sort |
			  egrep -v -e '/(CVS|.svn|.git|.hg)$' -e '/(CVS|.svn|.git|.hg)/' |
			  TZ=UTC tar -c --owner root --group root --mtime 20000101 \
				--no-recursion --files-from=- | ${compressor/ -d/} > $tarname
			mv $tarname $saved_pwd/$gzfile
		else
			echo "Too few files - assuming checkout failure."
			curlret=1
		fi

		cd $saved_pwd; rm -rf $cvsdir
		;;
	esac
	return $curlret
}
list_dtags() {
	{
	grep -aH '^\[D\] ' package/*/*/*.desc
	grep -aH '^\[D\] ' {architecture,target}/*/package/*/*.desc
	grep -aH '^[X0-9a-z]' target/*/download.txt 2> /dev/null |
		sed 's,:,:[D] ,'
	} | column_clean
}
list_cksums() {
	trap '' INT

	# we know we only have single spaces due to list_dtags' column_clean
	list_dtags | sed -n \
		-e 's,[^ ]* \([X0-9a-z]*\) \(.\)\([^ ]*\) -.*,\1 download/local/\2/\2\3,p' \
		-e 's,[^ ]* \([X0-9a-z]*\) \(.\)\([^ ]*\) [^-].*,\1 download/mirror/\2/\2\3,p'

	trap INT
}

list() {
	trap '' INT
	list_cksums | cut -f2- -d' '
	trap INT
}
list_unknown() {
	trap '' INT
	mkdir -p src/; list > src/down.$$.lst
	ls download/{Proxy,Proxy-auth,Me,Mirror-Cache} \
		download/mirror/{README,DOWNTEST,LAST-UPDATE} \
		>> src/down.$$.lst 2> /dev/null
	find download/* -type f -o -type l 2> /dev/null |
	while read fn; do
		grep -qx "$fn" src/down.$$.lst || echo "Unknown file: $fn"
	done
	rm -f src/down.$$.lst
	trap INT
}

list_missing() {
	trap '' INT
	list |
	while read fn; do
		[ -f "$fn" ] || echo "$fn"
	done
	trap INT
}
repository() {
	for repository; do
		packages `echo package/$repository/*/*.desc`
	done
}
required() {
	# The chosen config must exist
	#
	if [ ! -f config/$config/packages ]; then
		echo "ERROR: Config $config doesn't exist."
		echo "ERROR: try scripts/Config -cfg $config first."
		exit 1
	fi

	while read on a b repo pkg c; do
		package $pkg
	done < <(grep '^X' config/$config/packages)

	targetchain="$target" x="$target"
	while [ -f "target/$x/extends" ]; do
		x="$(< target/$x/extends)"
		targetchain="$targetchain $x"
	done

	for target in $targetchain; do
		if [ -f target/$target/download.txt ]; then
			while read cksum file url; do
				download_file "`source_file cksum $file "$url"`" "$url" "$cksum" "$target"
			done < target/$target/download.txt
		fi
	done
}
all() {
	trap '' INT
	list_dtags | cut -d ' ' -f 2- | while read cksum file url; do
		download_file "`source_file cksum $file "$url"`" "$url" "$cksum"
	done
	trap INT
}
package() {
	local pkg="$1"
	detect_confdir # relies on $pkg being set
	if [ ! "$confdir" ]; then
		echo "Error: Package $pkg not found!"
		downloaderror=1
		return 1
	fi
	parse_desc $pkg # relies on $pkg and $confdir being set
	while read cksum file url; do
		download_file "`source_file cksum $file "$url"`" "$url" "$cksum" "$repo" "$pkg"
	done < <(echo "$desc_D")
}
packages() {
	local descfile
	for arg; do
		case "$arg" in
		target/*)
			if [ ! -f $arg ]; then
				echo "Skipping \"$arg\" (not found)!"
				continue
			fi

			target="`echo $arg | cut -f2 -d/`"

			while read cksum file url; do
				download_file "`source_file cksum $file "$url"`" "$url" "$cksum" "$target"
			done < <(cat $arg)
			;;
		*)
			if [ "${arg%.desc}" != "$arg" ]; then
				arg="`echo $arg | cut -f3 -d/`"; fi
			# active extensions
			local extender=

			# pkg_*_{pre,post}.conf is only activated if the extender
			# is enabled in $config/packages, so we only download
			# files of those extenders
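			# e.g. if a (hypothetical) extender "foo" ships
			# package/*/foo/pkg_${arg}_pre.conf and has an active 'X'
			# line in config/$config/packages, its files are fetched, too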
			for extender in `ls -1 package/*/*/pkg_${arg}_{pre,post}.conf 2> /dev/null |
					cut -d/ -f3 | sort -u`; do
				if grep -q "^X .* $extender " \
					config/$config/packages; then
					echo_info "Also downloading $extender ..."
					package $extender
				fi
			done
			package $arg
			;;
		esac
	done
}
# Things to do only for downloading
#
if [ "${1:0:5}" != "-list" -a $checkonly = 0 ]; then
	# Set proxy information
	if [ -f download/Proxy ]; then
		proxy="$(< download/Proxy)"
		if [ "$proxy" ]; then
			curl_options="$curl_options --proxy $proxy"
		else
			echo "INFO: No proxy information, removing: download/Proxy"
			rm download/Proxy
		fi
	fi
	if [ -f download/Proxy-auth ]; then
		proxyauth="$(< download/Proxy-auth)"
		if [ "$proxyauth" ]; then
			curl_options="$curl_options --proxy-user $proxyauth"
			git_options="-c http.proxy=http://$proxyauth@$proxy"
		else
			echo "INFO: No proxy-auth information, removing: download/Proxy-auth"
			rm download/Proxy-auth
		fi
	fi

	# We need curl
	if [ -z "`type -p curl`" ]; then
		echo "ERROR: we need \`curl\` installed and available in \$PATH to proceed."
		exit 2
	fi
	# Things to do only once
	if [ $this_is_the_2nd_run = 0 ]; then
		# am I using a proxy?
		if [ "$proxy" ]; then
			echo "INFO: Setting proxy to: $proxy"
		fi
		if [ "$proxyauth" ]; then
			echo "INFO: Setting proxy authentication information."
		fi

		# do mirror detection
		detect_mirror
	fi
fi
case "$1" in
	-list) list ;;
	-list-unknown) list_unknown ;;
	-list-missing) list_missing ;;
	-list-cksums) list_cksums ;;

	-required) required ;;
	-all) all ;;

	-repository) shift; repository "$@" ;;

	-*|"") exec $0 --help ;;

	*) packages "$@" ;;
esac

exit $downloaderror