# --- T2-COPYRIGHT-NOTE-BEGIN ---
# T2 SDE: scripts/Download
# Copyright (C) 2004 - 2021 The T2 SDE Project
# Copyright (C) 1998 - 2003 ROCK Linux Project
#
# This Copyright note is generated by scripts/Create-CopyPatch,
# more information can be found in the files COPYING and README.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2.
# --- T2-COPYRIGHT-NOTE-END ---
# Run this command from the T2 directory as scripts/Download [ options ]
#
# It enables you to download source files as described in the package
# definitions (optionally using a mirroring 'cache' server).
#
# This script also allows for checksum display/validation.
. scripts/functions.in
. misc/output/parse-config

eval "$(egrep '^(sdever)=' scripts/parse-config)"
30 if [ "$1" = '--help' ]; then
34 echo " scripts/Download [options] [ Package(s) ]"
35 echo " scripts/Download [options] [ Desc file(s) ]"
36 echo " scripts/Download [options] -repository Repositories"
37 echo " scripts/Download [options] { -all | -required }"
39 echo " Where [options] is an alias for:"
40 echo " [ -cfg <config> ] [ -nock ] [ -alt-dir <AlternativeDirectory> ]"
41 echo " [ -mirror <URL> | -check ] [ -try-questionable ] [ -notimeout ]"
42 echo " [ -longtimeout ] [ -curl-opt <curl-option>[:<curl-option>[:..]] ]"
43 echo " [ -proxy <server>[:<port>] ] [ -proxy-auth <username>[:<password>] ]"
44 echo " [ -copy ] [ -move ]"
46 echo " On default, this script auto-detects the best T2 SDE mirror."
48 echo " Mirrors can also be a local directories in the form of: 'file:///<dir>'"
50 echo " scripts/Download -mk-cksum Filename(s)"
51 echo " scripts/Download [ -list | -list-unknown | -list-missing | -list-cksums ]"
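	# Illustrative invocations (package and config names are placeholders):
	#   scripts/Download -required            # everything the active config needs
	#   scripts/Download -cfg mycfg bash      # sources of a single package
	#   scripts/Download -check bash          # only verify checksums of existing files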
# -mk-cksum mode (display T2 type package checksum): it
# displays the checksum T2 validates against.
#
# Currently bz2, tbz2, zst, gz, tgz, Z and xz files are unpacked first
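# A rough usage sketch of this mode (the file name is hypothetical); it prints
# the checksum of the uncompressed data for each given file:
#   $ scripts/Download -mk-cksum download/mirror/b/bash-5.1.tar.bz2
#   download/mirror/b/bash-5.1.tar.bz2: <checksum of the uncompressed data>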
61 if [ "$1" = -mk-cksum ]; then
66 if [ ! -f "$x" ]; then
68 elif [ "${x%.bz2}" != "$x" -o "${x%.tbz2}" != "$x" -o "${x%.tbz}" != "$x" ]; then
69 bunzip2
< "$x" |
${ck}sum | cut
-f1 -d' '
70 elif [ "${x%.zst}" != "$x" -o "${x%.tzst}" != "$x" -o "${x%.zstd}" != "$x" ]; then
71 zstd
-d < "$x" |
${ck}sum | cut
-f1 -d' '
72 elif [ "${x%.xz}" != "$x" ]; then
73 xzcat
< "$x" |
${ck}sum | cut
-f1 -d' '
74 elif [ "${x%.gz}" != "$x" -o "${x%.tgz}" != "$x" ]; then
75 gunzip
< "$x" |
${ck}sum | cut
-f1 -d' '
76 elif [ "${x%.Z}" != "$x" ]; then
77 uncompress < "$x" |
${ck}sum | cut
-f1 -d' '
79 ${ck}sum < "$x" | cut
-f1 -d' '
# Handle options passed on the command line

mkdir -p src/ download/; config=default

mirror=''; checkonly=0; altdir=''
tryques=0; nocheck=0; options='-this_is_the_2nd_run '
notimeout=0; curl_options='-A T2-downloader --disable-epsv --location -f'
altcopy=link; verbose=1
# load options from the environment variable T2DOWNOPT
# and then clean it to avoid duplication in child processes

set -- $T2DOWNOPT "$@"
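# For example (illustrative values), a proxy can be preset for every run:
#   T2DOWNOPT="-proxy proxy.example.com:3128" scripts/Download -required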
while [ $# -gt 0 ]; do
	-this_is_the_2nd_run)
		this_is_the_2nd_run=1

		options="$options -cfg $2"
		config="$2"; shift ;;

		options="$options -q"

	# -nock skips checksum checking (don't use lightly)
		options="$options -nock"

	# -mirror uses a mirror for finding source files
		if [ "$2" = auto ]; then
			rm -f download/Mirror-Cache
		else
			echo "$2 $sdever" > download/Mirror-Cache
		options="$options -mirror $2"

	# -check just validates the file using the checksum
		options="$options -check"

	# don't add timeout curl options
		options="$options -notimeout"

	# use longer curl timeouts
		options="$options -longtimeout"

	# additional curl options
		options="$options -curl-opt $2"
		curl_options="$curl_options `echo $2 | tr : ' '`"

	# proxy option for curl
		echo -n "$2" > download/Proxy
		options="$options -proxy $2"

	# proxy authentication for curl - can be seen with ps!
		echo -n "$2" > download/Proxy-auth
		chmod 600 download/Proxy-auth
		options="$options -proxy-auth $2"

	# check for an alternative directory where to search for
	# package source tarballs
		altdir=$( cd $2; pwd -P )
		options="$options -alt-dir $2"

	# also try to download questionable URLs
		options="$options -try-questionable"

	-move) altcopy=move ;;
	-copy) altcopy=copy ;;
# Read some config values
target=`grep '^export SDECFG_TARGET=' config/$config/config 2>/dev/null |
	cut -f2 -d= | tr -d "'"`
arch=`grep '^export SDECFG_ARCH=' config/$config/config 2>/dev/null |
	cut -f2 -d= | tr -d "'"`
arch="${arch:-none}"; target="${target:-none}"
if [ $notimeout -eq 0 ]; then
	curl_options="$curl_options -y 20 -Y 10 --connect-timeout 60"
elif [ $notimeout -eq 1 ]; then
	curl_options="$curl_options -y 60 -Y 1 --connect-timeout 300"

# Disable checking for certificates on https downloads
curl_options="$curl_options -k"
# cksum_chk filename cksum origfile
#
# This function verifies the checksum. If it fails, it renames the file
# to file.cksum-err and returns failure.
#
# It seems like the [ ] command has problems comparing large numbers,
# that's why a text comparison is used here.
#
# Nothing is done if the checksum is '0' or consists only of 'X' characters.
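# Typical use, as seen further below for a raw (uncompressed) download:
#   cksum_chk "$gzfile" $cksum "$gzfile" || downloaderror=1
# On a mismatch the third argument is renamed to <file>.cksum-err.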
	[ $nocheck = 1 -o -z "${2//0/}" -o -z "${2//X/}" ] && return 0

	# determine cksum type
	if [ "$ck" != "$y" ]; then
	else case "${#ck}" in

	local x="`${ck}sum "$1" | cut -f1 -d' '`"
	if [ "$x" != "$y" ]; then
		# Add .cksum-err extension to filename:
		echo "Cksum ERROR: $3.cksum-err ($x)"
		mv "$3" "$3.cksum-err"; return 1
# output (multiline) message only if we are not in quiet mode
	if [ "$verbose" == 1 ]; then
		echo "$@" | sed -e 's,^,INFO: ,'

# output (multiline) message always
	echo "$@" | sed -e 's,^,INFO: ,'
# Autodetect best mirror and save its URL in $mirror
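# The cached result lives in download/Mirror-Cache as a single line of the
# form "<mirror-url> <sde-version>", or "none <version>" to force the
# original download locations; the URL below is only an illustration:
#   https://mirror.example.org/t2/sources 21.x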
if [ -f download/Mirror-Cache ]; then
	echo_info "To force a new mirror auto-detection, remove: download/Mirror-Cache"
	read mirror mirrorver < download/Mirror-Cache
	mirror=${mirror:-none}
	if [ "$mirror" = "none" ]; then
		echo_info "Found download/Mirror-Cache: none" \
			"(use the original download locations)"
	elif [ "$mirrorver" != "$sdever" -a "$mirrorver" != "any" ]; then
		echo_warn "Cached mirror URL in download/Mirror-Cache is outdated."
		echo_info "Found cached mirror URL in download/Mirror-Cache:"
	echo_warn "Auto-detecting best mirror:"
	echo_warn "Downloading mirror-list from: t2sde.org"
	curl -s -S $curl_options -o src/Download-Mirror-List \
		"https://t2sde.org/cgi-bin/t2-mirrors.cgi?$sdever"

	bestval=0; result='No Mirror Found!'
	me=$( [ -s download/Me ] && cat download/Me )
	while read mirror_name; do
		if [ "${mirror_name#=}" != "$mirror_name" ]; then
			mirror_name="${mirror_name#= }"
			mirror_name="${mirror_name% =}"
		case "$mirror_name" in ($me) continue ;; esac

		echo -n "INFO: Testing <$mirror_name> ..."
		val="$(curl -s $curl_options -m 20 "${mirror_url%/}/DOWNTEST" \
			-w "ok %{speed_download}" -o /dev/null)"
		if [ "$val" = "${val#ok }" -o "$val" = "ok 0.000" ]; then
			xval=`echo ${val#ok } | tr -d .,`; echo " $val B/s"
			if [ "$xval" -gt "$bestval" ]; then
				bestval=$xval; mirror="${mirror_url%/}"
				result="Using: <$mirror>"
	done < src/Download-Mirror-List
	echo "$mirror $sdever" > download/Mirror-Cache
# download_file local-filename download-location cksum repo pkg
#
# This function decides whether to download directly or from a mirror,
# validates the checksum, etc.
# Calls download_file_now to do the actual download.
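# An illustrative call, matching how the package loop below invokes it
# (file name, URL and checksum are placeholders):
#   download_file download/mirror/b/bash-5.1.tar.gz \
#       http://ftp.gnu.org/gnu/bash/bash-5.1.tar.gz 1234567890 base bash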
	local gzfile="$1" location="$2" cksum="$3" repo="$4" pkg="$5"

	# Make src directory for creating tar balls
	bzfile="`bz2filename "$gzfile"`"

	# Remove optional '-' prefix from $location
	[ "${location:0:1}" == '-' ] && location="${location:1}"

	lkfile="src/down.lockfile.`echo $bzfile | tr / -`"

	# Check if it's already there
	[ -s "$bzfile" -a $checkonly != 1 ] && return 0
	if [ -s "$lkfile" ]; then
		echo "Found $lkfile -> skip download."
	trap 'rm -f "$lkfile"' INT

	# Check if we only want to test the cksum(s)
	if [ $checkonly = 1 ]; then
		if [ ! -f "$bzfile" ]; then
			echo "File missing: $bzfile"
			rm -f "$lkfile"; trap INT; return 1
		if [ -z "${cksum##X*}" ]; then
			echo "No checksum (ignore): $bzfile"
			rm -f "$lkfile"; trap INT; return 1
		if [ "$cksum" = 0 ]; then
			echo "No checksum (missing): $bzfile"
			rm -f "$lkfile"; trap INT; return 1

	elif [ -s "$gzfile" ]; then
		echo; echo "Already downloaded: $pkg:$gzfile"
		echo; echo "Downloading $pkg:$gzfile"
	# Existing *.cksum-err

	if [ -s "$gzfile.cksum-err" ]; then
		# cksum-err file already exists:
		echo "ERROR: found: $gzfile.cksum-err"
		echo "ERROR: That means that we downloaded the" \
			"file already and it had an"
		echo "ERROR: incorrect checksum. Remove the" \
			"*.cksum-err file to force a"
		echo "ERROR: new download of that file."
		rm -f "$lkfile"; trap INT; return 1

	# Existing *.extck-err

	if [ -s "$gzfile.extck-err" ]; then
		# extck-err file already exists:
		echo "ERROR: found: $gzfile.extck-err"
		echo "ERROR: That means that we downloaded the" \
			"file already and its content"
		echo "ERROR: did not match its filename extension." \
			"Remove the *.extck-err file"
		echo "ERROR: to force a new download of that file."
		rm -f "$lkfile"; trap INT; return 1
	if [ "$location" != "${location#\?}" ]; then
		if [ "$tryques" = 0 ]; then
			echo "ERROR: URL is marked as questionable." \
				"Not downloading this file."
			rm -f "$lkfile"; trap INT; return 1
		echo "WARNING: URL is marked as questionable." \
			"Downloading it anyway."
		location="${location#\?}"
	# Make directory (if required)

	if [ ! -d `dirname "$bzfile"` ]; then
		mkdir -p `dirname "$bzfile"`

	# Alternative Directory

	if [ "$altdir" ]; then
		altfile=$(find $altdir/ -name `basename $bzfile` | head -n 1)

		# FIXME: compatibility, can be removed sooner or later ...
		# Check old download dir layout
		if [ -z "$altfile" ]; then
			if [ -f "download/$repo${pkg:+/}$pkg/`basename $bzfile`" ]; then
				altfile="download/$repo${pkg:+/}$pkg/`basename $bzfile`"

		if [ "$altfile" ]; then
			echo "Found `basename $bzfile` as: $altfile"
			if [ "$altcopy" = 'link' ]; then
				cp -lv $altfile $bzfile
			elif [ "$altcopy" = 'copy' ]; then
				cp -v $altfile $bzfile
			elif [ "$altcopy" = 'move' ]; then
				mv -v $altfile $bzfile
	read mirror mirrorver < download/Mirror-Cache

	if [ -n "$mirror" -a "$mirror" != "none" -a -z "${bzfile##download/mirror/*}" ]; then
		if ! download_file_now "!$mirror/${bzfile#download/mirror/}" $bzfile $bzfile; then
			echo "INFO: download from mirror failed, trying original URL."
			download_file_now "$location" $gzfile $bzfile \
		# don't want to use mirror
		download_file_now "$location" $gzfile $bzfile \

	if [ ! -s "$gzfile" ]; then
		rm -f "$lkfile"; trap INT; return 1
	if [[ $gzfile = *.gpg ]]; then
		gzfile=${gzfile%.gpg}
		if [ -f $gzfile.gpg ]; then
			echo "unsigning GnuPG file: $gzfile.gpg"
		if [ ! -f $gzfile ]; then
			echo "unsigning failed"
			rm -f "$lkfile"; trap INT; return 1
	# Convert a .gz to .bz2 and test checksum
	echo "compressing gzip file + cksum-test: $gzfile"
	gunzip < "$gzfile" > src/down.$$.dat
	if cksum_chk src/down.$$.dat $cksum "$gzfile"; then
		$compressor < src/down.$$.dat > "$bzfile"; rm -f "$gzfile"
	rm -f src/down.$$.dat
	# Convert a .lzip to .bz2 and test checksum
	echo "compressing lzip file + cksum-test: $gzfile"
	lzip -d < "$gzfile" > src/down.$$.dat
	if cksum_chk src/down.$$.dat $cksum "$gzfile"; then
		$compressor < src/down.$$.dat > "$bzfile"; rm -f "$gzfile"
	rm -f src/down.$$.dat
	# Compress a plain .tar to .bz2 and test checksum
	echo "compressing tar file + cksum-test: $gzfile"
	if cksum_chk "$gzfile" $cksum "$gzfile"; then
		$compressor < "$gzfile" > "$bzfile"; rm -f "$gzfile"
	# Convert a .Z to .bz2 and test checksum
	echo "compressing Z file + cksum-test: $gzfile"
	uncompress < "$gzfile" > src/down.$$.dat
	if cksum_chk src/down.$$.dat $cksum "$gzfile"; then
		$compressor < src/down.$$.dat > "$bzfile"; rm -f "$gzfile"
	rm -f src/down.$$.dat
	# Convert a .xz to .bz2 and test checksum
	echo "compressing xz file + cksum-test: $gzfile"
	xzcat < "$gzfile" > src/down.$$.dat
	if cksum_chk src/down.$$.dat $cksum "$gzfile"; then
		$compressor < src/down.$$.dat > "$bzfile"; rm -f "$gzfile"
	rm -f src/down.$$.dat
	# Convert a .bz2 to .bz2 and test checksum
	echo "compressing bzip2 file + cksum-test: $gzfile"
	bzcat < "$gzfile" > src/down.$$.dat
	if cksum_chk src/down.$$.dat $cksum "$gzfile"; then
		$compressor < src/down.$$.dat > "$bzfile"; rm -f "$gzfile"
	rm -f src/down.$$.dat
	# Execute a cksum test on a zstd file
	echo "cksum-test (zstd): $bzfile"
	if [ $nocheck = 0 ]; then
		zstdcat < "$bzfile" > src/down.$$.dat
		cksum_chk src/down.$$.dat $cksum "$bzfile" \
		rm -f src/down.$$.dat
	# Execute a cksum test on a raw data file
	echo "cksum-test (raw): $gzfile"
	cksum_chk "$gzfile" $cksum "$gzfile" || downloaderror=1

	# Free lock and finish
	rm -f "$lkfile"; trap INT; return 0
# download_file_now location remote_filename local_filename
#
# This function executes the actual download using curl.
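# An illustrative call (URL and file names are placeholders); the location is
# usually a directory-style URL, to which the wanted file name gets appended:
#   download_file_now http://ftp.example.org/pub/foo/foo-1.0.tar.gz \
#       download/mirror/f/foo-1.0.tar.gz download/mirror/f/foo-1.0.tar.bz2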
download_file_now() {
	local location="$1" gzfile="$2" bzfile="$3" curlret=0

	manual://*) url="$location" ;;
	!*) url="${location#!}" ;;
	*) url="${location%/*}/${gzfile##*/}" ;;
		# Determine if the file has already been downloaded
		# manually. For this we first look in $HOME, then in
		downloadpath=${altdir:-$HOME}
		downloadfile="${gzfile##*/}"
		if [ -e $downloadpath/$downloadfile ]; then
			location="file://$downloadpath/"
			location="http://${url#manual://}"
			# No manual download has taken place yet,
			# so inform the user to do so.
	The file $downloadfile cannot be fetched automatically,
	please visit: $location
	and download it manually into $HOME or somewhere else using -alt-dir

		# I am too lazy to do the copy and conversion myself,
		# so I use this function again with a modified
		download_file_now "$location" $gzfile $bzfile
	http://*|https://*|ftp://*|file://*)
		if [ -s "$gzfile.incomplete" ]; then
			echo "INFO: Trying to resume previous download .."
		[ -s download/Translations ] && trfile=download/Translations || trfile=misc/share/DownloadTranslations
		trurl="$( echo "$url" | sed -f $trfile )"
		if [ -n "$trurl" -a "$trurl" != "$url" ]; then
			echo "INFO: url translated."
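		# The translation file is a plain sed script; a hypothetical rule
		# rewriting one host to another could look like:
		#   s,^ftp://ftp\.gnu\.org/,https://ftpmirror.gnu.org/,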
		curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' --progress-bar $resume $curl_options "$url" -o "$gzfile.incomplete"

		[ $curlret -eq 33 -o $curlret -eq 36 ]; then
			echo "INFO: Resuming download not possible. ->" \
				"Overwriting old file."
			rm -f "$gzfile.incomplete"
			curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' --progress-bar $curl_options "$url" -o "$gzfile.incomplete"
		if [ $curlret -ne 0 ]; then
				echo "WARNING: Got only some of the" \
					"file. A re-run of $0"
				echo "WARNING: is required to complete" \
				echo -e '\rWARNING: CURL got a SIGINT' \
					"(someone pressed Ctrl-C). A re-run of"
				echo "WARNING: $0 is required to complete" \
					"the download."; sleep 1 ;;
				echo "$curlret $gzfile $url" \
					>> src/Download-Errors
				echo -e '\rERROR: CURL Returned Error' \
					"$curlret. Please read" \
					"the curl manpage." ;;
		elif [ ! -s "$gzfile.incomplete" ]; then
			echo "0 $gzfile $url" >> src/Download-Errors
			echo "ERROR: CURL returned success but" \
			typeexpr="gzip compressed data" ;;
			typeexpr="bzip2 compressed data" ;;
			typeexpr="lzip compressed data" ;;
			typeexpr="Zstandard compressed data" ;;
			typeexpr="compress'd data" ;;
			typeexpr="Zip archive data" ;;
			typeexpr="Java archive data (JAR)" ;;
			typeexpr="tar archive" ;;
			typeexpr="XZ compressed data" ;;
			echo "WARNING: Unknown file extension: $gzfile"

		if file "$gzfile.incomplete" | grep -v "$typeexpr"
			echo "ERROR: File type does not match" \
				"filename ($typeexpr)!"
			mv "$gzfile.incomplete" "$gzfile.extck-err"
			mv "$gzfile.incomplete" "$gzfile"
		protocol="${url%%://*}"

		# we need to use $location - $url is already mangled above -ReneR
		# $protocol://$url $options
		url="`echo "$location" | sed "s,$protocol://\([^ ]*\).*,\1,"`"
		options="`echo "$location" | cut -s -d' ' -f2-`"
		# the first option is the module name
		module="${options%% *}"
		options="${options#* }"
		cmdline="cvs -z4 -Q -d $url co -P $options $module"

		# sometimes cvs wants to read ~/.cvspass just for fun ..
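		# A hypothetical cvs location illustrating the parsing above
		# (server and module names are placeholders):
		#   cvs://:pserver:anonymous@cvs.example.org:/cvsroot mymodule -D 2021-01-01
		# -> url=:pserver:anonymous@cvs.example.org:/cvsroot, module=mymodule,
		#    remaining options are passed to 'cvs co'.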
	svn|svn+*) # allow any svn+ other transport and strip the svn+ part off
		url="${protocol#svn+}://$url"
		options="${options## *}"
		if [ "$options" == "" -o "${options:0:1}" == "-" ]; then
			# the module is the last dir of $url,
			# w/ or w/o trailing slash (/)
			module="${module##*/}"
			# the first option is the module name
			module="${options%% *}"
			[ "$module" = "$options" ] &&
				options= || options="${options#* }"
		cmdline="svn export $options $url $module"
	git|git+*) # allow any git+ other transport and strip the git+ part off
		url="${protocol#git+}://$url"
		cmdline="git clone --recursive $git_options $url $module"
		options="${options#* }"
		[ -n "$options" ] && cmdpp="(cd $module; git checkout $options)"
	hg|hg+*) # allow any hg+ other transport and strip the hg+ part off
		url="${protocol#hg+}://$url"
		cmdline="hg clone $url $module"
		options="${options#* }"
		[ -n "$options" ] && cmdpp="(cd $module; hg clone $options)"
		echo "$cmdclient unrecognized!"

	cvsdir="src/down.${protocol}dir.`echo $bzfile | tr / -`"
	saved_pwd=$PWD; mkdir -p $cvsdir; cd $cvsdir

	$cmdline || touch .cvs_error

	while fuser .cvs_output &> /dev/null; do
		echo -ne `nice du -sh 2> /dev/null | \
			cut -f1` 'downloaded from archive so far ... \r'

	if [ -f .cvs_error ]; then
		cd $saved_pwd; rm -rf $cvsdir
		echo -e "\nError during checkout."

	echo `du -sh 2> /dev/null | \
		cut -f1` 'downloaded from archive (download finished).'
	if [ `echo * | wc -w` -gt 1 ]; then
		# multi-module module
		echo "Multi-module package detected, relocating ..."
		[ "$x" != "t2-module.$$" ] && mv -f $x t2-module.$$/
		mv -f t2-module.$$/* "$module"

	tarname="`basename $bzfile`"
	echo "Preparing files for final tarball."
	[ -n "$cmdpp" ] && eval "$cmdpp"
	find -type d \( -name CVS -o -name .svn -o -name .git -o -name .hg \) | xargs rm -rf

	if [ `find -type f | wc -l` -gt 4 ]; then
		find `basename $module` | xargs touch -t 200001010000
		tar --owner root --group root \
			-c `basename $module` | $compressor > $tarname
		mv $tarname $saved_pwd/$bzfile
		echo "Too few files - assuming checkout failure."

	cd $saved_pwd; rm -rf $cvsdir
	grep -H '^\[D\] ' package/*/*/*.desc
	grep -H '^\[D\] ' {architecture,target}/*/package/*/*.desc
	grep -H '^[X0-9a-z]' target/*/download.txt | sed 's,:,:[D] ,'
	# we know we only have single spaces due to list_dtags' column_clean
	list_dtags | sed -n \
		-e 's,[^ ]* \([X0-9a-z]*\) \(.\)\([^ ]*\) -.*,\1 download/local/\2/\2\3,p' \
		-e 's,[^ ]* \([X0-9a-z]*\) \(.\)\([^ ]*\) [^-].*,\1 download/mirror/\2/\2\3,p'
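	# Roughly, a dtag line like (package name and checksum are illustrative)
	#   base/bash/bash.desc:[D] 1234567890 bash-5.1.tar.gz http://ftp.gnu.org/gnu/bash/
	# is mapped to "1234567890 download/mirror/b/bash-5.1.tar.gz"; a leading '-'
	# in the URL field selects download/local/ instead of download/mirror/.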
	list_cksums | cut -f2- -d' '
	mkdir -p src/; list | bz2filename > src/down.$$.lst
	ls download/{Proxy,Proxy-auth,Me,Mirror-Cache} \
		download/mirror/{README,DOWNTEST,LAST-UPDATE} \
		>> src/down.$$.lst 2> /dev/null
	find download/* -type f -o -type l 2> /dev/null |
		grep -qx "$fn" src/down.$$.lst || echo "Unknown file: $fn"
	rm -f src/down.$$.lst
	list | bz2filename | \
		[ -f "$fn" ] || echo "$fn"
	packages `echo package/$repository/*/*.desc`
# Chosen config must exist

if [ ! -f config/$config/packages ]; then
	echo "ERROR: Config $config doesn't exist."
	echo "ERROR: try scripts/Config -cfg $config first."

while read on a b repo pkg c; do
done < <(grep '^X' config/$config/packages)
targetchain="$target"; x="$target"
while [ -f "target/$x/extends" ]; do
	x="$(< target/$x/extends)"
	targetchain="$targetchain $x"
for target in $targetchain; do
	if [ -f target/$target/download.txt ]; then
		while read cksum file url; do
			download_file "`source_file cksum $file "$url"`" "$url" "$cksum" "$target"
		done < target/$target/download.txt
list_dtags | cut -d ' ' -f 2- | while read cksum file url; do
	download_file "`source_file cksum $file "$url"`" "$url" "$cksum"
	detect_confdir	# relies on $pkg being set
	if [ ! "$confdir" ]; then
		echo "Package $pkg not found!"
	parse_desc $pkg	# relies on $pkg and $confdir being set
	while read cksum file url; do
		download_file "`source_file cksum $file "$url"`" "$url" "$cksum" "$repo" "$pkg"
	done < <(echo "$desc_D")
	if [ ! -f $arg ]; then
		echo "Skipping \"$arg\" (not found)!"
	target="`echo $arg | cut -f2 -d/`"
	while read cksum file url; do
		download_file "`source_file cksum $file "$url"`" "$url" "$cksum" "$target"
	if [ "${arg%.desc}" != "$arg" ]; then
		arg="`echo $arg | cut -f3 -d/`"; fi
	# pkg_*_{pre,post}.conf is only activated if the extender
	# is enabled in $config/packages, so we only
	# download files of those extenders
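	# For example (hypothetical extender), if package/*/*/pkg_bash_pre.conf
	# exists and its owning package is marked 'X' in config/$config/packages,
	# that extender's sources are downloaded as well.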
	for extender in `ls -1 package/*/*/pkg_${arg}_{pre,post}.conf 2> /dev/null |
			cut -d/ -f3 | sort -u`; do
		if grep -q "^X .* $extender " \
			config/$config/packages; then
			echo_info "Also downloading $extender ..."
# Things to do only for downloading

if [ "${1:0:5}" != "-list" -a $checkonly = 0 ]; then
	# Set proxy information
	if [ -f download/Proxy ]; then
		proxy="$(< download/Proxy)"
		if [ "$proxy" ]; then
			curl_options="$curl_options --proxy $proxy"
			echo "INFO: No proxy information, removing download/Proxy."
	if [ -f download/Proxy-auth ]; then
		proxyauth="$(< download/Proxy-auth)"
		if [ "$proxyauth" ]; then
			curl_options="$curl_options --proxy-user $proxyauth"
			git_options="-c http.proxy=http://$proxyauth@$proxy"
			echo "INFO: No proxy-auth information, removing download/Proxy-auth."
			rm download/Proxy-auth
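	# Both files are written by the -proxy and -proxy-auth options above and
	# hold a single line each, e.g. (values are placeholders):
	#   download/Proxy:      proxy.example.com:3128
	#   download/Proxy-auth: someuser:secret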
if [ -z "`type -p curl`" ]; then
	echo "ERROR: we need \`curl\` installed and available on \$PATH to proceed."
# Things to do only once

if [ $this_is_the_2nd_run = 0 ]; then
	# am I using a proxy?
	# -- say I'm doing it even when I already did ;-)
	if [ "$proxy" ]; then
		echo "INFO: Setting proxy to $proxy."
	if [ "$proxyauth" ]; then
		echo "INFO: Setting proxy authentication information."

	# do mirror detection
	-list-unknown) list_unknown ;;
	-list-missing) list_missing ;;
	-list-cksums) list_cksums ;;
	-required) required ;;
	-repository) shift; repository "$@" ;;
	-*|"") exec $0 --help ;;