OpenSDE Framework (without history before r20070)

#!/bin/bash
# --- SDE-COPYRIGHT-NOTE-BEGIN ---
# This copyright note is auto-generated by ./scripts/Create-CopyPatch.
#
# Filename: bin/sde-download
# Copyright (C) 2006 - 2007 The OpenSDE Project
# Copyright (C) 2004 - 2006 The T2 SDE Project
# Copyright (C) 1998 - 2003 Clifford Wolf
#
# More information can be found in the files COPYING and README.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License. A copy of the
# GNU General Public License can be found in the file COPYING.
# --- SDE-COPYRIGHT-NOTE-END ---

#Description: Download sources
#Alias: get

set -e

[ -n "$SDEROOT" ] ||
    export SDEROOT=$( cd "${0%/*}/.."; pwd -P )

. $SDEROOT/lib/libsde.in
. $SDEROOT/lib/functions.in
download_usage() {
    cat <<EOT
Usage:

    sde download <options> [ Package(s) ]
    sde download <options> [ Desc file(s) ]
    sde download <options> -repository Repositories
    sde download <options> [ -all | -required ]

Where <options> is shorthand for:
    [ -cfg <config> ] [ -nock ] [ -alt-dir <AlternativeDirectory> ]
    [ -mirror <URL> | -check ] [ -try-questionable ] [ -notimeout ]
    [ -longtimeout ] [ -curl-opt <curl-option>[:<curl-option>[:..]] ]
    [ -copy ] [ -move ]

By default, this script auto-detects the best OpenSDE mirror.
Mirrors can also be local directories in the form 'file:///<dir>'.

    sde download [ -list | -list-missing | -list-cksums ]

See the '-mirror none' output for help on bypassing the official mirrors.
EOT
}
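
# Illustrative invocations (the package, config and repository names below are
# hypothetical; they only demonstrate the call forms listed in the usage text):
#
#   sde download foo
#   sde download -cfg mycfg -required
#   sde download -repository base
#   sde download -list-missing
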
umask 022
cd "$SDEROOT"

# Handle options passed on the command line
#
mkdir -p tmp/ download/

# Load system wide configuration for this tool
#
config=default mirror= altdir= proxy= proxyauth=

if [ -s "$SDESETTINGS" ]; then
    eval $( $SDEROOT/bin/sde-config-ini -F "$SDESETTINGS" download )
    eval $( $SDEROOT/bin/sde-config-ini -F "$SDESETTINGS" download-$sdever )
fi
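
# A rough sketch of the download-related keys this expects in $SDESETTINGS
# (the exact INI layout is an assumption; the key names mirror the variables
# initialized above, the values are made up):
#
#   [download]
#   mirror = http://mirror.example.org/opensde
#   altdir = /srv/distfiles
#   proxy = proxy.example.org:3128
#   proxyauth = user:password
#
# sde-config-ini is expected to emit these keys as shell assignments, which
# the eval calls above pick up.
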
this_is_the_2nd_run=0
checkonly=0 tryques=0 nocheck=0 notimeout=0
options='-this_is_the_2nd_run '
curl_options='-A opensde-downloader --disable-epsv --location -f'
altcopy=link ; verbose=1
downloaderror=0

#
while [ $# -gt 0 ]; do
    case "$1" in
        -this_is_the_2nd_run)
            this_is_the_2nd_run=1
            ;;

        -cfg)
            options="$options -cfg $2"
            config="$2" ; shift ;;

        -q)
            options="$options -q"
            verbose=0 ;;
        -nock)
            # -nock skips checksum checking (don't use lightly)
            options="$options -nock"
            nocheck=1 ;;
        -mirror)
            # -mirror uses a mirror for finding source files
            if [ "$2" = none ]; then
                echo
                echo "The option '-mirror none' is not supported anymore!"
                echo
                echo "You may edit \$HOME/.sde/settings if you really"
                echo "want to use the original download resources. However, this"
                echo "is not supported and if such a download fails, it is not"
                echo "a bug in OpenSDE and does not necessarily need fixing."
                echo
                exit 1
            elif [ "$2" = auto ]; then
                mirror=
            else
                options="$options -mirror $2"
                mirror="$2"
                $SDEROOT/bin/sde-config-ini -F "$SDESETTINGS" "download-$sdever.mirror=$mirror"
            fi
            shift ;;
        -check)
            # -check just validates the file using the checksum
            options="$options -check"
            checkonly=1 ;;

        -notimeout)
            # don't add timeout curl options
            options="$options -notimeout"
            notimeout=2 ;;
        -longtimeout)
            # use longer curl timeouts instead of the defaults
            options="$options -longtimeout"
            notimeout=1 ;;
        -curl-opt)
            # additional curl options
            options="$options -curl-opt $2"
            curl_options="$curl_options `echo $2 | tr : ' '`"
            shift ;;

        -alt-dir)
            # check for an alternative directory where to search for
            # package source tarballs
            altdir=$( cd $2; pwd -P )
            options="$options -alt-dir $2"
            shift ;;

        -try-questionable)
            # also try to download questionable URLs
            options="$options -try-questionable"
            tryques=1 ;;

        -move) altcopy=move ;;
        -copy) altcopy=copy ;;

        *) break ;;
    esac
    shift
done
if [ $notimeout -eq 0 ] ; then
    curl_options="$curl_options -y 10 -Y 10 --connect-timeout 60"
fi
if [ $notimeout -eq 1 ] ; then
    curl_options="$curl_options -y 60 -Y 1 --connect-timeout 300"
fi

# proxy (server[:port])
if [ -n "$proxy" ]; then
    curl_options="$curl_options --proxy $proxy"

    # proxy needs auth (username[:password])
    [ -z "$proxyauth" ] || curl_options="$curl_options --proxy-user $proxyauth"

    # only show once
    [ $this_is_the_2nd_run = 1 ] || echo_info "Using <$proxy> as ${proxyauth:+authenticated }http proxy."
fi

# Disable checking for certificates on https downloads
curl_options="$curl_options -k"
# Auto-detect the best mirror and save its URL in $mirror
#
detect_mirror() {
    if [ "$mirror" = "none" ] ; then
        echo_info "Using original download locations only."
        return
    elif [ "$mirror" = "broken" ]; then
        echo_warning "Previous detection of the mirror failed, trying again."
    elif [ -n "$mirror" ]; then
        echo_info "Using mirror <$mirror>."
        return
    fi

    echo_warning "Auto-detecting best mirror ..."
    echo_info "Downloading mirror-list from opensde.net."

    curl -s -S $curl_options -o tmp/Download-Mirror-List \
        "http://opensde.net/opensde-download-mirrors/$sdever"

    if [ -r tmp/Download-Mirror-List ]; then
        bash lib/sde-download/mirror-test.sh < tmp/Download-Mirror-List
    fi 2>&1 | echo_info
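
    # mirror-test.sh is expected to record the chosen mirror in $SDESETTINGS
    # (as download-$sdever.mirror); it is re-read right below.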
    # read new mirror info
    mirror=
    eval $( $SDEROOT/bin/sde-config-ini -F "$SDESETTINGS" download-$sdever )

    if [ -z "$mirror" ]; then
        echo_error "Mirror detection loop hit a bug!"
    elif [ "$mirror" == "broken" ]; then
        echo_warning "No Mirror Found!"
    else
        echo_info "Using mirror <$mirror>."
    fi
}
# download_file local-filename download-location cksum repo pkg
#
# This function decides whether to download directly or from a mirror,
# validates the checksum, etc.
# It calls download_file_now to do the actual download.
#
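# An illustrative call (all values made up): a mirrorable tarball of package
# "foo" from repository "base" would be requested roughly as
#
#   download_file download/mirror/f/foo-1.0.tar.bz2 \
#       http://example.org/dist/foo-1.0.tar.gz 1234567890 base foo
#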
download_file() {
    # Init
    #
    local gzfile="$1" location="$2" cksum="$3" repo="$4" pkg="$5"

    # Make sure the tmp/ directory exists (lock files etc. live there)
    mkdir -p tmp/

    # Tarball file name:
    bzfile="`bz2filename "$gzfile"`"

    # Remove optional '-' prefix from $location
    [ "${location:0:1}" == '-' ] && location="${location:1}"

    # Lock file name:
    lkfile="tmp/down.lockfile.`echo $bzfile | tr / -`"
    # Check if it's already there
    #
    [ -s "$bzfile" -a $checkonly != 1 ] && return 0

    # Make locking
    #
    if [ -s "$lkfile" ]; then
        echo "Found $lkfile -> skip download."
        return 0
    fi
    trap 'rm -f "$lkfile"' INT
    echo $$ > "$lkfile"

    # Check if we only like to test the cksum(s)
    #
    if [ $checkonly = 1 ] ; then
        gzfile="$bzfile"
        if [ ! -f "$bzfile" ] ; then
            echo "File missing: $bzfile"
            rm -f "$lkfile" ; trap INT ; return 1
        fi
        if [ -z "${cksum##X*}" ] ; then
            echo "No checksum (ignore): $bzfile"
            rm -f "$lkfile" ; trap INT ; return 1
        fi
        if [ "$cksum" -eq 0 ] ; then
            echo "No checksum (missing): $bzfile"
            rm -f "$lkfile" ; trap INT ; return 1
        fi

    elif [ -s "$gzfile" ] ; then
        echo ; echo "Already downloaded $pkg:$gzfile ..."

    else
        echo ; echo "Downloading $pkg:$gzfile ..."

        # Existing *.cksum-err
        #
        if [ -s "$gzfile.cksum-err" ] ; then
            # cksum-err file already exists:
            echo "ERROR: Found $gzfile.cksum-err."
            echo "ERROR: That means that we downloaded the" \
                "file already and it had an"
            echo "ERROR: incorrect checksum. Remove the" \
                "*.cksum-err file to force a"
            echo "ERROR: new download of that file."
            rm -f "$lkfile" ; trap INT ; return 1
        fi

        # Existing *.extck-err
        #
        if [ -s "$gzfile.extck-err" ] ; then
            # extck-err file already exists:
            echo "ERROR: Found $gzfile.extck-err."
            echo "ERROR: That means that we downloaded the" \
                "file already and its content"
            echo "ERROR: did not match its filename extension." \
                "Remove the *.extck-err file"
            echo "ERROR: to force a new download of that file."
            rm -f "$lkfile" ; trap INT ; return 1
        fi
        # Questionable URL
        #
        if [ "$location" != "${location#\?}" ] ; then
            if [ "$tryques" = 0 ] ; then
                echo "ERROR: URL is marked as questionable." \
                    "Not downloading this file."
                rm -f "$lkfile" ; trap INT ; return 1
            else
                echo "WARNING: URL is marked as questionable." \
                    "Downloading it anyway."
                location="${location#\?}"
            fi
        fi
        # Make directory (if required)
        #
        if [ ! -d `dirname "$bzfile"` ] ; then
            mkdir -p `dirname "$bzfile"`
        fi

        # Alternative Directory
        #
        if [ "$altdir" ] ; then
            altfile=$(find $altdir/ -name `basename $bzfile` | head -n 1)
        else
            altfile=""
        fi

        #FIXME: compatibility, can be removed sooner or later...
        # Check old download dir layout
        if [ -z "$altfile" ]; then
            if [ -f "download/$repo${pkg:+/}$pkg/`basename $bzfile`" ]; then
                altfile="download/$repo${pkg:+/}$pkg/`basename $bzfile`"
            fi
        fi

        if [ "$altfile" ] ; then
            echo "Found `basename $bzfile` as $altfile."
            if [ "$altcopy" = 'link' ]; then
                cp -lv $altfile $bzfile
            elif [ "$altcopy" = 'copy' ]; then
                cp -v $altfile $bzfile
            elif [ "$altcopy" = 'move' ]; then
                mv -v $altfile $bzfile
            fi
            gzfile="$bzfile"
        else
            # Mirroring
            #
            if [ -n "$mirror" -a "$mirror" != "none" -a "$mirror" != "broken" -a -z "${bzfile##download/mirror/*}" ] ; then
                # try to use mirror
                if ! download_file_now "!$mirror/${bzfile#download/mirror/}" $bzfile $bzfile; then
                    echo "INFO: download from mirror failed, trying original URL."
                    download_file_now "$location" $gzfile $bzfile \
                        || downloaderror=1
                else
                    gzfile="$bzfile"
                fi
            else
                # don't want to use mirror
                download_file_now "$location" $gzfile $bzfile \
                    || downloaderror=1
            fi
        fi

        if [ ! -s "$gzfile" ]; then
            rm -f "$lkfile" ; trap INT ; return 1
        fi
    fi
    # unsign .gpg file
    if [[ $gzfile = *.gpg ]]; then
        gzfile=${gzfile%.gpg}
        if [ -f $gzfile.gpg ]; then
            echo "unsigning GnuPG file: $gzfile.gpg"
            gpg $gzfile.gpg
        fi
        if [ ! -f $gzfile ]; then
            echo "unsigning failed"
            rm -f "$lkfile" ; trap INT ; return 1
        fi
    fi

    sh ./lib/sde-download/validate.sh "$gzfile" "$bzfile" "$cksum" || downloaderror=1

    # Free Lock and finish
    #
    rm -f "$lkfile" ; trap INT ; return 0
}
# download_file_now location remote_filename local_filename
#
# This function executes the actual download using curl.
#
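# The location argument comes in three forms (the URLs below are illustrative):
#
#   manual://example.org/get/foo-1.0.tar.gz      - the user must fetch the file by hand
#   !http://mirror.example.org/f/foo-1.0.tar.bz2 - literal URL (used for mirror downloads)
#   http://example.org/dist/foo-1.0.tar.gz       - the last path component is replaced
#                                                  with the wanted local file name
#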
download_file_now() {
    local location="$1" gzfile="$2" bzfile="$3" curlret=0

    # Create URL
    #
    case "$location" in
        manual://*) url="$location" ;;
        !*) url="${location#!}" ;;
        *) url="${location%/*}/${gzfile##*/}" ;;
    esac

    # Download
    #
    case "$url" in
        manual://*)
            # Determine if the file has already been downloaded
            # manually. For this we first look in $HOME then in
            # download/manual.
            downloadpath=${altdir:-$HOME}
            downloadfile="${gzfile##*/}"

            if [ -e $downloadpath/$downloadfile ]; then
                location="file://$downloadpath/"
            else
                location="http://${url#manual://}"

                # No manual download has taken place yet.
                # So inform the user to do so.
                cat <<-EOT
The file $downloadfile cannot be fetched automatically.
Please visit: $location
and download it manually into $HOME, or somewhere else using -alt-dir.
EOT
                return 1
            fi

            # I am too lazy to do the copy and conversion myself,
            # so I use this function again with a modified
            # download location.
            download_file_now "$location" $gzfile $bzfile
            return "$?"
            ;;
        http://*|https://*|ftp://*|file://*)
            if [ -s "$gzfile.incomplete" ] ; then
                echo "INFO: Trying to resume previous download .."
                resume="-C -"
            else
                resume=""
            fi
            [ -s download/translations.sed ] &&
                trfile=download/translations.sed ||
                trfile=etc/download.sed

            trurl="$( echo "$url" | sed -f $trfile )"
            if [ -n "$trurl" -a "$trurl" != "$url" ]; then
                echo "INFO: url translated."
                url="$trurl"
            fi
            unset trurl trfile
            curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' --progress-bar $resume $curl_options "$url" -o "$gzfile.incomplete"
            curlret="$?"
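
            # curl exit codes 33 (HTTP range error) and 36 (bad FTP resume)
            # mean the server could not resume the transfer, so start over.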
            if [ "$resume" ] && \
               [ $curlret -eq 33 -o $curlret -eq 36 ] ; then
                echo "INFO: Resuming download not possible. ->" \
                    "Overwriting old file."
                rm -f "$gzfile.incomplete"
                curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' --progress-bar $curl_options "$url" -o "$gzfile.incomplete"
                curlret="$?"
            fi
            if [ $curlret -ne 0 ] ; then
                case "$curlret" in
                    18)
                        echo "WARNING: Got only some of the" \
                            "file. A re-run of $0"
                        echo "WARNING: is required to complete" \
                            "the download." ;;
                    130)
                        echo -e '\rWARNING: CURL got a SIGINT' \
                            "(someone pressed Ctrl-C). A re-run of"
                        echo "WARNING: $0 is required to complete" \
                            "the download." ; sleep 1 ;;
                    *)
                        echo "$curlret $gzfile $url" \
                            >> tmp/Download-Errors
                        echo -e '\rERROR: CURL Returned Error' \
                            "$curlret. Please read" \
                            "the curl manpage." ;;
                esac
                return 1

            elif [ ! -s "$gzfile.incomplete" ] ; then
                echo "0 $gzfile $url" >> tmp/Download-Errors
                echo "ERROR: CURL returned success but" \
                    "we have no data!"
                curlret=1

            else
                case "$gzfile" in
                    *.gz|*.tgz)
                        typeexpr="gzip compressed data" ;;
                    *.bz2|*.tbz2|*.tbz)
                        typeexpr="bzip2 compressed data" ;;
                    *.Z|*.tZ)
                        typeexpr="compress'd data" ;;
                    *.zip|*.jar)
                        typeexpr="Zip archive data" ;;
                    *.tar)
                        typeexpr="tar archive" ;;
                    *)
                        echo "WARNING: Unknown file extension: $gzfile"
                        typeexpr="." ;;
                esac
                if file "$gzfile.incomplete" | grep -v "$typeexpr"
                then
                    echo "ERROR: File type does not match" \
                        "filename ($typeexpr)!"
                    mv "$gzfile.incomplete" "$gzfile.extck-err"
                else
                    mv "$gzfile.incomplete" "$gzfile"
                fi
            fi
            ;;
        *)
            protocol="${url%%://*}"

            # we need to use $location - $url is already mangled above -ReneR
            # $protocol://$url $options
            url="`echo "$location" | sed "s,$protocol://\([^ ]*\).*,\1,"`"
            options="`echo "$location" | cut -d' ' -f2-`"
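
            # Illustrative locations handled below (hosts and module names are
            # made up; the general shape is "<protocol>://<url> <module> [options]"):
            #
            #   cvs://:pserver:anonymous@cvs.example.org:/cvsroot mymodule
            #   svn://svn.example.org/project trunk
            #   svn+http://svn.example.org/repos/project trunk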
            case "$protocol" in
                cvs)
                    # the first option is the module name
                    module="${options%% *}"
                    options="${options#* }"
                    cmdline="cvs -z4 -Q -d $url co -P $options $module"

                    # sometimes cvs wants to read ~/.cvspass just for fun ..
                    touch $HOME/.cvspass
                    ;;
                svn|svn\+http)
                    if [ "$protocol" == "svn+http" ]; then
                        url="http://$url"
                    else
                        url="svn://$url"
                    fi
                    if [ "${options:0:1}" == "-" ]; then
                        # the module is the last dir of $url
                        module="${url##*/}"
                    else
                        # the first option is the module name
                        module="${options%% *}"
                        options="${options#* }"
                    fi
                    cmdline="svn co $options $url $module"
                    ;;
                *)
                    echo "$protocol unrecognized!"
                    return 1
                    ;;
            esac
            cvsdir="tmp/down.${protocol}dir.`echo $bzfile | tr / -`"
            saved_pwd=$PWD ; mkdir -p $cvsdir ; cd $cvsdir

            echo "$cmdline"
            {
                $cmdline || touch .cvs_error
            } &> .cvs_output &

            while fuser .cvs_output &> /dev/null ; do
                echo -ne `nice du -sh 2> /dev/null | \
                    cut -f1` 'downloaded from archive so far...\r'
                sleep 3
            done

            if [ -f .cvs_error ] ; then
                cd $saved_pwd ; rm -rf $cvsdir
                echo -e "\nError during checkout."
                return 1
            fi
            echo `du -sh 2> /dev/null | \
                cut -f1` 'downloaded from archive (download finished).'
            if [ `echo * | wc -w` -gt 1 ]; then
                # multi-module checkout
                echo "Multi-module package detected, relocating..."
                mkdir t2-module.$$
                for x in *; do
                    [ "$x" != "t2-module.$$" ] && mv -f $x t2-module.$$/
                done
                mkdir -p "$module"
                mv -f t2-module.$$/* "$module"
                rm -rf t2-module.$$
            fi
            cd `dirname $module`
            tarname="`basename $bzfile`"

            echo "Preparing files for final tarball ..."
            find -type d \( -name CVS -o -name .svn \) | xargs rm -rf

            if [ `find -type f | wc -l` -gt 4 ]; then
                find `basename $module` | xargs touch -t 200001010000
                tar --owner root --group root \
                    --use-compress-program=bzip2 \
                    -cf $tarname `basename $module`
                mv $tarname $saved_pwd/$bzfile
            else
                echo "Too few files - assuming checkout failure."
                curlret=1
            fi

            cd $saved_pwd ; rm -rf $cvsdir
            ;;
    esac

    return $curlret
}
list_dtags() {
    {
        grep -H '^\[D\] ' package/*/*/*.desc
        grep -H '^[X0-9]' target/*/download.txt 2> /dev/null | sed 's,:,:[D] ,'
    } | column_clean
}
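
# A [D] line in a package .desc file looks roughly like this (the values are
# illustrative):
#
#   [D] 1234567890 foo-1.0.tar.gz http://example.org/dist/foo-1.0.tar.gz
#
# i.e. "[D] <cksum> <file> <url>", matching the "read tag cksum file url"
# loops further down.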
list_cksums() {
    trap '' INT

    # we know we only have single spaces due to list_dtags' column_clean
    list_dtags | sed -n \
        -e 's,[^ ]* \([X0-9]*\) \(.\)\([^ ]*\) -.*,\1 download/local/\2/\2\3,p' \
        -e 's,[^ ]* \([X0-9]*\) \(.\)\([^ ]*\) [^-].*,\1 download/mirror/\2/\2\3,p'

    trap INT
}

list() {
    trap '' INT
    list_cksums | cut -f2- -d' '
    trap INT
}
list_missing() {
    trap '' INT
    list | bz2filename | \
    while read fn ; do
        [ -f "$fn" ] || echo "$fn"
    done
    trap INT
}

repository() {
    for repository ; do
        packages `echo package/$repository/*/*.desc`
    done
}
required() {
    # Chosen config must exist
    #
    if [ ! -f config/$config/packages ]; then
        echo "ERROR: Config $config doesn't exist."
        echo "ERROR: try ./scripts/Config -cfg $config first."
        exit 1
    fi
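
    # Each line of config/$config/packages is read below as
    # "<flag> <a> <b> <repo> <pkg> ..."; a leading "X" marks the package as
    # enabled.  A schematic (not literal) example line:
    #
    #   X <...> <...> base foo ...
    #
    # Only the flag, repository and package fields matter to this loop.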
    while read on a b repo pkg c ; do
        if [ "$on" = "X" ] ; then
            grep -H '^\[D\] ' package/$repo/$pkg/$pkg.desc > tmp/down.$$.lst
            while read tag cksum file url ; do
                download_file "`source_file cksum $file "$url"`" "$url" "$cksum" "$repo" "$pkg"
            done < tmp/down.$$.lst ; rm -f tmp/down.$$.lst
        fi
    done < config/$config/packages
    # NOTE: automated ROCKCFG -> SDECFG conversion
    grep -q 'ROCKCFG' ./config/$config/config &&
        sed -i -e 's,ROCKCFG,SDECFG,g' ./config/$config/config
    target=`grep '^export SDECFG_TARGET=' config/$config/config | \
        cut -f2 -d= | tr -d "'"`
    targetchain="$target"; x="$target"
    while [ -f "target/$x/extends" ]; do
        x="`cat target/$x/extends`"
        targetchain="$targetchain $x"
    done

    for target in $targetchain; do
        if [ -f target/$target/download.txt ] ; then
            while read cksum file url ; do
                download_file "`source_file cksum $file "$url"`" "$url" "$cksum" "$target"
            done < target/$target/download.txt
        fi
    done
}
all() {
    local each repo pkg

    for repo in $( cd package; ls -1 ); do
        for each in package/$repo/*/*.desc; do
            pkg="`echo $each | cut -f3 -d/`"
            while read tag cksum file url ; do
                download_file "`source_file cksum $file "$url"`" "$url" "$cksum" "$repo" "$pkg"
            done < <(grep -H '^\[D\] ' package/$repo/$pkg/$pkg.desc)
        done
    done

    for each in $( ls -1 target/*/download.txt 2> /dev/null ); do
        target="`echo $each | cut -f2 -d/`"
        while read cksum file url ; do
            download_file "`source_file cksum $file "$url"`" "$url" "$cksum" "$target"
        done < <(cat $each)
    done
}

package() {
    descfile="`echo package/*/$1/$1.desc`"
    if [ ! -f $descfile ]; then
        echo "Skipping \"$1\" (not found)!"
        return
    fi

    pkg="`echo $descfile | cut -f3 -d/`"
    repo="`echo $descfile | cut -f2 -d/`"

    while read tag cksum file url ; do
        download_file "`source_file cksum $file "$url"`" "$url" "$cksum" "$repo" "$pkg"
    done < <(grep -H '^\[D\] ' package/$repo/$pkg/$pkg.desc)
}
packages() {
    local descfile

    for arg; do
        case "$arg" in
            target/*)
                if [ ! -f $arg ]; then
                    echo "Skipping \"$arg\" (not found)!"
                    continue
                fi

                target="`echo $arg | cut -f2 -d/`"
                while read cksum file url ; do
                    download_file "`source_file cksum $file "$url"`" "$url" "$cksum" "$target"
                done < <(cat $arg)
                ;;
            *)
                if [ "${arg%.desc}" != "$arg" ]; then
                    arg="`echo $arg | cut -f3 -d/`"
                fi
                # active extensions
                local extender=

                # pkg_*_{pre,post}.conf is only activated if the extender
                # is enabled in $config/packages, so we only download
                # files for those extenders
                #
                for extender in `ls -1 package/*/*/pkg_${arg}_{pre,post}.conf 2> /dev/null |
                        cut -d/ -f3 | sort -u`; do
                    if grep -q "^X .* $extender " \
                            config/$config/packages; then
                        echo_info "Also downloading $extender ..."
                        package $extender
                    fi
                done

                package $arg
                ;;
        esac
    done
}
set +e

# Things to do only for downloading
#
if [ "${1:0:5}" != "-list" -a $checkonly = 0 ]; then
    # we need curl
    if [ -z "`type -p curl`" ]; then
        echo_abort 2 "we need \`curl\` installed and available on \$PATH to proceed."
    fi

    # do mirror detection, only once
    [ $this_is_the_2nd_run = 1 ] || detect_mirror
fi
case "$1" in
    -list)          list ;;
    -list-missing)  list_missing ;;
    -list-cksums)   list_cksums ;;
    -required)      required ;;
    -all)           all ;;
    -repository)    shift ; repository "$@" ;;
    -*|"")          download_usage
                    exit 1 ;;
    *)              packages "$@" ;;
esac
exit $downloaderror