mirror of the now-defunct rocklinux.org

#!/bin/bash
#
# --- ROCK-COPYRIGHT-NOTE-BEGIN ---
#
# This copyright note is auto-generated by ./scripts/Create-CopyPatch.
# Please add additional copyright information _after_ the line containing
# the ROCK-COPYRIGHT-NOTE-END tag. Otherwise it might get removed by
# the ./scripts/Create-CopyPatch script. Do not edit this copyright text!
#
# ROCK Linux: rock-src/scripts/Download
# ROCK Linux is Copyright (C) 1998 - 2005 Clifford Wolf
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version. A copy of the GNU General Public
# License can be found at Documentation/COPYING.
#
# Many people helped and are helping developing ROCK Linux. Please
# have a look at http://www.rocklinux.org/ and the Documentation/TEAM
# file for details.
#
# --- ROCK-COPYRIGHT-NOTE-END ---
#
# Run this command from the ROCK directory as ./scripts/Download [ options ]
#
# It enables you to download source files as described in the package
# definitions (optionally using a mirroring 'cache' server).
#
# This script also allows for checksum display/validation.

umask 022

. scripts/functions

if [ "$1" = '-help' ] ; then
    { echo
    echo "Usage:"
    echo
    echo "  ./scripts/Download [ options ] [ <Package(s)> ]"
    echo "  ./scripts/Download [ options ] [ <Desc file(s)> ]"
    echo "  ./scripts/Download [ options ] -repository <Repositories>"
    echo "  ./scripts/Download [ options ] -sm <SM-PATCH-ID(s)>"
    echo "  ./scripts/Download [ options ] { -all | -required }"
    echo
    echo "  Download files required for given packages, package description files,"
    echo "  package repositories, import packages, or build configurations."
    echo "  By default, this script auto-detects the best ROCK Linux mirror."
    echo "  See the '-mirror none' output for help on bypassing the official mirrors."
    echo
    echo "  -all                    download all files for a build configuration"
    echo "  -required               download only files for packages that will be built in"
    echo "                          the given configuration"
    echo
    echo "  Where [ options ] is an alias for:"
    echo "    [ -cfg <config> ] [ -nock ] [ -alt-dir <AlternativeDirectory> ]"
    echo "    [ -mirror <URL> | -check ] [ -try-questionable ] [ -notimeout ]"
    echo "    [ -longtimeout ] [ -curl-opt <curl-option>[:<curl-option>[:..]] ]"
    echo "    [ -proxy <server>[:<port>] ] [ -proxy-auth <username>[:<password>] ]"
    echo "    [ -copy ] [ -move ]"
    echo
    echo "  -cfg <config>           download files for the given configuration"
    echo "  -nock                   skip checksum checks (don't use lightly)"
    echo "  -alt-dir <AlternativeDirectory>"
    echo "                          check for files to download also in the given directory"
    echo "  -mirror <URL>           set the download mirror to use"
    echo "                          Mirrors can also be local directories in the form"
    echo "                          of 'file:///<dir>'"
    echo "  -check                  check checksums only; don't download files"
    echo "  -try-questionable       also try to download from URLs marked as"
    echo "                          questionable"
    echo "  -notimeout              don't apply timeout settings to curl"
    echo "  -longtimeout            apply long timeout settings"
    echo "                          By default, timeouts for connection and speed-limit"
    echo "                          are set"
    echo "  -curl-opt <curl-option> pass option(s) to curl"
    echo "  -proxy <server>[:<port>]"
    echo "  -proxy-auth <username>[:<password>]"
    echo "                          pass proxy and proxy authentication to curl"
    echo "                          Warning: authentication can be seen with ps!"
    echo "  -copy                   copy files from the old download directory layout,"
    echo "                          if existent"
    echo "  -move                   move files instead"
    echo "                          Default is to link files, if existent, from the old"
    echo "                          layout to the new one"
    echo
    echo "  ./scripts/Download -mk-cksum <Filename(s)>"
    echo "  ./scripts/Download [ -list | -list-unknown | -list-missing | -list-cksums ]"
    echo
    echo "  -mk-cksum <Filenames>   calculate checksums on files as used in package"
    echo "                          descriptions"
    echo "  -list                   all files a complete download directory would contain"
    echo "  -list-cksums            as -list, with a checksum for each file"
    echo "  -list-unknown           files in the download directory that are not in any"
    echo "                          package description, e.g. old files"
    echo "  -list-missing           files in package descriptions that are not"
    echo "                          downloaded (yet)"
    echo ; } >&2
    exit 1
fi
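
# Illustrative examples (not part of the original script; "mybuild" and "bash"
# are placeholder names for a build configuration and a package):
#
#   ./scripts/Download -required                 # files needed by the default config
#   ./scripts/Download -cfg mybuild -required    # files needed by config "mybuild"
#   ./scripts/Download bash                      # source files of a single package
#   ./scripts/Download -check -required          # verify checksums, download nothing
#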
# -mk-cksum mode (display ROCK type package checksum): it
# displays the checksum ROCK validates against.
#
# Currently bz2, tbz2, gz, tgz, Z are unpacked
#
if [ "$1" = -mk-cksum ] ; then
    shift
    for x ; do
        echo -n "$x: "
        if [ ! -f "$x" ] ; then
            echo "No such file."
        elif [ "${x%.bz2}" != "$x" -o "${x%.tbz2}" != "$x" ] ; then
            bunzip2 < "$x" | cksum | cut -f1 -d' '
        elif [ "${x%.gz}" != "$x" -o "${x%.tgz}" != "$x" ] ; then
            gunzip < "$x" | cksum | cut -f1 -d' '
        elif [ "${x%.Z}" != "$x" ] ; then
            uncompress < "$x" | cksum | cut -f1 -d' '
        else
            cksum < "$x" | cut -f1 -d' '
        fi
    done
    exit 1
fi
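
# Illustrative example (hypothetical file name, not part of the original
# script): print the checksum a package description would carry for a
# tarball, computed on the uncompressed data:
#
#   ./scripts/Download -mk-cksum foo-1.0.tar.gz
#
# prints "foo-1.0.tar.gz: <cksum of the gunzip'ed data>".
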
# Handle options passed on the command line
#
mkdir -p src/ download/ ; config=default

this_is_the_2nd_run=0
mirror='' ; checkonly=0 ; altdir='' ; loop=1
tryques=0 ; nocheck=0 ; options='-this_is_the_2nd_run '
notimeout=0 ; curl_options='--disable-epsv --location'
altcopy=link
#
while [ $loop -eq 1 ] ; do
    case "$1" in
        -this_is_the_2nd_run)
            this_is_the_2nd_run=1
            shift ;;

        -cfg)
            options="$options -cfg $2"
            config="$2" ; shift ; shift ;;

        -nock)
            # -nock skips checksum checking (don't use lightly)
            options="$options -nock"
            nocheck=1 ; shift ;;

        -mirror)
            # -mirror uses a mirror for finding source files
            if [ "$2" = none ]; then
                echo
                echo "The option '-mirror none' is not supported anymore!"
                echo
                echo "You may 'echo none > download/Mirror' if you really"
                echo "want to use the original download resources. However, this"
                echo "is not supported and if such a download fails, this is not"
                echo "a bug in ROCK Linux and doesn't necessarily need fixing."
                echo
                exit 1;
            else
                mkdir -p download
                echo "$2" > download/Mirror
                options="$options -mirror $2"
                mirror="$2"
            fi
            shift ; shift ;;

        -check)
            # -check just validates the file using the checksum
            options="$options -check"
            checkonly=1 ; shift ;;

        -notimeout)
            # don't add timeout curl options
            options="$options -notimeout"
            notimeout=2 ; shift ;;

        -longtimeout)
            # use long timeout curl options
            options="$options -longtimeout"
            notimeout=1 ; shift ;;

        -curl-opt)
            # additional curl options
            options="$options -curl-opt $2"
            curl_options="$curl_options `echo $2 | tr : ' '`"
            shift ; shift ;;
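            # Illustrative note (not part of the original script): colons in
            # the -curl-opt argument are turned into spaces by the 'tr' call
            # above, so e.g. "-curl-opt --interface:eth1" appends
            # "--interface eth1" to the curl command line.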

        -proxy)
            # proxy option for curl
            mkdir -p download
            echo -n "$2" > download/Proxy
            options="$options -proxy $2"
            shift ; shift ;;

        -proxy-auth)
            # proxy authentication for curl - can be seen with ps!
            mkdir -p download
            echo -n "$2" > download/Proxy-auth
            chmod 600 download/Proxy-auth
            options="$options -proxy-auth $2"
            shift ; shift ;;

        -alt-dir)
            # check an alternative directory for already-downloaded
            # package source tarballs
            options="$options -alt-dir $2"
            altdir=$2 ; shift ; shift ;;

        -try-questionable)
            # also try to download questionable URLs
            options="$options -try-questionable"
            tryques=1 ; shift ;;

        -move) altcopy=move ; shift ;;
        -copy) altcopy=copy ; shift ;;

        *)
            loop=0 ;;
    esac
done

if [ $notimeout -eq 0 ] ; then
    curl_options="$curl_options -y 10 -Y 10 --connect-timeout 60"
fi
if [ $notimeout -eq 1 ] ; then
    curl_options="$curl_options -y 60 -Y 1 --connect-timeout 300"
fi

# Disable checking for certificates on https downloads
curl_options="$curl_options -k"

# build descparser if needed and run it
descparser() {
    local var
    if [ ! -f src/descparser ]; then
        mkdir -p src
        cc -o src/descparser misc/tools-source/descparser.c
    fi
    if [ "$ignore_xpkg" == "0" ]; then
        var=""
    else
        var="descparser_ign_xpkg=1"
    fi
    cat "$@" | eval $var src/descparser
}

# cksum_chk filename cksum origfile
#
# This function verifies the checksum. If it fails, it renames the file
# to file.cksum-err and returns failure.
#
# It seems like the [ ] command has problems with comparing high numbers.
# That's why I'm using a text comparison here.
#
# Nothing is done if the checksum is '0' or a string of 'X' characters.
#
cksum_chk() {
    y="`echo $2 | sed 's,^0*,,;'`"
    [ $nocheck = 1 -o -z "$y" -o -z "${2//X/}" ] && return 0
    x="`cksum "$1" | cut -f1 -d' ' | sed 's,^0*,,;'`"
    if [ "$x" != "$y" ] ; then
        # Add .cksum-err extension to filename:
        echo "Cksum ERROR: $3.cksum-err ($x)"
        mv "$3" "$3.cksum-err" ; return 1
    fi
    return 0
}
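
# Illustrative usage (placeholders as used elsewhere in this script): verify
# unpacked download data against the checksum from the package description;
# a checksum of '0' or 'X...' disables the check:
#
#   cksum_chk src/down.$$.dat "$cksum" "$gzfile"
#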
# Autodetect the best mirror and save its URL in $mirror
#
detect_mirror() {
    if [ -f download/Mirror ] ; then
        mirror="`cat download/Mirror`"
        if [ -z "$mirror" -o "$mirror" = "none" ] ; then
            echo "INFO: Found download/Mirror: none" \
                 "(use the original download locations)" 1>&2
        else
            echo "INFO: Found cached mirror URL in download/Mirror:" 1>&2
            echo "INFO: $mirror" 1>&2
        fi
        echo "INFO: To force a new mirror auto-detection, remove download/Mirror." 1>&2
    else
        echo "INFO: Auto-detecting best mirror ..." 1>&2
        eval "$(egrep '^(rockver)=' scripts/parse-config)"

        echo "INFO: Downloading mirror-list from www.rocklinux.net." 1>&2
        curl -s -S $curl_options -o src/Download-Mirror-List \
            "http://www.rocklinux.net/mirrors.cgi?$rockver"

        bestval=0 ; result='No Mirror Found!'
        while read mirror_name ; do
            if [ "${mirror_name#=}" != "$mirror_name" ] ; then
                mirror_name="${mirror_name#= }"
                mirror_name="${mirror_name% =}"
                read mirror_url

                echo -n "INFO: Testing <$mirror_name> ..." 1>&2
                val="$(curl -s $curl_options -m 20 "${mirror_url%/}/DOWNTEST" \
                    -w "ok %{speed_download}" -o /dev/null)"
                if [ "$val" = "${val#ok }" -o "$val" = "ok 0.000" ] ; then
                    echo " error"
                else
                    xval=`echo ${val#ok } | tr -d .` ; echo " $val"
                    if [ "$xval" -gt "$bestval" ] ; then
                        bestval=$xval ; mirror="${mirror_url%/}"
                        result="Using mirror <$mirror>."
                    fi
                fi
            fi
        done < src/Download-Mirror-List
        echo $mirror > download/Mirror
        echo "INFO: $result" 1>&2
    fi
}
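
# Illustrative note (hypothetical URL, not part of the original script): the
# detected mirror is cached in download/Mirror and can be pinned or reset by
# hand:
#
#   echo 'http://mirror.example.org/rock' > download/Mirror
#   rm download/Mirror      # force a new auto-detection on the next run
#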
# download_file local-filename download-location cksum repo pkg
#
# This function decides whether to download directly or from a mirror,
# validates the checksum, etc.
# It calls download_file_now to do the actual download.
#
download_file() {
    # Init
    #
    local gzfile="$1" location="$2" cksum="$3" repo="$4" pkg="$5"

    # Make src directory for creating tarballs
    mkdir -p src/

    # Tarball file name: (if you change this one - also adapt Create-ISO)
    bzfile="`echo "$gzfile" | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,'`"
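    # Illustrative examples of the rewrite above (hypothetical file names):
    #   foo-1.0.tar.gz -> foo-1.0.tar.bz2
    #   foo-1.0.tgz    -> foo-1.0.tbz2
    #   foo-1.0.tar.Z  -> foo-1.0.tar.bz2
    # i.e. gzip'ed and compress'ed tarballs are stored bzip2-recompressed.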

    # Lock file name:
    lkfile="src/down.lockfile.`echo $bzfile | tr / -`"

    # Check if it's already there
    #
    [ -s "$bzfile" -a $checkonly != 1 ] && return 0

    # Take the lock
    #
    if [ -s "$lkfile" ]; then
        echo "Found $lkfile -> skip download."
        return 0
    fi
    trap 'rm -f "$lkfile"' INT
    echo $$ > "$lkfile"

    # Check if we only want to test the cksum(s)
    #
    if [ $checkonly = 1 ] ; then
        gzfile="$bzfile"
        if [ ! -f "$bzfile" ] ; then
            echo "File missing: $bzfile"
            rm -f "$lkfile" ; trap INT ; return 1
        fi
        if [ -z "${cksum##X*}" ] ; then
            echo "No checksum (ignore): $bzfile"
            rm -f "$lkfile" ; trap INT ; return 1
        fi
        if [ "$cksum" -eq 0 ] ; then
            echo "No checksum (missing): $bzfile"
            rm -f "$lkfile" ; trap INT ; return 1
        fi

    elif [ -s "$gzfile" ] ; then
        echo ; echo "Already downloaded $gzfile ..."

    else
        echo ; echo "Downloading $gzfile ..."

        # Existing *.cksum-err
        #
        if [ -s "$gzfile.cksum-err" ] ; then
            # cksum-err file already exists:
            echo "ERROR: Found $gzfile.cksum-err."
            echo "ERROR: That means that we downloaded the" \
                 "file already and it had an"
            echo "ERROR: incorrect checksum. Remove the" \
                 "*.cksum-err file to force a"
            echo "ERROR: new download of that file."
            rm -f "$lkfile" ; trap INT ; return 1
        fi

        # Existing *.extck-err
        #
        if [ -s "$gzfile.extck-err" ] ; then
            # extck-err file already exists:
            echo "ERROR: Found $gzfile.extck-err."
            echo "ERROR: That means that we downloaded the" \
                 "file already and its content"
            echo "ERROR: did not match its filename extension." \
                 "Remove the *.extck-err file"
            echo "ERROR: to force a new download of that file."
            rm -f "$lkfile" ; trap INT ; return 1
        fi

        # Questionable URL
        #
        if [ "$location" != "${location#\?}" ] ; then
            if [ "$tryques" = 0 ] ; then
                echo "ERROR: URL is marked as questionable." \
                     "Not downloading this file."
                rm -f "$lkfile" ; trap INT ; return 1
            else
                echo "WARNING: URL is marked as questionable." \
                     "Downloading it anyway."
                location="${location#\?}"
            fi
        fi

        # Make directory (if required)
        #
        if [ ! -d `dirname "$bzfile"` ] ; then
            mkdir -p `dirname "$bzfile"`
        fi

        # Alternative Directory
        #
        if [ "$altdir" ] ; then
            altfile=$(find $altdir/ -name `basename $bzfile` | head -n 1)
        else
            altfile=""
        fi

        #FIXME: compatibility, can be removed sooner or later...
        # Check old download dir layout
        if [ -z "$altfile" ]; then
            if [ -f "download/$repo${pkg:+/}$pkg/`basename $bzfile`" ]; then
                altfile="download/$repo${pkg:+/}$pkg/`basename $bzfile`"
            fi
        fi

        if [ "$altfile" ] ; then
            echo "Found `basename $bzfile` as $altfile."
            if [ "$altcopy" = 'link' ]; then
                cp -lv $altfile $bzfile
            elif [ "$altcopy" = 'copy' ]; then
                cp -v $altfile $bzfile
            elif [ "$altcopy" = 'move' ]; then
                mv -v $altfile $bzfile
            fi
            gzfile="$bzfile"
        else
            # Mirroring
            #
            mirror="$( cat download/Mirror )"
            if [ -n "$mirror" -a "$mirror" != "none" -a -z "${bzfile##download/mirror/*}" ] ; then
                # try to use the mirror
                if ! download_file_now "!$mirror/${bzfile#download/mirror/}" $bzfile $bzfile; then
                    echo "INFO: download from mirror failed, trying original URL." 1>&2
                    download_file_now $location $gzfile $bzfile
                else
                    gzfile="$bzfile"
                fi
            else
                # don't want to use a mirror
                download_file_now $location $gzfile $bzfile
            fi
        fi

        if [ ! -s "$gzfile" ]; then
            rm -f "$lkfile" ; trap INT ; return 1
        fi
    fi

    # Convert a .gz to .bz2 and test the checksum
    #
    if [ "$gzfile" != "$bzfile" ] ; then
        echo "bzip'ing + cksum-test: $gzfile"
        gunzip < "$gzfile" > src/down.$$.dat
        if cksum_chk src/down.$$.dat $cksum "$gzfile" ; then
            bzip2 < src/down.$$.dat > "$bzfile" ; rm -f "$gzfile"
        fi
        rm -f src/down.$$.dat

    # Execute a cksum test on a bzip2 file
    #
    elif [ "${gzfile%.bz2}" != "$gzfile" -o \
           "${gzfile%.tbz2}" != "$gzfile" ]
    then
        echo "cksum-test (bzip2): $bzfile"
        if [ $nocheck = 0 ] ; then
            bunzip2 < "$bzfile" > src/down.$$.dat
            cksum_chk src/down.$$.dat $cksum "$bzfile"
        fi
        rm -f src/down.$$.dat

    # Execute a cksum test on a raw data file
    #
    else
        echo "cksum-test (raw): $gzfile"
        cksum_chk "$gzfile" $cksum "$gzfile"
    fi

    # Free the lock and finish
    #
    rm -f "$lkfile" ; trap INT ; return 0
}

# download_file_now location remote_filename local_filename
#
# This function executes the actual download using curl.
#
download_file_now() {
    local location="$1" gzfile="$2" bzfile="$3"

    # Create URL
    #
    if [ "${location#!}" != "$location" ] ; then
        url="`echo "$location" | sed 's,!,,'`"
    else
        url="`echo "$location" | \
            sed 's,/[^/]*$,,'`/`echo $gzfile | sed 's,.*/,,'`"
    fi

    # Check for an existing error log entry
    #
    if test -s src/Download-Errors &&
       grep -q " $url\$" src/Download-Errors ; then
        echo "ERROR: According to src/Download-Errors" \
             "we already had an error for the URL"
        echo "ERROR: $url"
        echo "ERROR: So I'm not trying to download" \
             "it again (remove src/Download-Errors"
        echo "ERROR: if you want to force a retry)."
        return 1
    fi

    # Download
    #
    if [[ $url = svn://* ]] ; then
        # svn://mode:[login[:password]@]server[:port]:/path::revision/
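        #
        # Illustrative example of this URL scheme (hypothetical server and
        # path, not part of the original script):
        #
        #   svn://https:anonymous@svn.example.org:/repos/foo/trunk::1042/
        #
        # is parsed below into roughly:
        #   svn export --username anonymous -r 1042 \
        #       https://svn.example.org:/repos/foo/trunk <dir>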
        urlorig=${url}
        url=${location#!}
        url="${url#svn://}"; url="${url%/}"
        mode="${url%%:*}"
        url="${url#*:}"
        if [ "${url%@*}" = "${url}" ] ; then
            username=""
            password=""
        else
            username="${url%%@*}"
            if [ "${username%:*}" != "${username}" ] ; then
                password="--password ${username#*:}"
                username="${username%%:*}"
            fi
            username="--username ${username}"
        fi
        url="${url##*@}"
        rev="${url##*::}"
        if [ -z "${rev}" -o "${rev}" == "${url}" ] ; then
            rev=""
        else
            rev="-r ${rev}"
        fi
        url="${url%::*}"

        old=${PWD}
        tmp="`mktemp`"
        rm -rf ${tmp}
        dir=${bzfile%.tar.bz2}
        dir="`basename ${dir}`"
        mkdir -p ${tmp}
        cd ${tmp}

        echo SVN ${username} ${password} ${rev} ${mode}://${url}
        { echo svn export ${username} ${password} ${rev} ${mode}://${url} ${dir}
          if ! svn export ${username} ${password} ${rev} ${mode}://${url} ${dir}
          then touch .svn_error ; fi
        } &> .svn_output &
        while fuser .svn_output &> /dev/null ; do
            echo -ne `nice du -sh 2> /dev/null | cut -f1` 'downloaded from' \
                'SVN archive so far...\r'
            sleep 3
        done
        echo `du -sh 2> /dev/null | cut -f1` 'downloaded from' \
            'SVN archive (download finished).'
        if [ ! -f .svn_error ] ; then
            rm -f .svn_output
            tar --owner root --group root \
                --use-compress-program=bzip2 \
                -cf ${dir}.tar.bz2 ${dir}
            mv ${dir}.tar.bz2 ${old}/${bzfile}
            cd ${old} ; rm -rf ${tmp}
        else
            cat .svn_output
            cd $old ; rm -rf $tmp
            echo ERROR: SVN export ${username} ${password} ${mode}://${url} ${dir} \
                returned an error.
            echo "0 $gzfile $urlorig" >> src/Download-Errors
        fi

    elif [[ $url = cvs://* ]] ; then
        # cvs://mode:[login[:password]@]server[:port]:/path::module!revision/
        # btw, at least current cvs supports a password in CVSROOT.
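        #
        # Illustrative example of this URL scheme (hypothetical server and
        # module, not part of the original script):
        #
        #   cvs://pserver:anonymous@cvs.example.org:/cvsroot/foo::mymod!2004-01-01/
        #
        # is parsed below into roughly:
        #   cvs -z9 -Q -d :pserver:anonymous@cvs.example.org:/cvsroot/foo \
        #       checkout -D 2004-01-01 -P mymod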
  546. url="${url#cvs://}"; url="${url%/*}"
  547. # cvs://mode:loc::module!date/
  548. #
  549. mode="${url%%:*}"; loc="${url#*:}"
  550. module="${loc##*::}"; loc="${loc%%::*}"
  551. revision="${module#*!}"; module="${module%%!*}"
  552. [[ $loc != *@* ]] && loc="anonymous@$loc"
  553. # everything after the first 'bang' (!) is analysed here
  554. # someday we could add more cvs options.
  555. #
  556. dat="$( echo $revision | \
  557. sed -n -e 's,\([0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\}\),-D \1,p' )"
  558. cvsdir="src/down.cvsdir.`echo $bzfile | tr / -`"
  559. saved_pwd=$PWD ; mkdir -p $cvsdir ; cd $cvsdir
  560. echo CVS $mode $loc $dat $module
  561. { [ $mode = ssh ] && export CVS_RSH=ssh
  562. [ $mode = pserver ] && loc=":pserver:$loc"
  563. # sometimes cvs wants to read ~/.cvspass just for fun ..
  564. touch $HOME/.cvspass
  565. # for ssh we need some way to quitely accept the key ...
  566. echo cvs -z9 -Q -d $loc checkout $dat -P $module
  567. if ! cvs -z9 -Q -d $loc checkout $dat -P $module
  568. then touch .cvs_error ; fi
  569. } &> .cvs_output &
  570. while fuser .cvs_output &> /dev/null ; do
  571. echo -ne `nice du -sh 2> /dev/null | cut -f1` 'downloaded from' \
  572. 'CVS archive so far...\r'
  573. sleep 3
  574. done
  575. echo `du -sh 2> /dev/null | cut -f1` 'downloaded from' \
  576. 'CVS archive (download finished).'
  577. if [ ! -f .cvs_error ] ; then
  578. cd `dirname $module`
  579. dir="`echo "$bzfile" | sed s/\.tar\.bz2$//`"
  580. dir="`basename $dir`"
  581. mv `basename $module` $dir
  582. tar --owner root --group root \
  583. --use-compress-program=bzip2 \
  584. -cf $dir.tar.bz2 $dir
  585. mv $dir.tar.bz2 $saved_pwd/$bzfile
  586. cd $saved_pwd ; rm -rf $cvsdir
  587. else
  588. cat .cvs_output
  589. cd $saved_pwd ; rm -rf $cvsdir
  590. echo ERROR: CVS $dat $loc $module \
  591. returned an error.
  592. echo "0 $gzfile $url" >> src/Download-Errors
  593. fi
  594. else
  595. if [ -s "$gzfile.incomplete" ] ; then
  596. echo "INFO: Trying to resume previous download .." 1>&2
  597. resume="-C -"
  598. else
  599. resume=""
  600. fi
  601. curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' -f --progress-bar $resume $curl_options "$url" -o "$gzfile.incomplete"
  602. curlret="$?"
  603. if [ "$resume" ] && \
  604. [ $curlret -eq 33 -o $curlret -eq 36 ] ; then
  605. echo "INFO: Resuming download not possible. ->" \
  606. "Overwriting old file." 1>&2
  607. rm -f "$gzfile.incomplete"
  608. curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' -f --progress-bar $curl_options "$url" -o "$gzfile.incomplete"
  609. curlret="$?"
  610. fi
  611. if [ $curlret -ne 0 ] ; then
  612. case "$curlret" in
  613. 18)
  614. echo "WARNING: Got only some of the" \
  615. "file. A re-run of $0"
  616. echo "WARNING: is required to complete" \
  617. "the download." ;;
  618. 130)
  619. echo -e '\rWARNING: CURL got a SIGINT' \
  620. "(someone pressed Ctrl-C). A re-run of"
  621. echo "WARNING: $0 is required to complete" \
  622. "the download." ; sleep 1 ;;
  623. *)
  624. echo "$curlret $gzfile $url" \
  625. >> src/Download-Errors
  626. echo -e '\rERROR: CURL Returned Error' \
  627. "$curlret. Please read" \
  628. "the curl manpage." ;;
  629. esac
  630. return 1
  631. elif [ ! -s "$gzfile.incomplete" ] ; then
  632. echo "0 $gzfile $url" >> src/Download-Errors
  633. echo "ERROR: CURL returned success but" \
  634. "we have no data!"
  635. curlret=1
  636. else
  637. case "$gzfile" in
  638. *.gz|*.tgz)
  639. typeexpr="gzip compressed data" ;;
  640. *.bz2|*.tbz2)
  641. typeexpr="bzip2 compressed data" ;;
  642. *.Z|*.tZ)
  643. typeexpr="compress'd data" ;;
  644. *.zip|*.jar)
  645. typeexpr="Zip archive data" ;;
  646. *.tar)
  647. typeexpr="tar archive" ;;
  648. *)
  649. echo "WARNING: Unkown file extension: $gzfile"
  650. typeexpr="." ;;
  651. esac
  652. if file "$gzfile.incomplete" | grep -v "$typeexpr"
  653. then
  654. echo "ERROR: File type does not match" \
  655. "filename ($typeexpr)!"
  656. mv "$gzfile.incomplete" "$gzfile.extck-err"
  657. else
  658. mv "$gzfile.incomplete" "$gzfile"
  659. fi
  660. fi
  661. fi
  662. }
  663. list_dtags() {
  664. {
  665. descparser package/*/*/*.desc | grep '^\[D\] '
  666. grep -h '^[X0-9]' target/*/download.txt | sed 's,^,[D] ,'
  667. grep -h '^[X0-9]' target/*/*/download.txt | sed 's,^,[D] ,'
  668. } | column_clean
  669. }
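
# Illustrative note (hypothetical values, not part of the original script):
# list_dtags emits one '[D]' line per source file, in the format used by the
# package .desc files and the target download.txt files, roughly:
#
#   [D] 1234567890 foo-1.0.tar.gz http://download.example.org/foo/
#
# i.e. "[D] <cksum> <file> <base url> [<flags>]".
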
list_cksums() {
    trap '' INT
    list_dtags | sed \
        -e "s,^$D2re[ ].*\($NODISTre\).*$,\2 download/nodist/\4/\3,;" \
        -e "s,^$D2re$,\2 download/mirror/\4/\3,;" \
        | sed 's,^\(.*/\)[^/:]*:[^ ]* \([X0-9]*\) ,\2 \1,;' | cut -f1,2 -d' '
    trap INT
}

list() {
    trap '' INT
    list_dtags | sed \
        -e "s,^$D2re[ ].*\($NODISTre\).*$,\2 download/nodist/\4/\3,;" \
        -e "s,^$D2re$,\2 download/mirror/\4/\3,;" | awk '{print $2;}'
    trap INT
}

list_unknown() {
    trap '' INT
    mkdir -p src/ ; list | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,' > src/down.$$.lst
    ls download/{INDEX,README,DOWNTEST,LAST-UPDATE} \
        >> src/down.$$.lst 2> /dev/null
    find download/* -type f -o -type l | grep -v -e download/Mirror \
        -e download/Proxy -e download/Proxy-auth | \
    while read fn ; do
        grep -qx "$fn" src/down.$$.lst || echo "Unknown file: $fn"
    done
    rm -f src/down.$$.lst
    trap INT
}

list_missing() {
    trap '' INT
    list | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,' | \
    while read fn ; do
        [ -f "$fn" ] || echo "$fn"
    done
    trap INT
}

repository() {
    for repository ; do
        packages `echo package/$repository/*/*.desc`
    done
}

smpatches() {
    submaster_url="http://www.rocklinux.net/submaster"
    id2url="s#\([0-9]\{4,4\}\)\([0-9]\{2,2\}\)\([0-9]*\)#$submaster_url/data/\1/\2/\3.patch#"

    {
        while [ -n "$1" ]
        do
            # convert $1 (smid) via $id2url to submaster url;
            # convert url to curl input for -K (url = "XXX")
            echo "$1" | sed "/[0-9]\{10\}/ {$id2url ; s/^\(.*\)$/url = \"\1\"/; p; }; d; "
            shift
        done
    } | curl --progress-bar $curl_options -K -
}
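
# Illustrative example (hypothetical patch ID, not part of the original
# script): a 10-digit submaster ID is split by the sed expression above into
# <4 digits>/<2 digits>/<rest>, e.g.
#
#   ./scripts/Download -sm 2005010042
#
# fetches http://www.rocklinux.net/submaster/data/2005/01/0042.patch and,
# since curl is given no output file here, writes it to standard output.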

required() {
    # The chosen config must exist
    #
    if [ ! -f config/$config/packages ]; then
        echo "ERROR: Config $config doesn't exist."
        echo "ERROR: try ./scripts/Config -cfg $config first."
        exit 1
    fi

    local forkedpkg
    while read on a b repo pkg c ; do
        forkedpkg=${pkg%=*}
        [ "$forkedpkg" = "$pkg" ] || pkg=$forkedpkg
        if [ "$on" = "X" ] ; then
            descparser package/$repo/$pkg/$pkg.desc | grep '^\[D\] ' > src/down.$$.lst
            while read tag cksum file url flags ; do
                download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$repo" "$pkg"
            done < src/down.$$.lst ; rm -f src/down.$$.lst
        fi
    done < config/$config/packages

    target=`grep '^export ROCKCFG_TARGET=' config/$config/config | \
        cut -f2 -d= | tr -d "'"`
    arch=`grep '^export ROCKCFG_ARCH=' config/$config/config | \
        cut -f2 -d= | tr -d "'"`
    for targetdl in target/$target/{,$arch/}download.txt ; do
        if [ -f $targetdl ] ; then
            while read cksum file url flags ; do
                download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$target"
            done < $targetdl
        fi
    done
}

all() {
    local each repo pkg
    for each in package/*/*/*.desc; do
        pkg="`echo $each | cut -f3 -d/`"
        repo="`echo $each | cut -f2 -d/`"
        while read tag cksum file url flags ; do
            download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$repo" "$pkg"
        done < <(descparser package/$repo/$pkg/$pkg.desc | grep '^\[D\] ')
    done
    for each in target/*/download.txt target/*/*/download.txt; do
        target="`echo $each | cut -f2 -d/`"
        while read cksum file url flags ; do
            download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$target"
        done < <(cat $each)
    done
}

packages() {
    local descfile
    for arg; do
        case "$arg" in
            target/*)
                if [ ! -f $arg ]; then
                    echo "Skipping \"$arg\" (not found)!"
                    continue
                fi
                target="`echo $arg | cut -f2 -d/`"
                while read cksum file url flags ; do
                    download_file "`source_file cksum $file url $flags`" \
                        "$url" "$cksum" "$target"
                done < <(cat $arg)
                ;;
            *)
                if [ ! "${arg%.desc}" = "$arg" ]; then
                    descfile=$arg
                else
                    descfile="`echo package/*/$arg/$arg.desc`"
                fi
                if [ ! -f $descfile ]; then
                    echo "Skipping \"$arg\" (not found)!"
                    continue
                fi
                pkg="`echo $descfile | cut -f3 -d/`"
                repo="`echo $descfile | cut -f2 -d/`"
                while read tag cksum file url flags ; do
                    download_file "`source_file cksum $file url $flags`" \
                        "$url" "$cksum" "$repo" "$pkg"
                done < <(descparser package/$repo/$pkg/$pkg.desc | grep '^\[D\] ')
                ;;
        esac
    done
}

mapped_packages() {
    if [ ! -f src/pkgmapper ]
    then
        mkdir -p src
        bash scripts/xfind.sh package/. -type f -name 'pkgmapper.in' \
            -printf '%f\t%p\n' | sort | awk '{ $1="."; print; }' > src/pkgmapper
    fi
    for pkg; do
        export xpkg=$pkg
        . src/pkgmapper
        packages $pkg
    done
}

# Things to do only for downloading
#
if [ "${1:0:5}" != "-list" -a $checkonly = 0 ]; then
    # Set proxy information
    if [ -f download/Proxy ]; then
        proxy="`cat download/Proxy`"
        if [ "$proxy" ]; then
            curl_options="$curl_options --proxy $proxy"
        else
            echo "INFO: No proxy information... removing download/Proxy." 1>&2
            rm download/Proxy
        fi
    fi
    if [ -f download/Proxy-auth ]; then
        proxyauth="`cat download/Proxy-auth`"
        if [ "$proxyauth" ]; then
            curl_options="$curl_options --proxy-user $proxyauth"
        else
            echo "INFO: No proxy-auth information... removing download/Proxy-auth." 1>&2
            rm download/Proxy-auth
        fi
    fi

    # Things to do only once
    #
    if [ $this_is_the_2nd_run = 0 ]; then
        # am i using a proxy?
        # -- say i'm doing it even when i already did ;-)
        if [ "$proxy" ]; then
            echo "INFO: Setting proxy to $proxy." 1>&2
        fi
        if [ "$proxyauth" ]; then
            echo "INFO: Setting proxy authentication information." 1>&2
        fi

        # do mirror detection
        detect_mirror
    fi
fi

case "$1" in
    -list)          list ;;
    -list-dtags)    list_dtags ;;
    -list-unknown)  list_unknown ;;
    -list-missing)  list_missing ;;
    -list-cksums)   list_cksums ;;
    -required)      required ;;
    -all)           all ;;
    -repository)    shift ; repository "$@" ;;
    -sm)            shift ; smpatches "$@" ;;
    -*|"")          exec $0 -help ;;
    *)              mapped_packages "$@" ;;
esac

exit 0