mirror of the now-defunct rocklinux.org
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

999 lines
28 KiB

  1. #!/bin/bash
  2. #
  3. # --- ROCK-COPYRIGHT-NOTE-BEGIN ---
  4. #
  5. # This copyright note is auto-generated by ./scripts/Create-CopyPatch.
  6. # Please add additional copyright information _after_ the line containing
  7. # the ROCK-COPYRIGHT-NOTE-END tag. Otherwise it might get removed by
  8. # the ./scripts/Create-CopyPatch script. Do not edit this copyright text!
  9. #
  10. # ROCK Linux: rock-src/scripts/Download
  11. # ROCK Linux is Copyright (C) 1998 - 2005 Clifford Wolf
  12. #
  13. # This program is free software; you can redistribute it and/or modify
  14. # it under the terms of the GNU General Public License as published by
  15. # the Free Software Foundation; either version 2 of the License, or
  16. # (at your option) any later version. A copy of the GNU General Public
  17. # License can be found at Documentation/COPYING.
  18. #
  19. # Many people helped and are helping developing ROCK Linux. Please
  20. # have a look at http://www.rocklinux.org/ and the Documentation/TEAM
  21. # file for details.
  22. #
  23. # --- ROCK-COPYRIGHT-NOTE-END ---
  24. #
  25. # Run this command from the ROCK directory as ./scripts/Download [ options ]
  26. #
  27. # It enables you to download source files as described in the package
  28. # definitions (optionally using a mirroring 'cache' server).
  29. #
  30. # This script also allows for checksum display/validation.
  31. umask 022
  32. . scripts/functions
  33. if [ "$1" = '-help' ] ; then
  34. { echo
  35. echo "Usage:"
  36. echo
  37. echo " ./scripts/Download [ options ] [ <Package(s)> ]"
  38. echo " ./scripts/Download [ options ] [ <Desc file(s)> ]"
  39. echo " ./scripts/Download [ options ] -repository <Repositories>"
  40. echo " ./scripts/Download [ options ] -sm <SM-PATCH-ID(s)>"
  41. echo " ./scripts/Download [ options ] { -all | -required }"
  42. echo
  43. echo " Download files required for given packages, package description files, package"
  44. echo " repositories, import packages, or build configurations."
  45. echo " On default, this script auto-detects the best ROCK Linux mirror."
  46. echo " See '-mirror none' output for help on bypassing the official mirrors."
  47. echo
  48. echo " -all download all files for a build configuration"
  49. echo " -required download only files for packages that will be built in"
  50. echo " the given configuration"
  51. echo
  52. echo " Where [ options ] is an alias for:"
  53. echo " [ -cfg <config> ] [ -nock ] [ -alt-dir <AlternativeDirectory> ]"
  54. echo " [ -mirror <URL> | -check ] [ -try-questionable ] [ -notimeout ]"
  55. echo " [ -longtimeout ] [ -curl-opt <curl-option>[:<curl-option>[:..]] ]"
  56. echo " [ -proxy <server>[:<port>] ] [ -proxy-auth <username>[:<password>] ]"
  57. echo " [ -copy ] [ -move ]"
  58. echo
  59. echo " -cfg <config> download files for the given configuration"
  60. echo " -nock skip checksum checks (don't use lightly)"
  61. echo " -alt-dir <AlternativeDirectory>"
  62. echo " check for files to download also in the given directory"
  63. echo " -mirror <URL> set the download mirror to use"
  64. echo " Mirrors can also be local directories in the form"
  65. echo " of 'file:///<dir>'"
  66. echo " -check check checksums only; don't download files"
  67. echo " -try-questionable also try to download from URLs marked as"
  68. echo " questionable"
  69. echo " -notimeout don't apply timeout settings to curl"
  70. echo " -longtimeout apply long timeout settings"
  71. echo " By default, timeouts for connection and speed-limit"
  72. echo " are set"
  73. echo " -curl-opt <curl-option> pass option(s) to curl"
  74. echo " -proxy <server>[:<port>]"
  75. echo " -proxy-auth <username>[:<password>]"
  76. echo " pass proxy and proxy authentication to curl"
  77. echo " Warning: authentication can be seen with ps!"
  78. echo " -copy copy files from the old download directory layout,"
  79. echo " if existent"
  80. echo " -move move files instead"
  81. echo " Default is to link files, if existent, from the old"
  82. echo " layout to the new one"
  83. echo
  84. echo " ./scripts/Download -mk-cksum <Filename(s)>"
  85. echo " ./scripts/Download [ -list | -list-unknown | -list-missing | -list-cksums ]"
  86. echo
  87. echo " -mk-cksum <Filenames> calculate checksums on files as used in package"
  88. echo " descriptions"
  89. echo " -list all files a complete download directory would contain"
  90. echo " -list-cksums as -list, with an checksum for each file"
  91. echo " -list-unknown files in the download directory that are not in any"
  92. echo " package description, e.g. old files"
  93. echo " -list-missing files in package descriptions that are not"
  94. echo " downloaded (yet)"
  95. echo ; } >&2
  96. exit 1
  97. fi
  98. # -mk-cksum mode (display ROCK type package checksum): it
  99. # displays the checksum ROCK validates against.
  100. #
  101. # Currently bz2, tbz2, gz, tgz, Z are unpacked
  102. #
  103. if [ "$1" = -mk-cksum ] ; then
  104. shift
  105. for x ; do
  106. echo -n "$x: "
  107. if [ ! -f "$x" ] ; then
  108. echo "No such file."
  109. elif [ "${x%.bz2}" != "$x" -o "${x%.tbz2}" != "$x" ] ; then
  110. bunzip2 < "$x" | cksum | cut -f1 -d' '
  111. elif [ "${x%.gz}" != "$x" -o "${x%.tgz}" != "$x" ] ; then
  112. gunzip < "$x" | cksum | cut -f1 -d' '
  113. elif [ "${x%.Z}" != "$x" ] ; then
  114. uncompress < "$x" | cksum | cut -f1 -d' '
  115. else
  116. cksum < "$x" | cut -f1 -d' '
  117. fi
  118. done
  119. exit 1
  120. fi
# Handle options passed on the command line
#
mkdir -p src/ download/ ; config=default

# Defaults. '$options' accumulates every parsed flag so it can be
# forwarded when this script re-invokes itself (see -this_is_the_2nd_run).
this_is_the_2nd_run=0
mirror='' ; checkonly=0 ; altdir='' ; loop=1
tryques=0 ; nocheck=0 ; options='-this_is_the_2nd_run '
notimeout=0 ; curl_options='--disable-epsv --location'
altcopy=link	# how to import files found in the old layout: link|copy|move
ignore_xpkg=1	# default: descparser ignores per-fork xpkg filtering
#
while [ $loop -eq 1 ] ; do
	case "$1" in

	-this_is_the_2nd_run)
		# internal flag: marks a recursive invocation; skips one-time
		# setup (proxy messages, mirror detection) further below
		this_is_the_2nd_run=1
		shift ;;

	-cfg)
		options="$options -cfg $2"
		config="$2" ; shift ; shift ;;

	-nock)
		# -nock skips checksum checking (don't use lightly)
		options="$options -nock"
		nocheck=1 ; shift ;;

	-mirror)
		# -mirror uses a mirror for finding source files
		if [ "$2" = none ]; then
			echo
			echo "The option '-mirror none' is not supported anymore!"
			echo
			echo "You may 'echo none > download/Mirror' if you really"
			echo "want to use the original download resources. However, this"
			echo "is not supported and if such a download fails, this is not"
			echo "a bug in ROCK Linux and doesn't neccessarily needs fixing."
			echo
			exit 1;
		else
			# cache the chosen mirror for later runs
			mkdir -p download
			echo "$2" > download/Mirror
			options="$options -mirror $2"
			mirror="$2"
		fi
		shift ; shift ;;

	-check)
		# -check just validates the file using the checksum
		options="$options -check"
		checkonly=1 ; shift ;;

	-notimeout)
		# don't add timeout curl options
		options="$options -notimeout"
		notimeout=2 ; shift ;;

	-longtimeout)
		# use relaxed (long) timeout curl options instead of the defaults
		options="$options -longtimeout"
		notimeout=1 ; shift ;;

	-curl-opt)
		# additional curl options; ':' separates multiple options
		options="$options -curl-opt $2"
		curl_options="$curl_options `echo $2 | tr : ' '`"
		shift ; shift ;;

	-proxy)
		# proxy option for curl; cached in download/Proxy for later runs
		mkdir -p download
		echo -n "$2" > download/Proxy
		options="$options -proxy $2"
		shift ; shift ;;

	-proxy-auth)
		# proxy authentication for curl - can be seen with ps!
		mkdir -p download
		echo -n "$2" > download/Proxy-auth
		chmod 600 download/Proxy-auth
		options="$options -proxy-auth $2"
		shift ; shift ;;

	-alt-dir)
		# check for an alternative directory where to search for
		# package source tarballs
		options="$options -alt-dir $2"
		altdir=$2 ; shift ; shift ;;

	-try-questionable)
		# also try to download questionable URLs
		options="$options -try-questionable"
		tryques=1 ; shift ;;

	-move) altcopy=move ; shift ;;
	-copy) altcopy=copy ; shift ;;

	*)
		# first non-option argument ends option parsing
		loop=0 ;;
	esac
done

# Timeout policy: notimeout=0 -> strict, 1 -> long, 2 -> none.
# (-y/-Y are curl's speed-time/speed-limit pair.)
if [ $notimeout -eq 0 ] ; then
	curl_options="$curl_options -y 10 -Y 10 --connect-timeout 60"
fi
if [ $notimeout -eq 1 ] ; then
	curl_options="$curl_options -y 60 -Y 1 --connect-timeout 300"
fi

#Disable checking for certificates on https downloads
curl_options="$curl_options -k"
  215. # build descparser if needed and run it
  216. # name=value pairs as arguments to this function are passed to the
  217. # descparser binary as environment variables
  218. descparser() {
  219. local var
  220. local descfiles
  221. if [ ! -f src/descparser ]; then
  222. mkdir -p src
  223. cc -o src/descparser misc/tools-source/descparser.c
  224. fi
  225. if [ "$ignore_xpkg" == "0" ]; then
  226. var=""
  227. else
  228. var="descparser_ign_xpkg=1"
  229. fi
  230. for arg ; do
  231. case $arg in
  232. *=*) var="$var $arg";;
  233. *) descfiles="$arg $descfiles";;
  234. esac
  235. done
  236. cat "${descfiles% }" | eval $var src/descparser
  237. }
  238. # cksum_chk filename cksum origfile
  239. #
  240. # This function verifies the checksum. If it fails it renames the file
  241. # to file.chksum-err and returns failure.
  242. #
  243. # It seams like the [ ] command has problems with comparing high numbers.
  244. # That's why I'm using a text comparison here.
  245. #
  246. # Not doing anything if checksum is '0' or a text of 'X'.
  247. #
  248. cksum_chk() {
  249. y="`echo $2 | sed 's,^0*,,;'`"
  250. [ $nocheck = 1 -o -z "$y" -o -z "${2//X/}" ] && return 0
  251. x="`cksum "$1" | cut -f1 -d' ' | sed 's,^0*,,;'`"
  252. if [ "$x" != "$y" ] ; then
  253. # Add .cksum-err extension to filename:
  254. echo "Cksum ERROR: $3.cksum-err ($x)"
  255. mv "$3" "$3.cksum-err" ; return 1
  256. fi
  257. return 0
  258. }
# Autodetect best Mirror and safe url in $mirror
#
# Reuses a cached decision from download/Mirror if present; otherwise
# fetches the mirror list and picks the fastest responder.
#
detect_mirror() {
	if [ -f download/Mirror ] ; then
		# cached decision exists ('none' = use original URLs)
		mirror="`cat download/Mirror`"
		if [ -z "$mirror" -o "$mirror" = "none" ] ; then
			echo "INFO: Found download/Mirror: none" \
				"(use the original download locations)" 1>&2
		else
			echo "INFO: Found cached mirror URL in download/Mirror:" 1>&2
			echo "INFO: $mirror" 1>&2
		fi
		echo "INFO: To force a new mirror auto-detection, remove download/Mirror." 1>&2
	else
		echo "INFO: Auto-detecting best mirror ..." 1>&2

		# pull only $rockver out of the config script
		eval "$(egrep '^(rockver)=' scripts/parse-config)"

		echo "INFO: Downloading mirror-list from www.rocklinux.net." 1>&2
		curl -s -S $curl_options -o src/Download-Mirror-List \
			"http://www.rocklinux.net/mirrors.cgi?$rockver"

		bestval=0 ; result='No Mirror Found!'
		# The list alternates '= Name =' header lines with URL lines;
		# the inner 'read mirror_url' consumes the line after a header.
		while read mirror_name ; do
			if [ "${mirror_name#=}" != "$mirror_name" ] ; then
				mirror_name="${mirror_name#= }"
				mirror_name="${mirror_name% =}"
				read mirror_url
				echo -n "INFO: Testing <$mirror_name> ..." 1>&2
				# time a small probe download; curl prints 'ok <bytes/sec>'
				val="$(curl -s $curl_options -m 20 "${mirror_url%/}/DOWNTEST" \
					-w "ok %{speed_download}" -o /dev/null)"
				if [ "$val" = "${val#ok }" -o "$val" = "ok 0.000" ] ; then
					# NOTE(review): goes to stdout, unlike the other
					# progress messages — presumably should be 1>&2 too.
					echo " error"
				else
					# compare speeds as plain integers (dots stripped)
					xval=`echo ${val#ok } | tr -d .` ; echo " $val"
					if [ "$xval" -gt "$bestval" ] ; then
						bestval=$xval ; mirror="${mirror_url%/}"
						result="Using mirror <$mirror>."
					fi
				fi
			fi
		done < src/Download-Mirror-List
		# cache the winner (may be empty if nothing responded)
		echo $mirror > download/Mirror
		echo "INFO: $result" 1>&2
	fi
}
# download_file local-filename download-location cksum repo pkg
#
# This function decides if download directly or from a mirror,
# validates checksum, etc.
# Calls download_file_now to do the actual download.
#
download_file() {

	# Init
	#
	local gzfile="$1" location="$2" cksum="$3" repo="$4" pkg="$5"

	# Make src directory for creating tar balls
	mkdir -p src/

	# Tarball file name: (if you change this one - also adapt Create-ISO)
	# gz/Z names are stored on disk recompressed as bz2.
	# NOTE(review): bzfile and lkfile are not declared local (globals);
	# nothing visible here reads them afterwards — confirm before changing.
	bzfile="`echo "$gzfile" | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,'`"

	# Lock file name:
	lkfile="src/down.lockfile.`echo $bzfile | tr / -`"

	# Check if it's already there
	#
	[ -s "$bzfile" -a $checkonly != 1 ] && return 0

	# Make locking
	#
	if [ -s "$lkfile" ]; then
		echo "Found $lkfile -> skip download."
		return 0
	fi
	# remove the lock if the user interrupts; every early return below
	# also clears the lock and resets this trap
	trap 'rm -f "$lkfile"' INT
	echo $$ > "$lkfile"

	# Check if we only like to test the cksum(s)
	#
	if [ $checkonly = 1 ] ; then
		gzfile="$bzfile"
		if [ ! -f "$bzfile" ] ; then
			echo "File missing: $bzfile"
			rm -f "$lkfile" ; trap INT ; return 1
		fi
		if [ -z "${cksum##X*}" ] ; then
			echo "No checksum (ignore): $bzfile"
			rm -f "$lkfile" ; trap INT ; return 1
		fi
		if [ "$cksum" -eq 0 ] ; then
			echo "No checksum (missing): $bzfile"
			rm -f "$lkfile" ; trap INT ; return 1
		fi

	elif [ -s "$gzfile" ] ; then
		echo ; echo "Already downloaded $gzfile ..."

	else
		echo ; echo "Downloading $gzfile ..."

		# Existing *.cksum-err
		#
		if [ -s "$gzfile.cksum-err" ] ; then
			# cksum-err file alread exists:
			echo "ERROR: Found $gzfile.cksum-err."
			echo "ERROR: That means that we downloaded the" \
				"file already and it had an"
			echo "ERROR: incorrect checksum. Remove the" \
				"*.cksum-err file to force a"
			echo "ERROR: new download of that file."
			rm -f "$lkfile" ; trap INT ; return 1
		fi

		# Existing *.extck-err
		#
		if [ -s "$gzfile.extck-err" ] ; then
			# extck-err file alread exists:
			echo "ERROR: Found $gzfile.extck-err."
			echo "ERROR: That means that we downloaded the" \
				"file already and it's content"
			echo "ERROR: did not match it's filename extension." \
				"Remove the *.extck-err file"
			echo "ERROR: to force a new download of that file."
			rm -f "$lkfile" ; trap INT ; return 1
		fi

		# Questionable URL
		#
		# URLs prefixed with '?' are only tried with -try-questionable.
		if [ "$location" != "${location#\?}" ] ; then
			if [ "$tryques" = 0 ] ; then
				echo "ERROR: URL is marked as questionable." \
					"Not downloading this file."
				rm -f "$lkfile" ; trap INT ; return 1
			else
				echo "WARNING: URL is marked as questionable." \
					"Downloading it anyways."
				location="${location#\?}"
			fi
		fi

		# Make directory (if required)
		#
		if [ ! -d `dirname "$bzfile"` ] ; then
			mkdir -p `dirname "$bzfile"`
		fi

		# Alternative Directory
		#
		if [ "$altdir" ] ; then
			altfile=$(find $altdir/ -name `basename $bzfile` | head -n 1)
		else
			altfile=""
		fi

		#FIXME: compatibility, can be removed sooner or later...
		# Check old download dir layout
		if [ -z "$altfile" ]; then
			if [ -f "download/$repo${pkg:+/}$pkg/`basename $bzfile`" ]; then
				altfile="download/$repo${pkg:+/}$pkg/`basename $bzfile`"
			fi
		fi

		if [ "$altfile" ] ; then
			# import the file instead of downloading it
			# (hard-link by default; -copy/-move change this)
			echo "Found `basename $bzfile` as $altfile."
			if [ "$altcopy" = 'link' ]; then
				cp -lv $altfile $bzfile
			elif [ "$altcopy" = 'copy' ]; then
				cp -v $altfile $bzfile
			elif [ "$altcopy" = 'move' ]; then
				mv -v $altfile $bzfile
			fi
			gzfile="$bzfile"
		else
			# Mirroring
			#
			# Prefer the mirror (which serves the ready-made bz2 file)
			# and fall back to the original URL on failure.
			mirror="$( cat download/Mirror )"
			if [ -n "$mirror" -a "$mirror" != "none" -a -z "${bzfile##download/mirror/*}" ] ; then
				# try to use mirror
				if ! download_file_now "!$mirror/${bzfile#download/mirror/}" $bzfile $bzfile; then
					echo "INFO: download from mirror failed, trying original URL." 1>&2
					download_file_now $location $gzfile $bzfile
				else
					gzfile="$bzfile"
				fi
			else
				# don't want to use mirror
				download_file_now $location $gzfile $bzfile
			fi
		fi

		if [ ! -s "$gzfile" ]; then
			rm -f "$lkfile" ; trap INT ; return 1
		fi
	fi

	# Convert a .gz to .bz2 and test checksum
	#
	if [ "$gzfile" != "$bzfile" ] ; then
		echo "bzip'ing + cksum-test: $gzfile"
		gunzip < "$gzfile" > src/down.$$.dat
		# only replace the original once the checksum matched
		if cksum_chk src/down.$$.dat $cksum "$gzfile" ; then
			bzip2 < src/down.$$.dat > "$bzfile" ; rm -f "$gzfile"
		fi
		rm -f src/down.$$.dat

	# Execute a cksum test on a bzip2 file
	#
	elif [ "${gzfile%.bz2}" != "$gzfile" -o \
	       "${gzfile%.tbz2}" != "$gzfile" ]
	then
		echo "cksum-test (bzip2): $bzfile"
		if [ $nocheck = 0 ] ; then
			bunzip2 < "$bzfile" > src/down.$$.dat
			cksum_chk src/down.$$.dat $cksum "$bzfile"
		fi
		rm -f src/down.$$.dat

	# Execute a cksum test on a raw data file
	#
	else
		echo "cksum-test (raw): $gzfile"
		cksum_chk "$gzfile" $cksum "$gzfile"
	fi

	# Free Lock and finish
	#
	rm -f "$lkfile" ; trap INT ; return 0
}
# download_file_now location remote_filename local_filename
#
# This function executes the actual download using curl, or an
# svn/cvs export packed into a tarball for svn:// and cvs:// URLs.
#
download_file_now() {
	local location="$1" gzfile="$2" bzfile="$3"

	# Create URL
	#
	# A leading '!' means: use the URL literally; otherwise replace its
	# last path component with the basename of the wanted file.
	if [ "${location#!}" != "$location" ] ; then
		url="`echo "$location" | sed 's,!,,'`"
	else
		url="`echo "$location" | \
			sed 's,/[^/]*$,,'`/`echo $gzfile | sed 's,.*/,,'`"
	fi

	# Check for existing Error Log
	#
	if test -s src/Download-Errors &&
	   grep -q " $url\$" src/Download-Errors ; then
		echo "ERROR: According to src/Download-Errors" \
			"we had already an error for the URL"
		echo "ERROR: $url"
		echo "ERROR: So I'm not trying to download" \
			"it again (remove src/Download-Errors"
		echo "ERROR: if you want to force a retry)."
		return 1
	fi

	# Download
	#
	if [[ $url = svn://* ]] ; then
		# svn://mode:[login[:password]@]server[:port]:/path::revision/
		# pull mode, credentials and revision out of the pseudo-URL
		urlorig=${url}
		url=${location#!}
		url="${url#svn://}"; url="${url%/}"
		mode="${url%%:*}"
		url="${url#*:}"
		if [ "${url%@*}" = "${url}" ] ; then
			username=""
			password=""
		else
			username="${url%%@*}"
			if [ "${username%:*}" != "${username}" ] ; then
				password="--password ${username#*:}"
				username="${username%%:*}"
			fi
			username="--username ${username}"
		fi
		url="${url##*@}"
		rev="${url##*::}"
		if [ -z "${rev}" -o "${rev}" == "${url}" ] ; then
			rev=""
		else
			rev="-r ${rev}"
		fi
		url="${url%::*}"

		# export into a scratch dir, then pack as <dir>.tar.bz2
		old=${PWD}
		tmp="`mktemp`"
		rm -rf ${tmp}
		dir=${bzfile%.tar.bz2}
		dir="`basename ${dir}`"
		mkdir -p ${tmp}
		cd ${tmp}
		# NOTE(review): this echoes ${password} (--password <pw>) to the
		# terminal and into .svn_output — confirm whether that is intended.
		echo SVN ${username} ${password} ${rev} ${mode}://${url}
		{ echo svn export ${username} ${password} ${rev} ${mode}://${url} ${dir}
		  if ! svn export ${username} ${password} ${rev} ${mode}://${url} ${dir}
		  then touch .svn_error ; fi
		} &> .svn_output &
		# progress indicator: poll disk usage while the export runs
		while fuser .svn_output &> /dev/null ; do
			echo -ne `nice du -sh 2> /dev/null | cut -f1` 'downloaded from' \
				'SVN archive so far...\r'
			sleep 3
		done
		echo `du -sh 2> /dev/null | cut -f1` 'downloaded from' \
			'SVN archive (download finished).'
		if [ ! -f .svn_error ] ; then
			rm -f .svn_output
			tar --owner root --group root \
			    --use-compress-program=bzip2 \
			    -cf ${dir}.tar.bz2 ${dir}
			mv ${dir}.tar.bz2 ${old}/${bzfile}
			cd ${old} ; rm -rf ${tmp}
		else
			cat .svn_output
			cd $old ; rm -rf $tmp
			echo ERROR: SVN export ${username} ${password} ${mode}://${url} ${dir} \
				returned an error.
			echo "0 $gzfile $urlorig" >> src/Download-Errors
		fi

	elif [[ $url = cvs://* ]] ; then
		# cvs://mode:[login[:password]@]server[:port]:/path::module!revision/
		# btw, at least current cvs supports password at CVSROOT.
		url="${url#cvs://}"; url="${url%/*}"

		# cvs://mode:loc::module!date/
		#
		mode="${url%%:*}"; loc="${url#*:}"
		module="${loc##*::}"; loc="${loc%%::*}"
		revision="${module#*!}"; module="${module%%!*}"
		[[ $loc != *@* ]] && loc="anonymous@$loc"

		# everything after the first 'bang' (!) is analysed here
		# someday we could add more cvs options.
		#
		# a YYYY-MM-DD revision becomes a '-D <date>' checkout option
		dat="$( echo $revision | \
			sed -n -e 's,\([0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\}\),-D \1,p' )"

		# checkout into a scratch dir, then pack as tarball
		cvsdir="src/down.cvsdir.`echo $bzfile | tr / -`"
		saved_pwd=$PWD ; mkdir -p $cvsdir ; cd $cvsdir
		echo CVS $mode $loc $dat $module
		{ [ $mode = ssh ] && export CVS_RSH=ssh
		  [ $mode = pserver ] && loc=":pserver:$loc"
		  # sometimes cvs wants to read ~/.cvspass just for fun ..
		  touch $HOME/.cvspass
		  # for ssh we need some way to quitely accept the key ...
		  echo cvs -z9 -Q -d $loc checkout $dat -P $module
		  if ! cvs -z9 -Q -d $loc checkout $dat -P $module
		  then touch .cvs_error ; fi
		} &> .cvs_output &
		# progress indicator: poll disk usage while the checkout runs
		while fuser .cvs_output &> /dev/null ; do
			echo -ne `nice du -sh 2> /dev/null | cut -f1` 'downloaded from' \
				'CVS archive so far...\r'
			sleep 3
		done
		echo `du -sh 2> /dev/null | cut -f1` 'downloaded from' \
			'CVS archive (download finished).'
		if [ ! -f .cvs_error ] ; then
			cd `dirname $module`
			dir="`echo "$bzfile" | sed s/\.tar\.bz2$//`"
			dir="`basename $dir`"
			mv `basename $module` $dir
			tar --owner root --group root \
			    --use-compress-program=bzip2 \
			    -cf $dir.tar.bz2 $dir
			mv $dir.tar.bz2 $saved_pwd/$bzfile
			cd $saved_pwd ; rm -rf $cvsdir
		else
			cat .cvs_output
			cd $saved_pwd ; rm -rf $cvsdir
			echo ERROR: CVS $dat $loc $module \
				returned an error.
			echo "0 $gzfile $url" >> src/Download-Errors
		fi
	else
		# plain curl download; resume if an .incomplete file from an
		# earlier attempt is lying around
		if [ -s "$gzfile.incomplete" ] ; then
			echo "INFO: Trying to resume previous download .." 1>&2
			resume="-C -"
		else
			resume=""
		fi

		curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' -f --progress-bar $resume $curl_options "$url" -o "$gzfile.incomplete"
		curlret="$?"

		# curl exit 33/36: server cannot resume -> restart from scratch
		if [ "$resume" ] && \
		   [ $curlret -eq 33 -o $curlret -eq 36 ] ; then
			echo "INFO: Resuming download not possible. ->" \
				"Overwriting old file." 1>&2
			rm -f "$gzfile.incomplete"
			curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' -f --progress-bar $curl_options "$url" -o "$gzfile.incomplete"
			curlret="$?"
		fi

		if [ $curlret -ne 0 ] ; then
			case "$curlret" in
			18)
				# partial transfer — keep .incomplete for a later resume
				echo "WARNING: Got only some of the" \
					"file. A re-run of $0"
				echo "WARNING: is required to complete" \
					"the download." ;;
			130)
				echo -e '\rWARNING: CURL got a SIGINT' \
					"(someone pressed Ctrl-C). A re-run of"
				echo "WARNING: $0 is required to complete" \
					"the download." ; sleep 1 ;;
			*)
				echo "$curlret $gzfile $url" \
					>> src/Download-Errors
				echo -e '\rERROR: CURL Returned Error' \
					"$curlret. Please read" \
					"the curl manpage." ;;
			esac
			return 1
		elif [ ! -s "$gzfile.incomplete" ] ; then
			echo "0 $gzfile $url" >> src/Download-Errors
			echo "ERROR: CURL returned success but" \
				"we have no data!"
			curlret=1
		else
			# sanity-check the payload with file(1) against the
			# filename extension before accepting it
			case "$gzfile" in
			*.gz|*.tgz)
				typeexpr="gzip compressed data" ;;
			*.bz2|*.tbz2)
				typeexpr="bzip2 compressed data" ;;
			*.Z|*.tZ)
				typeexpr="compress'd data" ;;
			*.zip|*.jar)
				typeexpr="Zip archive data" ;;
			*.tar)
				typeexpr="tar archive" ;;
			*)
				echo "WARNING: Unkown file extension: $gzfile"
				typeexpr="." ;;
			esac
			if file "$gzfile.incomplete" | grep -v "$typeexpr"
			then
				echo "ERROR: File type does not match" \
					"filename ($typeexpr)!"
				mv "$gzfile.incomplete" "$gzfile.extck-err"
			else
				mv "$gzfile.incomplete" "$gzfile"
			fi
		fi
	fi
}
  673. list_dtags() {
  674. {
  675. descparser package/*/*/*.desc | grep '^\[D\] '
  676. grep -h '^[X0-9]' target/*/download.txt | sed 's,^,[D] ,'
  677. grep -h '^[X0-9]' target/*/*/download.txt | sed 's,^,[D] ,'
  678. } | column_clean
  679. }
list_cksums() {
	trap '' INT
	# $D2re and $NODISTre come from scripts/functions — presumably they
	# match a '[D] cksum file url flags' tag line (verify there).  Each
	# entry is rewritten to '<cksum> download/{nodist|mirror}/<dir>/<file>';
	# the second sed folds 'file:...'-style entries, then cut keeps only
	# the checksum and path columns.
	list_dtags | sed \
		-e "s,^$D2re[ ].*\($NODISTre\).*$,\2 download/nodist/\4/\3,;" \
		-e "s,^$D2re$,\2 download/mirror/\4/\3,;" \
		| sed 's,^\(.*/\)[^/:]*:[^ ]* \([X0-9]*\) ,\2 \1,;' | cut -f1,2 -d' '
	trap INT
}
list() {
	trap '' INT
	# Same rewrite as list_cksums ($D2re/$NODISTre from scripts/functions),
	# but print only the local download path (second column).
	list_dtags | sed \
		-e "s,^$D2re[ ].*\($NODISTre\).*$,\2 download/nodist/\4/\3,;" \
		-e "s,^$D2re$,\2 download/mirror/\4/\3,;" | awk '{print $2;}'
	trap INT
}
  695. list_unknown() {
  696. trap '' INT
  697. mkdir -p src/ ; list | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,' > src/down.$$.lst
  698. ls download/{INDEX,README,DOWNTEST,LAST-UPDATE} \
  699. >> src/down.$$.lst 2> /dev/null
  700. find download/* -type f -o -type l | grep -v -e download/Mirror \
  701. -e download/Proxy -e download/Proxy-auth | \
  702. while read fn ; do
  703. grep -qx "$fn" src/down.$$.lst || echo "Unknown file: $fn"
  704. done
  705. rm -f src/down.$$.lst
  706. trap INT
  707. }
  708. list_missing() {
  709. trap '' INT
  710. list | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,' | \
  711. while read fn ; do
  712. [ -f "$fn" ] || echo "$fn"
  713. done
  714. trap INT
  715. }
  716. repository() {
  717. for repository ; do
  718. packages `echo package/$repository/*/*.desc`
  719. done
  720. }
smpatches() {
	submaster_url="http://www.rocklinux.net/submaster"
	# sed program: rewrite a submaster ID (YYYY MM rest) into the URL
	# of its patch file under $submaster_url/data/YYYY/MM/rest.patch
	id2url="s#\([0-9]\{4,4\}\)\([0-9]\{2,2\}\)\([0-9]*\)#$submaster_url/data/\1/\2/\3.patch#"
	{
		# consume the remaining arguments; stops early at an empty one
		while [ -n "$1" ]
		do
			# convert $1 (smid) via $id2url to submaster url;
			# convert url to curl input for -K (url = "XXX");
			# anything not looking like a 10+ digit ID is dropped
			echo "$1" | sed "/[0-9]\{10\}/ {$id2url ; s/^\(.*\)$/url = \"\1\"/; p; }; d; "
			shift
		done
	} | curl --progress-bar $curl_options -K -
}
required() {
	# Chosen config must exist
	#
	if [ ! -f config/$config/packages ]; then
		echo "ERROR: Config $config doesn't exist."
		echo "ERROR: try ./scripts/Config -cfg $config first."
		exit 1
	fi

	# honor per-fork [X] tags in the desc files for this mode
	ignore_xpkg=0

	local forkedpkg
	# config/<cfg>/packages: 'X <a> <b> <repo> <pkg> ...' — a leading X
	# marks a package selected for build; 'pkg=fork' names a forked build.
	while read on a b repo pkg c ; do
		forkedpkg=${pkg#*=}
		[ "$forkedpkg" = "$pkg" ] || pkg=${pkg%=*}
		if [ "$on" = "X" ] ; then
			# extract the [D] tags and fetch each referenced file
			descparser "xpkg=$forkedpkg" "package/$repo/$pkg/$pkg.desc" | grep '^\[D\] ' > src/down.$$.lst
			while read tag cksum file url flags ; do
				download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$repo" "$pkg"
			done < src/down.$$.lst ; rm -f src/down.$$.lst
		fi
	done < config/$config/packages

	# also fetch the target- and architecture-specific download lists
	target=`grep '^export ROCKCFG_TARGET=' config/$config/config | \
		cut -f2 -d= | tr -d "'"`
	arch=`grep '^export ROCKCFG_ARCH=' config/$config/config | \
		cut -f2 -d= | tr -d "'"`
	for targetdl in target/$target/{,$arch/}download.txt ; do
		if [ -f $targetdl ] ; then
			while read cksum file url flags ; do
				download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$target"
			done < $targetdl
		fi
	done
}
  766. all() {
  767. local each repo pkg
  768. for each in package/*/*/*.desc; do
  769. pkg="`echo $each | cut -f3 -d/`"
  770. repo="`echo $each | cut -f2 -d/`"
  771. while read tag cksum file url flags ; do
  772. download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$repo" "$pkg"
  773. done < <(descparser package/$repo/$pkg/$pkg.desc | grep '^\[D\] ')
  774. done
  775. for each in target/*/download.txt target/*/*/download.txt; do
  776. target="`echo $each | cut -f2 -d/`"
  777. while read cksum file url flags ; do
  778. download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$target"
  779. done < <(cat $each)
  780. done
  781. }
packages() {
	local descfile
	local forkedpkg
	for arg; do
		# Check if this is a forked package name ('pkg=fork'); if so,
		# pass the fork name to descparser and honor its [X] tags
		case "$arg" in
		*=*) ignore_xpkg=0; forkedpkg="xpkg=${arg#*=}"; arg=${arg%=*};;
		*) ignore_xpkg=1; forkedpkg="";;
		esac
		case "$arg" in
		target/*)
			# a target download.txt: 'cksum file url flags' per line
			if [ ! -f $arg ]; then
				echo "Skipping \"$arg\" (not found)!"
				continue
			fi
			target="`echo $arg | cut -f2 -d/`"
			while read cksum file url flags ; do
				download_file "`source_file cksum $file url $flags`" \
					"$url" "$cksum" "$target"
			done < <(cat $arg)
			;;
		*)
			# either an explicit .desc path or a bare package name
			# to be resolved via the package/*/<name>/ glob
			if [ ! "${arg%.desc}" = "$arg" ]; then
				descfile=$arg
			else
				descfile="`echo package/*/$arg/$arg.desc`"
			fi
			if [ ! -f $descfile ]; then
				echo "Skipping \"$arg\" (not found)!"
				continue
			fi
			pkg="`echo $descfile | cut -f3 -d/`"
			repo="`echo $descfile | cut -f2 -d/`"
			# fetch every [D] tag of the description
			while read tag cksum file url flags ; do
				download_file "`source_file cksum $file url $flags`" \
					"$url" "$cksum" "$repo" "$pkg"
			done < <(descparser $forkedpkg package/$repo/$pkg/$pkg.desc |
				grep '^\[D\] ')
			;;
		esac
	done
}
  824. mapped_packages() {
  825. if [ ! -f src/pkgmapper ]
  826. then
  827. mkdir -p src
  828. bash scripts/xfind.sh package/. -type f -name 'pkgmapper.in' \
  829. -printf '%f\t%p\n' | sort | awk '{ $1="."; print; }' > src/pkgmapper
  830. fi
  831. for pkg; do
  832. export xpkg=$pkg
  833. . src/pkgmapper
  834. packages $pkg
  835. done
  836. }
# Things to do only for downloading
#
if [ "${1:0:5}" != "-list" -a $checkonly = 0 ]; then

	# Set proxy information from the cached files (written by
	# -proxy / -proxy-auth on this or an earlier run)
	if [ -f download/Proxy ]; then
		proxy="`cat download/Proxy`"
		if [ "$proxy" ]; then
			curl_options="$curl_options --proxy $proxy"
		else
			echo "INFO: No proxy information... removing download/Proxy." 1>&2
			rm download/Proxy
		fi
	fi
	if [ -f download/Proxy-auth ]; then
		proxyauth="`cat download/Proxy-auth`"
		if [ "$proxyauth" ]; then
			curl_options="$curl_options --proxy-user $proxyauth"
		else
			echo "INFO: No proxy-auth information... removing download/Proxy-auth." 1>&2
			rm download/Proxy-auth
		fi
	fi

	# Thing to do only once
	#
	if [ $this_is_the_2nd_run = 0 ]; then
		# am i using a proxy?
		# -- say i'm doing it even when i already did ;-)
		if [ "$proxy" ]; then
			echo "INFO: Setting proxy to $proxy." 1>&2
		fi
		if [ "$proxyauth" ]; then
			echo "INFO: Setting proxy authentication information." 1>&2
		fi

		# do mirror detection
		detect_mirror
	fi
fi

# Dispatch on the first (non-consumed) argument; unknown options fall
# back to -help, anything else is treated as package names.
case "$1" in
	-list) list ;;
	-list-dtags) list_dtags ;;
	-list-unknown) list_unknown ;;
	-list-missing) list_missing ;;
	-list-cksums) list_cksums ;;
	-required) required ;;
	-all) all ;;
	-repository) shift ; repository "$@" ;;
	-sm) shift ; smpatches "$@" ;;
	-*|"") exec $0 -help ;;
	*) mapped_packages "$@" ;;
esac

exit 0