mirror of the now-defunct rocklinux.org

  1. #!/bin/bash
  2. #
  3. # --- ROCK-COPYRIGHT-NOTE-BEGIN ---
  4. #
  5. # This copyright note is auto-generated by ./scripts/Create-CopyPatch.
  6. # Please add additional copyright information _after_ the line containing
  7. # the ROCK-COPYRIGHT-NOTE-END tag. Otherwise it might get removed by
  8. # the ./scripts/Create-CopyPatch script. Do not edit this copyright text!
  9. #
  10. # ROCK Linux: rock-src/scripts/Download
  11. # ROCK Linux is Copyright (C) 1998 - 2006 Clifford Wolf
  12. #
  13. # This program is free software; you can redistribute it and/or modify
  14. # it under the terms of the GNU General Public License as published by
  15. # the Free Software Foundation; either version 2 of the License, or
  16. # (at your option) any later version. A copy of the GNU General Public
  17. # License can be found at Documentation/COPYING.
  18. #
  19. # Many people helped and are helping developing ROCK Linux. Please
  20. # have a look at http://www.rocklinux.org/ and the Documentation/TEAM
  21. # file for details.
  22. #
  23. # --- ROCK-COPYRIGHT-NOTE-END ---
  24. #
  25. # Run this command from the ROCK directory as ./scripts/Download [ options ]
  26. #
  27. # It enables you to download source files as described in the package
  28. # definitions (optionally using a mirroring 'cache' server).
  29. #
  30. # This script also allows for checksum display/validation.
  31. umask 022
  32. . scripts/functions
  33. if [ "$1" = '-help' ] ; then
  34. { echo
  35. echo "Usage:"
  36. echo
  37. echo " ./scripts/Download [ options ] [ <Package(s)> ]"
  38. echo " ./scripts/Download [ options ] [ <Desc file(s)> ]"
  39. echo " ./scripts/Download [ options ] -repository <Repositories>"
  40. echo " ./scripts/Download [ options ] -sm <SM-PATCH-ID(s)>"
  41. echo " ./scripts/Download [ options ] { -all | -required }"
  42. echo
  43. echo " Download files required for given packages, package description files, package"
  44. echo " repositories, import packages, or build configurations."
  45. echo " By default, this script auto-detects the best ROCK Linux mirror."
  46. echo
  47. echo " -all                      download all files for a build configuration"
  48. echo " -required                 download only files for packages that will be built in"
  49. echo "                           the given configuration"
  50. echo
  51. echo " Where [ options ] is an alias for:"
  52. echo "    [ -cfg <config> ] [ -nock ] [ -alt-dir <AlternativeDirectory> ]"
  53. echo "    [ -mirror <URL> | -check ] [ -try-questionable ] [ -notimeout ]"
  54. echo "    [ -longtimeout ] [ -curl-opt <curl-option>[:<curl-option>[:..]] ]"
  55. echo "    [ -proxy <server>[:<port>] ] [ -proxy-auth <username>[:<password>] ]"
  56. echo "    [ -copy ] [ -move ] [ -cksumpatch ]"
  57. echo
  58. echo " -cfg <config>             download files for the given configuration"
  59. echo " -nock                     skip checksum checks (don't use lightly)"
  60. echo " -alt-dir <AlternativeDirectory>"
  61. echo "                           check for files to download also in the given directory"
  62. echo " -mirror <URL>             set the download mirror to use"
  63. echo "                           Mirrors can also be local directories in the form"
  64. echo "                           of 'file:///<dir>'"
  65. echo "                           Use '-mirror auto' to autodetect a new best mirror server"
  66. echo "                           Use '-mirror none' to bypass the official mirrors"
  67. echo " -check                    check checksums only; don't download files"
  68. echo " -try-questionable         also try to download from URLs marked as"
  69. echo "                           questionable"
  70. echo " -notimeout                don't apply timeout settings to curl"
  71. echo " -longtimeout              apply long timeout settings"
  72. echo "                           By default, timeouts for connection and speed-limit"
  73. echo "                           are set"
  74. echo " -curl-opt <curl-option>   pass option(s) to curl"
  75. echo " -proxy <server>[:<port>]"
  76. echo " -proxy-auth <username>[:<password>]"
  77. echo "                           pass proxy and proxy authentication to curl"
  78. echo "                           Warning: authentication can be seen with ps!"
  79. echo " -copy                     copy files from the old download directory layout,"
  80. echo "                           if existent"
  81. echo " -move                     move files instead"
  82. echo "                           Default is to link files, if existent, from the old"
  83. echo "                           layout to the new one"
  84. echo " -cksumpatch               Patch the checksum in the .desc file after downloading."
  85. echo
  86. echo " ./scripts/Download -mk-cksum <Filename(s)>"
  87. echo " ./scripts/Download [ -list | -list-unknown | -list-missing | -list-cksums ]"
  88. echo
  89. echo " -mk-cksum <Filenames>     calculate checksums on files as used in package"
  90. echo "                           descriptions"
  91. echo " -list                     all files a complete download directory would contain"
  92. echo " -list-cksums              as -list, with a checksum for each file"
  93. echo " -list-unknown             files in the download directory that are not in any"
  94. echo "                           package description, e.g. old files"
  95. echo " -list-missing             files in package descriptions that are not"
  96. echo "                           downloaded (yet)"
  97. echo ; } >&2
  98. exit 1
  99. fi
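# Typical invocations (illustrative only; the package and config names below
# are examples, not something this script defines):
#
#   ./scripts/Download -cfg default -required   # files for what the config will build
#   ./scripts/Download bash                     # files for a single package
#   ./scripts/Download -mirror auto -all        # re-detect mirror, fetch everything
#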
  100. # -mk-cksum mode (display the ROCK-style package checksum): it
  101. # displays the checksum ROCK validates against.
  102. #
  103. # Currently bz2, tbz2, gz, tgz, Z are unpacked
  104. #
  105. if [ "$1" = -mk-cksum ] ; then
  106. shift
  107. for x ; do
  108. echo -n "$x: "
  109. if [ ! -f "$x" ] ; then
  110. echo "No such file."
  111. elif [ "${x%.bz2}" != "$x" -o "${x%.tbz2}" != "$x" ] ; then
  112. bunzip2 < "$x" | cksum | cut -f1 -d' '
  113. elif [ "${x%.gz}" != "$x" -o "${x%.tgz}" != "$x" ] ; then
  114. gunzip < "$x" | cksum | cut -f1 -d' '
  115. elif [ "${x%.Z}" != "$x" ] ; then
  116. uncompress < "$x" | cksum | cut -f1 -d' '
  117. else
  118. cksum < "$x" | cut -f1 -d' '
  119. fi
  120. done
  121. exit 1
  122. fi
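# For compressed files the checksum is taken over the uncompressed data, so
# (as an illustrative equivalence, the file name is hypothetical):
#
#   ./scripts/Download -mk-cksum foo-1.0.tar.gz
#
# reports the same checksum as:
#
#   gunzip < foo-1.0.tar.gz | cksum | cut -f1 -d' '
#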
  123. # Handle options passed on the command line
  124. #
  125. mkdir -p src/ download/ ; config=default
  126. this_is_the_2nd_run=0
  127. mirror='' ; checkonly=0 ; altdir='' ; loop=1
  128. tryques=0 ; nocheck=0 ; options='-this_is_the_2nd_run '
  129. notimeout=0 ; curl_options='--disable-epsv --location'
  130. altcopy=link ; cksumpatch=0
  131. ignore_xpkg=1
  132. if [ -f download/Mirror ]; then mirror="`cat download/Mirror`" ; fi
  133. #
  134. while [ $loop -eq 1 ] ; do
  135. case "$1" in
  136. -this_is_the_2nd_run)
  137. this_is_the_2nd_run=1
  138. shift ;;
  139. -cfg)
  140. options="$options -cfg $2"
  141. config="$2" ; shift ; shift ;;
  142. -nock)
  143. # -nock skips checksum checking (don't use lightly)
  144. options="$options -nock"
  145. nocheck=1 ; shift ;;
  146. -mirror)
  147. # -mirror uses a mirror for finding source files
  148. if [ "$2" = none ]; then
  149. echo
  150. echo "WARNING: The option '-mirror none' is not supported anymore!"
  151. echo
  152. echo "WARNING: You may use '-mirror none' if you really want to use the"
  153. echo "WARNING: original download resources. However, this is not"
  154. echo "WARNING: supported and if such a download fails, this is not a bug"
  155. echo "WARNING: in ROCK Linux and doesn't necessarily need fixing."
  156. echo
  157. else
  158. echo "$2" > download/Mirror
  159. fi
  160. options="$options -mirror $2"
  161. mirror="$2"
  162. shift 2 ;;
  163. -check)
  164. # -check just validates the file using the checksum
  165. options="$options -check"
  166. checkonly=1 ; shift ;;
  167. -notimeout)
  168. # don't add timeout curl options
  169. options="$options -notimeout"
  170. notimeout=2 ; shift ;;
  171. -longtimeout)
  172. # apply long timeout curl options
  173. options="$options -longtimeout"
  174. notimeout=1 ; shift ;;
  175. -curl-opt)
  176. # additional curl options
  177. options="$options -curl-opt $2"
  178. curl_options="$curl_options `echo $2 | tr : ' '`"
  179. shift ; shift ;;
  180. -proxy)
  181. # proxy option for curl
  182. mkdir -p download
  183. echo -n "$2" > download/Proxy
  184. options="$options -proxy $2"
  185. shift ; shift ;;
  186. -proxy-auth)
  187. # proxy authentication for curl - can be seen with ps!
  188. mkdir -p download
  189. echo -n "$2" > download/Proxy-auth
  190. chmod 600 download/Proxy-auth
  191. options="$options -proxy-auth $2"
  192. shift ; shift ;;
  193. -alt-dir)
  194. # check for an alternative directory where to search for
  195. # package source tarballs
  196. options="$options -alt-dir $2"
  197. altdir=$2 ; shift ; shift ;;
  198. -try-questionable)
  199. # also try to download questionable URLs
  200. options="$options -try-questionable"
  201. tryques=1 ; shift ;;
  202. -move) altcopy=move ; shift ;;
  203. -copy) altcopy=copy ; shift ;;
  204. -cksumpatch) cksumpatch=1 ; shift ;;
  205. *)
  206. loop=0 ;;
  207. esac
  208. done
  209. if [ $notimeout -eq 0 ] ; then
  210. curl_options="$curl_options -y 10 -Y 10 --connect-timeout 60"
  211. fi
  212. if [ $notimeout -eq 1 ] ; then
  213. curl_options="$curl_options -y 60 -Y 1 --connect-timeout 300"
  214. fi
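# (For reference: curl's -y is --speed-time and -Y is --speed-limit, so the
# default above aborts a transfer that stays below 10 bytes/s for 10 seconds
# with a 60 second connect timeout; -longtimeout relaxes this to 1 byte/s over
# 60 seconds and a 300 second connect timeout.)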
  215. # Disable certificate checking on https downloads
  216. curl_options="$curl_options -k"
  217. # build descparser if needed and run it
  218. # name=value pairs as arguments to this function are passed to the
  219. # descparser binary as environment variables
  220. descparser() {
  221. local var
  222. local descfiles
  223. if [ ! -f src/descparser ]; then
  224. mkdir -p src
  225. cc -o src/descparser misc/tools-source/descparser.c
  226. fi
  227. if [ "$ignore_xpkg" == "0" ]; then
  228. var=""
  229. else
  230. var="descparser_ign_xpkg=1"
  231. fi
  232. for arg ; do
  233. case $arg in
  234. *=*) var="$var $arg";;
  235. *) descfiles="$arg $descfiles";;
  236. esac
  237. done
  238. cat $descfiles | eval $var src/descparser
  239. }
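# Example use of the helper above (illustrative; the path is just an example):
# name=value arguments become environment variables for the binary, everything
# else is treated as a .desc file to parse.
#
#   descparser xpkg=bash package/base/bash/bash.desc | grep '^\[D\] '
#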
  240. # cksum_chk filename cksum origfile
  241. #
  242. # This function verifies the checksum. If it fails it renames the file
  243. # to file.cksum-err and returns failure.
  244. #
  245. # It seems like the [ ] command has problems comparing large numbers.
  246. # That's why I'm using a text comparison here.
  247. #
  248. # Nothing is done if the checksum is '0' or consists only of 'X' characters.
  249. #
  250. cksum_chk() {
  251. local file="$1" cksum="$2" orig="$3"
  252. do_cksum_chk $cksum || return 0
  253. x="`cksum "$file" | cut -f1 -d' ' | sed 's,^0*,,;'`"
  254. y="`echo $cksum | sed 's,^0*,,;'`"
  255. if [ "$x" != "$y" ] ; then
  256. # Add .cksum-err extension to filename:
  257. echo "Cksum ERROR: $orig.cksum-err ($x)"
  258. mv "$orig" "$orig.cksum-err" ; return 1
  259. fi
  260. return 0
  261. }
  262. # Determine if a check should be done on files with the given cksum.
  263. #
  264. do_cksum_chk() {
  265. local cksum="$1" y="`echo $cksum | sed 's,^0*,,;'`";
  266. [ $nocheck = 1 -o -z "$y" -o -z "${cksum//X/}" ] && return 1
  267. return 0
  268. }
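# Illustrative call (file name and checksum are hypothetical):
#
#   cksum_chk download/mirror/f/foo-1.0.tar.bz2 1593024045 \
#             download/mirror/f/foo-1.0.tar.bz2
#
# On a mismatch the file is renamed to foo-1.0.tar.bz2.cksum-err and 1 is
# returned; checksums of '0' or all 'X' skip the test via do_cksum_chk.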
  269. # Auto-detect the best mirror and save its URL in $mirror
  270. #
  271. detect_mirror() {
  272. if [ -n "$mirror" -a "$mirror" != "auto" ] ; then
  273. if [ "$mirror" = "none" ] ; then
  274. echo "INFO: Using download mirror: none" \
  275. "(use the original download locations)" 1>&2
  276. else
  277. echo "INFO: Using download mirror:" 1>&2
  278. echo "INFO: $mirror" 1>&2
  279. fi
  280. echo "INFO: To force a new mirror auto-detection, use '-mirror auto'." 1>&2
  281. else
  282. echo "INFO: Auto-detecting best mirror ..." 1>&2
  283. eval "$(egrep '^(rockver)=' scripts/parse-config)"
  284. echo "INFO: Downloading mirror-list from www.rocklinux.net." 1>&2
  285. curl -s -S $curl_options -o src/Download-Mirror-List \
  286. "http://www.rocklinux.net/mirrors.cgi?$rockver"
  287. bestval=0 ; result='No Mirror Found!'
  288. while read mirror_name ; do
  289. if [ "${mirror_name#=}" != "$mirror_name" ] ; then
  290. mirror_name="${mirror_name#= }"
  291. mirror_name="${mirror_name% =}"
  292. read mirror_url
  293. echo -n "INFO: Testing <$mirror_name> ..." 1>&2
  294. val="$(curl -s $curl_options -m 20 "${mirror_url%/}/DOWNTEST" \
  295. -w "ok %{speed_download}" -o /dev/null)"
  296. if [ "$val" = "${val#ok }" -o "$val" = "ok 0.000" -o "$val" = "ok 0,000" ] ; then
  297. echo " error"
  298. else
  299. xval=`echo ${val#ok } | tr -d .,` ; echo " $val"
  300. if [ "$xval" -gt "$bestval" ] ; then
  301. bestval=$xval ; mirror="${mirror_url%/}"
  302. result="Using mirror <$mirror>."
  303. fi
  304. fi
  305. fi
  306. done < src/Download-Mirror-List
  307. echo $mirror > download/Mirror
  308. echo "INFO: $result" 1>&2
  309. fi
  310. }
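# The mirror list fetched above is parsed as pairs of lines: a name wrapped in
# '=' characters followed by the mirror URL on the next line. A minimal sketch
# of the expected format (hypothetical entries, derived from the parsing code):
#
#   = Some Mirror, Somewhere =
#   http://mirror.example.org/rock/
#   = Another Mirror =
#   http://ftp.example.net/pub/rock/
#
# Each URL is probed via <mirror>/DOWNTEST and the fastest one is written to
# download/Mirror.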
  311. # Check whether the file can be fetched automatically or is marked NOAUTO (manual download)
  312. #
  313. is_static() {
  314. local repo="$1" pkg="$2" gzfile="$3" url="$4"
  315. local file="package/$repo/$pkg/$pkg.desc"
  316. local pattern="^\[D\].*${gzfile##*/}.*NOAUTO"
  317. if [ "`grep "$file" -e"$pattern"`" != "" ]; then
  318. mkdir -p "${gzfile%/*}"
  319. echo "INFO: File ${gzfile##*/} needs to be downloaded manually."
  320. echo "INFO: Download the file from"
  321. echo "INFO: $url"
  322. echo "INFO: and place it in ${gzfile%/*}"
  323. return 1
  324. fi
  325. return 0
  326. }
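# A package marks such a manual download with a NOAUTO flag on the [D] line of
# its .desc file; a sketch of what the pattern above matches (file name and URL
# are hypothetical):
#
#   [D] 0 foo-2.3-commercial.tar.gz http://www.example.com/request-form/ NOAUTO
#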
  327. # Create a checksum patch for a package or target if requested
  328. #
  329. create_cksumpatch () {
  330. local repo="$1" pkg="$2"
  331. if [ $cksumpatch = 1 ] ; then
  332. if [ "$pkg" ] ; then
  333. # patch for a package
  334. ./scripts/Create-CkSumPatch $pkg | patch -p0
  335. else
  336. # patch for a target download
  337. for x in target/$repo/{,*/}download.txt ; do
  338. [ -f "$x" ] && echo "$x"
  339. done | xargs ./scripts/Create-CkSumPatch \
  340. | patch -p0
  341. fi
  342. fi
  343. }
  344. # download_file local-filename download-location cksum repo pkg
  345. #
  346. # This function decides whether to download directly or from a mirror,
  347. # validates checksum, etc.
  348. # Calls download_file_now to do the actual download.
  349. #
  350. download_file() {
  351. # Init
  352. #
  353. local gzfile="$1" location="$2" cksum="$3" repo="$4" pkg="$5"
  354. # Make src directory for creating tarballs
  355. mkdir -p src/
  356. # Tarball file name: (if you change this one - also adapt Create-ISO)
  357. bzfile="`echo "$gzfile" | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,'`"
  358. # Lock file name:
  359. lkfile="src/down.lockfile.`echo $bzfile | tr / -`"
  360. # Check if it's already there
  361. #
  362. if [ -s "$bzfile" -a $checkonly != 1 ] ; then
  363. create_cksumpatch $repo $pkg
  364. return 0
  365. fi
  366. # Acquire the lock
  367. #
  368. if [ -s "$lkfile" ]; then
  369. echo "Found $lkfile -> skip download."
  370. return 0
  371. fi
  372. trap 'rm -f "$lkfile"' INT
  373. echo $$ > "$lkfile"
  374. # Check if we only want to test the cksum(s)
  375. #
  376. if [ $checkonly = 1 ] ; then
  377. gzfile="$bzfile"
  378. if [ ! -f "$bzfile" ] ; then
  379. echo "File missing: $bzfile"
  380. rm -f "$lkfile" ; trap INT ; return 1
  381. fi
  382. if [ -z "${cksum##X*}" ] ; then
  383. echo "No checksum (ignore): $bzfile"
  384. rm -f "$lkfile" ; trap INT ; return 1
  385. fi
  386. if [ "$cksum" -eq 0 ] ; then
  387. echo "No checksum (missing): $bzfile"
  388. rm -f "$lkfile" ; trap INT ; return 1
  389. fi
  390. elif [ -s "$gzfile" ] ; then
  391. echo ; echo "Already downloaded $gzfile ..."
  392. else
  393. echo ; echo "Downloading $gzfile ..."
  394. # Existing *.cksum-err
  395. #
  396. if [ -s "$gzfile.cksum-err" ] ; then
  397. # cksum-err file already exists:
  398. echo "ERROR: Found $gzfile.cksum-err."
  399. echo "ERROR: That means that we downloaded the" \
  400. "file already and it had an"
  401. echo "ERROR: incorrect checksum. Remove the" \
  402. "*.cksum-err file to force a"
  403. echo "ERROR: new download of that file."
  404. rm -f "$lkfile" ; trap INT ; return 1
  405. fi
  406. # Existing *.extck-err
  407. #
  408. if [ -s "$gzfile.extck-err" ] ; then
  409. # extck-err file already exists:
  410. echo "ERROR: Found $gzfile.extck-err."
  411. echo "ERROR: That means that we downloaded the" \
  412. "file already and its content"
  413. echo "ERROR: did not match its filename extension." \
  414. "Remove the *.extck-err file"
  415. echo "ERROR: to force a new download of that file."
  416. rm -f "$lkfile" ; trap INT ; return 1
  417. fi
  418. # Questionable URL
  419. #
  420. if [ "$location" != "${location#\?}" ] ; then
  421. if [ "$tryques" = 0 ] ; then
  422. echo "ERROR: URL is marked as questionable." \
  423. "Not downloading this file."
  424. rm -f "$lkfile" ; trap INT ; return 1
  425. else
  426. echo "WARNING: URL is marked as questionable." \
  427. "Downloading it anyway."
  428. location="${location#\?}"
  429. fi
  430. fi
  431. # Make directory (if required)
  432. #
  433. if [ ! -d `dirname "$bzfile"` ] ; then
  434. mkdir -p `dirname "$bzfile"`
  435. fi
  436. # Alternative Directory
  437. #
  438. if [ "$altdir" ] ; then
  439. altfile=$(find $altdir/ -name `basename $bzfile` | head -n 1)
  440. else
  441. altfile=""
  442. fi
  443. #FIXME: compatibility, can be removed sooner or later...
  444. # Check old download dir layout
  445. if [ -z "$altfile" ]; then
  446. if [ -f "download/$repo${pkg:+/}$pkg/`basename $bzfile`" ]; then
  447. altfile="download/$repo${pkg:+/}$pkg/`basename $bzfile`"
  448. fi
  449. fi
  450. if [ "$altfile" ] ; then
  451. echo "Found `basename $bzfile` as $altfile."
  452. if [ "$altcopy" = 'link' ]; then
  453. cp -lv $altfile $bzfile
  454. elif [ "$altcopy" = 'copy' ]; then
  455. cp -v $altfile $bzfile
  456. elif [ "$altcopy" = 'move' ]; then
  457. mv -v $altfile $bzfile
  458. fi
  459. gzfile="$bzfile"
  460. else
  461. # Mirroring
  462. #
  463. if [ -n "$mirror" -a "$mirror" != "none" -a -z "${bzfile##download/mirror/*}" ] ; then
  464. # try to use mirror
  465. if ! download_file_now "!$mirror/${bzfile#download/mirror/}" $bzfile $bzfile; then
  466. echo "INFO: download from mirror failed, trying original URL." 1>&2
  467. if ! is_static $repo $pkg "$gzfile" "$location"; then
  468. rm -f "$lkfile" ; trap INT ; return 1
  469. fi
  470. download_file_now $location $gzfile $bzfile
  471. else
  472. gzfile="$bzfile"
  473. fi
  474. else
  475. # don't want to use mirror
  476. if ! is_static $repo $pkg "$gzfile" "$location"; then
  477. rm -f "$lkfile" ; trap INT ; return 1
  478. fi
  479. download_file_now $location $gzfile $bzfile
  480. fi
  481. fi
  482. if [ ! -s "$gzfile" ]; then
  483. rm -f "$lkfile" ; trap INT ; return 1
  484. fi
  485. fi
  486. # Convert a .gz to .bz2 and test checksum
  487. #
  488. if [ "$gzfile" != "$bzfile" ] ; then
  489. do_cksum_chk $cksum \
  490. && echo "bzip'ing + cksum-test: $gzfile" \
  491. || echo "bzip'ing: $gzfile"
  492. gunzip < "$gzfile" > src/down.$$.dat
  493. if cksum_chk src/down.$$.dat $cksum "$gzfile" ; then
  494. bzip2 < src/down.$$.dat > "$bzfile" ; rm -f "$gzfile"
  495. fi
  496. rm -f src/down.$$.dat
  497. # Execute a cksum test on a bzip2 file
  498. #
  499. elif [ "${gzfile%.bz2}" != "$gzfile" -o \
  500. "${gzfile%.tbz2}" != "$gzfile" ]
  501. then
  502. if [ $nocheck = 0 ] && do_cksum_chk $cksum ; then
  503. echo "cksum-test (bzip2): $bzfile"
  504. bunzip2 < "$bzfile" > src/down.$$.dat
  505. cksum_chk src/down.$$.dat $cksum "$bzfile"
  506. rm -f src/down.$$.dat
  507. fi
  508. # Execute a cksum test on a raw data file
  509. #
  510. elif [ $nocheck = 0 ] ; then
  511. echo "cksum-test (raw): $gzfile"
  512. cksum_chk "$gzfile" $cksum "$gzfile"
  513. fi
  514. # Calculate missing checksums if requested
  515. #
  516. create_cksumpatch $repo $pkg
  517. # Free Lock and finish
  518. #
  519. rm -f "$lkfile" ; trap INT ; return 0
  520. }
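# The callers below invoke download_file with the local file name produced by
# source_file plus URL, checksum, repository and package; a sketch with
# hypothetical values:
#
#   download_file download/mirror/f/foo-1.0.tar.gz \
#       http://www.example.org/foo/foo-1.0.tar.gz 1593024045 base foo
#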
  521. # download_file_now location remote_filename local_filename
  522. #
  523. # This function executes the actual download using curl (or svn/cvs checkouts).
  524. #
  525. download_file_now() {
  526. local location="$1" gzfile="$2" bzfile="$3"
  527. # Create URL
  528. #
  529. if [ "${location#!}" != "$location" ] ; then
  530. url="`echo "$location" | sed 's,!,,'`"
  531. else
  532. url="`echo "$location" | \
  533. sed 's,/[^/]*$,,'`/`echo $gzfile | sed 's,.*/,,'`"
  534. fi
  535. # Check for existing Error Log
  536. #
  537. if test -s src/Download-Errors &&
  538. grep -q " $url\$" src/Download-Errors ; then
  539. echo "ERROR: According to src/Download-Errors" \
  540. "we already had an error for the URL"
  541. echo "ERROR: $url"
  542. echo "ERROR: So I'm not trying to download" \
  543. "it again (remove src/Download-Errors"
  544. echo "ERROR: if you want to force a retry)."
  545. return 1
  546. fi
  547. # Download
  548. #
  549. if [[ $url = svn://* ]] ; then
  550. # svn://mode:[login[:password]@]server[:port]:/path::revision/
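# e.g. (hypothetical URL) 'svn://http:svn.example.org:/repos/foo/trunk::1234/'
# is split by the code below into mode 'http', the repository URL
# 'http://svn.example.org:/repos/foo/trunk' and the revision option '-r 1234'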
  551. urlorig=${url}
  552. url=${location#!}
  553. url="${url#svn://}"; url="${url%/}"
  554. mode="${url%%:*}"
  555. url="${url#*:}"
  556. if [ "${url%@*}" = "${url}" ] ; then
  557. username=""
  558. password=""
  559. else
  560. username="${url%%@*}"
  561. if [ "${username%:*}" != "${username}" ] ; then
  562. password="--password ${username#*:}"
  563. username="${username%%:*}"
  564. fi
  565. username="--username ${username}"
  566. fi
  567. url="${url##*@}"
  568. rev="${url##*::}"
  569. if [ -z "${rev}" -o "${rev}" == "${url}" ] ; then
  570. rev=""
  571. else
  572. rev="-r ${rev}"
  573. fi
  574. url="${url%::*}"
  575. old=${PWD}
  576. tmp="`mktemp`"
  577. rm -rf ${tmp}
  578. dir=${bzfile%.tar.bz2}
  579. dir="`basename ${dir}`"
  580. mkdir -p ${tmp}
  581. cd ${tmp}
  582. echo SVN ${username} ${password} ${rev} ${mode}://${url}
  583. { echo svn export ${username} ${password} ${rev} ${mode}://${url} ${dir}
  584. if ! svn export ${username} ${password} ${rev} ${mode}://${url} ${dir}
  585. then touch .svn_error ; fi
  586. } &> .svn_output &
  587. while fuser .svn_output &> /dev/null ; do
  588. echo -ne `nice du -sh 2> /dev/null | cut -f1` 'downloaded from' \
  589. 'SVN archive so far...\r'
  590. sleep 3
  591. done
  592. echo `du -sh 2> /dev/null | cut -f1` 'downloaded from' \
  593. 'SVN archive (download finished).'
  594. if [ ! -f .svn_error ] ; then
  595. rm -f .svn_output
  596. tar --owner root --group root \
  597. --use-compress-program=bzip2 \
  598. -cf ${dir}.tar.bz2 ${dir}
  599. mv ${dir}.tar.bz2 ${old}/${bzfile}
  600. cd ${old} ; rm -rf ${tmp}
  601. else
  602. cat .svn_output
  603. cd $old ; rm -rf $tmp
  604. echo ERROR: SVN export ${username} ${password} ${mode}://${url} ${dir} \
  605. returned an error.
  606. echo "0 $gzfile $urlorig" >> src/Download-Errors
  607. fi
  608. elif [[ $url = cvs://* ]] ; then
  609. # cvs://mode:[login[:password]@]server[:port]:/path::module!revision/
  610. # btw, at least current cvs supports password at CVSROOT.
  611. url="${url#cvs://}"; url="${url%/*}"
  612. # cvs://mode:loc::module!date/
  613. #
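# e.g. (hypothetical URL) 'cvs://pserver:anonymous@cvs.example.org:/cvsroot/foo::foo!2006-01-01/'
# is split into mode 'pserver', location 'anonymous@cvs.example.org:/cvsroot/foo',
# module 'foo' and the checkout date option '-D 2006-01-01'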
  614. mode="${url%%:*}"; loc="${url#*:}"
  615. module="${loc##*::}"; loc="${loc%%::*}"
  616. revision="${module#*!}"; module="${module%%!*}"
  617. [[ $loc != *@* ]] && loc="anonymous@$loc"
  618. # everything after the first 'bang' (!) is analysed here
  619. # someday we could add more cvs options.
  620. #
  621. dat="$( echo $revision | \
  622. sed -n -e 's,\([0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\}\),-D \1,p' )"
  623. cvsdir="src/down.cvsdir.`echo $bzfile | tr / -`"
  624. saved_pwd=$PWD ; mkdir -p $cvsdir ; cd $cvsdir
  625. echo CVS $mode $loc $dat $module
  626. { [ $mode = ssh ] && export CVS_RSH=ssh
  627. [ $mode = pserver ] && loc=":pserver:$loc"
  628. # sometimes cvs wants to read ~/.cvspass just for fun ..
  629. touch $HOME/.cvspass
  630. # for ssh we need some way to quietly accept the key ...
  631. echo cvs -z9 -Q -d $loc checkout $dat -P $module
  632. if ! cvs -z9 -Q -d $loc checkout $dat -P $module
  633. then touch .cvs_error ; fi
  634. } &> .cvs_output &
  635. while fuser .cvs_output &> /dev/null ; do
  636. echo -ne `nice du -sh 2> /dev/null | cut -f1` 'downloaded from' \
  637. 'CVS archive so far...\r'
  638. sleep 3
  639. done
  640. echo `du -sh 2> /dev/null | cut -f1` 'downloaded from' \
  641. 'CVS archive (download finished).'
  642. if [ ! -f .cvs_error ] ; then
  643. cd `dirname $module`
  644. dir="`echo "$bzfile" | sed s/\.tar\.bz2$//`"
  645. dir="`basename $dir`"
  646. mv `basename $module` $dir
  647. tar --owner root --group root \
  648. --use-compress-program=bzip2 \
  649. -cf $dir.tar.bz2 $dir
  650. mv $dir.tar.bz2 $saved_pwd/$bzfile
  651. cd $saved_pwd ; rm -rf $cvsdir
  652. else
  653. cat .cvs_output
  654. cd $saved_pwd ; rm -rf $cvsdir
  655. echo ERROR: CVS $dat $loc $module \
  656. returned an error.
  657. echo "0 $gzfile $url" >> src/Download-Errors
  658. fi
  659. else
  660. if [ -s "$gzfile.incomplete" ] ; then
  661. echo "INFO: Trying to resume previous download .." 1>&2
  662. resume="-C -"
  663. else
  664. resume=""
  665. fi
  666. curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' -f --progress-bar $resume $curl_options "$url" -o "$gzfile.incomplete"
  667. curlret="$?"
  668. if [ "$resume" ] && \
  669. [ $curlret -eq 33 -o $curlret -eq 36 ] ; then
  670. echo "INFO: Resuming download not possible. ->" \
  671. "Overwriting old file." 1>&2
  672. rm -f "$gzfile.incomplete"
  673. curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' -f --progress-bar $curl_options "$url" -o "$gzfile.incomplete"
  674. curlret="$?"
  675. fi
  676. if [ $curlret -ne 0 ] ; then
  677. case "$curlret" in
  678. 18)
  679. echo "WARNING: Got only some of the" \
  680. "file. A re-run of $0"
  681. echo "WARNING: is required to complete" \
  682. "the download." ;;
  683. 130)
  684. echo -e '\rWARNING: CURL got a SIGINT' \
  685. "(someone pressed Ctrl-C). A re-run of"
  686. echo "WARNING: $0 is required to complete" \
  687. "the download." ; sleep 1 ;;
  688. *)
  689. echo "$curlret $gzfile $url" \
  690. >> src/Download-Errors
  691. echo -e '\rERROR: CURL Returned Error' \
  692. "$curlret. Please read" \
  693. "the curl manpage." ;;
  694. esac
  695. return 1
  696. elif [ ! -s "$gzfile.incomplete" ] ; then
  697. echo "0 $gzfile $url" >> src/Download-Errors
  698. echo "ERROR: CURL returned success but" \
  699. "we have no data!"
  700. curlret=1
  701. else
  702. case "$gzfile" in
  703. *.gz|*.tgz)
  704. typeexpr="gzip compressed data" ;;
  705. *.bz2|*.tbz2)
  706. typeexpr="bzip2 compressed data" ;;
  707. *.Z|*.tZ)
  708. typeexpr="compress'd data" ;;
  709. *.zip|*.jar)
  710. typeexpr="Zip archive data" ;;
  711. *.tar)
  712. typeexpr="tar archive" ;;
  713. *.txt)
  714. typeexpr="ASCII English text" ;;
  715. *)
  716. echo "WARNING: Unknown file extension: $gzfile"
  717. typeexpr="." ;;
  718. esac
  719. if file "$gzfile.incomplete" | grep -v "$typeexpr"
  720. then
  721. echo "ERROR: File type does not match" \
  722. "file name ($typeexpr)!"
  723. mv "$gzfile.incomplete" "$gzfile.extck-err"
  724. return 1
  725. else
  726. mv "$gzfile.incomplete" "$gzfile"
  727. rm -f "$gzfile".{extck,cksum}-err
  728. fi
  729. fi
  730. fi
  731. }
  732. list_dtags() {
  733. {
  734. # Split the long list of .desc files into smaller chunks here.
  735. find package -name "*.desc" | \
  736. while read x ; do descparser $x ; done | grep '^\[D\] '
  737. grep -h '^[X0-9]' target/*/download.txt | sed 's,^,[D] ,'
  738. grep -h '^[X0-9]' target/*/*/download.txt | sed 's,^,[D] ,'
  739. } | column_clean
  740. }
  741. list_cksums() {
  742. trap '' INT
  743. list_dtags | sed \
  744. -e "s,^$D2re[ ].*\($NODISTre\).*$,\2 download/nodist/\4/\3,;" \
  745. -e "s,^$D2re$,\2 download/mirror/\4/\3,;" \
  746. | sed 's,^\(.*/\)[^/:]*:[^ ]* \([X0-9]*\) ,\2 \1,;' | cut -f1,2 -d' '
  747. trap INT
  748. }
  749. list() {
  750. trap '' INT
  751. list_dtags | sed \
  752. -e "s,^$D2re[ ].*\($NODISTre\).*$,\2 download/nodist/\4/\3,;" \
  753. -e "s,^$D2re$,\2 download/mirror/\4/\3,;" | awk '{print $2;}'
  754. trap INT
  755. }
  756. list_unknown() {
  757. trap '' INT
  758. mkdir -p src/ ; list | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,' > src/down.$$.lst
  759. ls download/{INDEX,README,DOWNTEST,LAST-UPDATE} \
  760. >> src/down.$$.lst 2> /dev/null
  761. find download/* -follow -type f -o -type l | \
  762. egrep -v '^download/([^/]*(/.)?|mirror/(DOWNTEST|LAST-UPDATE|README))$' | \
  763. while read fn ; do
  764. grep -qx "$fn" src/down.$$.lst || echo "Unknown file: $fn"
  765. done
  766. rm -f src/down.$$.lst
  767. trap INT
  768. }
  769. list_missing() {
  770. if [ -z "${config}" ] ; then
  771. list_missing_all
  772. return
  773. fi
  774. if [ ! -f config/$config/packages ]; then
  775. echo "ERROR: Config $config doesn't exist."
  776. echo "ERROR: try ./scripts/Config -cfg $config first."
  777. exit 1
  778. fi
  779. ignore_xpkg=0
  780. while read on a b repo pkg c ; do
  781. forkedpkg=${pkg#*=}
  782. [ "$forkedpkg" = "$pkg" ] || pkg=${pkg%=*}
  783. if [ "${on}" = "X" ] ; then
  784. descparser "xpkg=${forkedpkg}" "package/${repo}/${pkg}/${pkg}.desc" | column_clean | grep '^\[D\]' | \
  785. sed -e "s,^$D2re[ ].*\($NODISTre\).*$,\2 download/nodist/\4/\3,;" \
  786. -e "s,^$D2re$,\2 download/mirror/\4/\3,;" | awk '{print $2;}' | \
  787. sed -e 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,' | \
  788. while read fn ; do
  789. [ -f "${fn}" ] || echo "${fn}"
  790. done
  791. fi
  792. done < config/$config/packages
  793. }
  794. list_missing_all() {
  795. trap '' INT
  796. list | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,' | \
  797. while read fn ; do
  798. [ -f "$fn" ] || echo "$fn"
  799. done
  800. trap INT
  801. }
  802. repository() {
  803. for repository ; do
  804. packages `echo package/$repository/*/*.desc`
  805. done
  806. }
  807. smpatches() {
  808. submaster_url="http://www.rocklinux.net/submaster"
  809. id2url="s#\([0-9]\{4,4\}\)\([0-9]\{2,2\}\)\([0-9]*\)#$submaster_url/data/\1/\2/\3.patch#"
  810. {
  811. while [ -n "$1" ]
  812. do
  813. # convert $1 (smid) via $id2url to submaster url;
  814. # convert url to curl input for -K (url = "XXX")
  815. echo "$1" | sed "/[0-9]\{10\}/ {$id2url ; s/^\(.*\)$/url = \"\1\"/; p; }; d; "
  816. shift
  817. done
  818. } | curl --progress-bar $curl_options -K -
  819. }
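# Sketch of the ID-to-URL rewriting above (the patch ID is hypothetical): an ID
# like '2006051234' is turned into the curl config line
#
#   url = "http://www.rocklinux.net/submaster/data/2006/05/1234.patch"
#
# which is fed to curl via '-K -'; arguments without a 10-digit ID are dropped.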
  820. required() {
  821. # Chosen config must exist
  822. #
  823. if [ ! -f config/$config/packages ]; then
  824. echo "ERROR: Config $config doesn't exist."
  825. echo "ERROR: try ./scripts/Config -cfg $config first."
  826. exit 1
  827. fi
  828. ignore_xpkg=0
  829. local forkedpkg
  830. while read on a b repo pkg c ; do
  831. forkedpkg=${pkg#*=}
  832. [ "$forkedpkg" = "$pkg" ] || pkg=${pkg%=*}
  833. if [ "$on" = "X" ] ; then
  834. descparser "xpkg=$forkedpkg" "package/$repo/$pkg/$pkg.desc" | grep '^\[D\] ' > src/down.$$.lst
  835. while read tag cksum file url flags ; do
  836. download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$repo" "$pkg"
  837. done < src/down.$$.lst ; rm -f src/down.$$.lst
  838. fi
  839. done < config/$config/packages
  840. target=`grep '^export ROCKCFG_TARGET=' config/$config/config | \
  841. cut -f2 -d= | tr -d "'"`
  842. arch=`grep '^export ROCKCFG_ARCH=' config/$config/config | \
  843. cut -f2 -d= | tr -d "'"`
  844. for targetdl in target/$target/{,$arch/}download.txt ; do
  845. if [ -f $targetdl ] ; then
  846. while read cksum file url flags ; do
  847. download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$target"
  848. done < $targetdl
  849. fi
  850. done
  851. }
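# The config/<config>/packages file read above is consumed field-wise as
# "<on> <a> <b> <repo> <pkg> ...", and only lines whose first field is 'X'
# (package enabled) trigger downloads; e.g. a line of the simplified,
# illustrative form "X ... ... base bash ..." downloads the files listed in
# package/base/bash/bash.desc.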
  852. all() {
  853. local each repo pkg
  854. for each in package/*/*/*.desc; do
  855. pkg="`echo $each | cut -f3 -d/`"
  856. repo="`echo $each | cut -f2 -d/`"
  857. while read tag cksum file url flags ; do
  858. download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$repo" "$pkg"
  859. done < <(descparser package/$repo/$pkg/$pkg.desc | grep '^\[D\] ')
  860. done
  861. for each in target/*/download.txt target/*/*/download.txt; do
  862. target="`echo $each | cut -f2 -d/`"
  863. while read cksum file url flags ; do
  864. download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$target"
  865. done < <(cat $each)
  866. done
  867. }
  868. packages() {
  869. local descfile
  870. local forkedpkg
  871. for arg; do
  872. # Check if this is a forked package name
  873. case "$arg" in
  874. *=*) ignore_xpkg=0; forkedpkg="xpkg=${arg#*=}"; arg=${arg%=*};;
  875. *) ignore_xpkg=1; forkedpkg="";;
  876. esac
  877. case "$arg" in
  878. target/*)
  879. if [ ! -f $arg ]; then
  880. echo "Skipping \"$arg\" (not found)!"
  881. continue
  882. fi
  883. target="`echo $arg | cut -f2 -d/`"
  884. while read cksum file url flags ; do
  885. download_file "`source_file cksum $file url $flags`" \
  886. "$url" "$cksum" "$target"
  887. done < <(cat $arg)
  888. ;;
  889. *)
  890. if [ ! "${arg%.desc}" = "$arg" ]; then
  891. descfile=$arg
  892. else
  893. descfile="`echo package/*/$arg/$arg.desc`"
  894. fi
  895. if [ ! -f $descfile ]; then
  896. echo "Skipping \"$arg\" (not found)!"
  897. continue
  898. fi
  899. pkg="`echo $descfile | cut -f3 -d/`"
  900. repo="`echo $descfile | cut -f2 -d/`"
  901. while read tag cksum file url flags ; do
  902. download_file "`source_file cksum $file url $flags`" \
  903. "$url" "$cksum" "$repo" "$pkg"
  904. done < <(descparser $forkedpkg package/$repo/$pkg/$pkg.desc |
  905. grep '^\[D\] ')
  906. ;;
  907. esac
  908. done
  909. }
  910. mapped_packages() {
  911. if [ ! -f src/pkgmapper ]
  912. then
  913. mkdir -p src
  914. bash scripts/xfind.sh package/. -type f -name 'pkgmapper.in' \
  915. -printf '%f\t%p\n' | sort | awk '{ $1="."; print; }' > src/pkgmapper
  916. fi
  917. for pkg; do
  918. export xpkg=${pkg#*=}
  919. . src/pkgmapper
  920. packages ${pkg%=*}=$xpkg
  921. done
  922. }
  923. # Things to do only for downloading
  924. #
  925. if [ "${1:0:5}" != "-list" -a $checkonly = 0 ]; then
  926. # Set proxy information
  927. if [ -f download/Proxy ]; then
  928. proxy="`cat download/Proxy`"
  929. if [ "$proxy" ]; then
  930. curl_options="$curl_options --proxy $proxy"
  931. else
  932. echo "INFO: No proxy information... removing download/Proxy." 1>&2
  933. rm download/Proxy
  934. fi
  935. fi
  936. if [ -f download/Proxy-auth ]; then
  937. proxyauth="`cat download/Proxy-auth`"
  938. if [ "$proxyauth" ]; then
  939. curl_options="$curl_options --proxy-user $proxyauth"
  940. else
  941. echo "INFO: No proxy-auth information... removing download/Proxy-auth." 1>&2
  942. rm download/Proxy-auth
  943. fi
  944. fi
  945. # Things to do only once
  946. #
  947. if [ $this_is_the_2nd_run = 0 ]; then
  948. # am i using a proxy?
  949. # -- say i'm doing it even when i already did ;-)
  950. if [ "$proxy" ]; then
  951. echo "INFO: Setting proxy to $proxy." 1>&2
  952. fi
  953. if [ "$proxyauth" ]; then
  954. echo "INFO: Setting proxy authentication information." 1>&2
  955. fi
  956. # do mirror detection
  957. detect_mirror
  958. fi
  959. fi
  960. case "$1" in
  961. -list) list ;;
  962. -list-dtags) list_dtags ;;
  963. -list-unknown) list_unknown ;;
  964. -list-missing) list_missing ;;
  965. -list-cksums) list_cksums ;;
  966. -required) required ;;
  967. -all) all ;;
  968. -repository) shift ; repository "$@" ;;
  969. -sm) shift ; smpatches "$@" ;;
  970. -*|"") exec $0 -help ;;
  971. *) mapped_packages "$@" ;;
  972. esac
  973. exit 0