mirror of the now-defunct rocklinux.org

#!/bin/bash
#
# --- ROCK-COPYRIGHT-NOTE-BEGIN ---
#
# This copyright note is auto-generated by ./scripts/Create-CopyPatch.
# Please add additional copyright information _after_ the line containing
# the ROCK-COPYRIGHT-NOTE-END tag. Otherwise it might get removed by
# the ./scripts/Create-CopyPatch script. Do not edit this copyright text!
#
# ROCK Linux: rock-src/scripts/Download
# ROCK Linux is Copyright (C) 1998 - 2004 Clifford Wolf
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version. A copy of the GNU General Public
# License can be found at Documentation/COPYING.
#
# Many people helped and are helping developing ROCK Linux. Please
# have a look at http://www.rocklinux.org/ and the Documentation/TEAM
# file for details.
#
# --- ROCK-COPYRIGHT-NOTE-END ---
#
# Run this command from the ROCK directory as ./scripts/Download [ options ]
#
# It enables you to download source files as described in the package
# definitions (optionally using a mirroring 'cache' server).
#
# This script also allows for checksum display/validation.

umask 022

. scripts/functions
if [ "$1" = '-help' ] ; then
    { echo
      echo "Usage:"
      echo
      echo " ./scripts/Download [ options ] [ <Package(s)> ]"
      echo " ./scripts/Download [ options ] [ <Desc file(s)> ]"
      echo " ./scripts/Download [ options ] -repository <Repositories>"
      echo " ./scripts/Download [ options ] -import <package(s)>"
      echo " ./scripts/Download [ options ] -sm <SM-PATCH-ID(s)>"
      echo " ./scripts/Download [ options ] { -all | -required }"
      echo
      echo " Download files required for the given packages, package description files,"
      echo " package repositories, import packages, or build configurations."
      echo " By default, this script auto-detects the best ROCK Linux mirror."
      echo " See the '-mirror none' output for help on bypassing the official mirrors."
      echo
      echo " -all                 download all files for a build configuration"
      echo " -required            download only files for packages that will be built in"
      echo "                      the given configuration"
      echo
      echo " Where [ options ] is an alias for:"
      echo "   [ -cfg <config> ] [ -nock ] [ -alt-dir <AlternativeDirectory> ]"
      echo "   [ -mirror <URL> | -check ] [ -try-questionable ] [ -notimeout ]"
      echo "   [ -longtimeout ] [ -curl-opt <curl-option>[:<curl-option>[:..]] ]"
      echo "   [ -proxy <server>[:<port>] ] [ -proxy-auth <username>[:<password>] ]"
      echo "   [ -copy ] [ -move ]"
      echo
      echo " -cfg <config>        download files for the given configuration"
      echo " -nock                skip checksum checks (don't use lightly)"
      echo " -alt-dir <AlternativeDirectory>"
      echo "                      also check for files to download in the given directory"
      echo " -mirror <URL>        set the download mirror to use"
      echo "                      Mirrors can also be local directories in the form"
      echo "                      of 'file:///<dir>'"
      echo " -check               check checksums only; don't download files"
      echo " -try-questionable    also try to download from URLs marked as"
      echo "                      questionable"
      echo " -notimeout           don't apply timeout settings to curl"
      echo " -longtimeout         apply long timeout settings"
      echo "                      By default, timeouts for connection and speed-limit"
      echo "                      are set"
      echo " -curl-opt <curl-option>"
      echo "                      pass option(s) to curl"
      echo " -proxy <server>[:<port>]"
      echo " -proxy-auth <username>[:<password>]"
      echo "                      pass proxy and proxy authentication to curl"
      echo "                      Warning: authentication can be seen with ps!"
      echo " -copy                copy files from the old download directory layout,"
      echo "                      if existent"
      echo " -move                move files instead"
      echo "                      Default is to link files, if existent, from the old"
      echo "                      layout to the new one"
      echo
      echo " ./scripts/Download -mk-cksum <Filename(s)>"
      echo " ./scripts/Download [ -list | -list-unknown | -list-missing | -list-cksums ]"
      echo
      echo " -mk-cksum <Filenames>"
      echo "                      calculate checksums on files as used in package"
      echo "                      descriptions"
      echo " -list                all files a complete download directory would contain"
      echo " -list-cksums         as -list, with a checksum for each file"
      echo " -list-unknown        files in the download directory that are not in any"
      echo "                      package description, e.g. old files"
      echo " -list-missing        files in package descriptions that are not"
      echo "                      downloaded (yet)"
      echo ; } >&2
    exit 1
fi
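
# Typical invocations (illustrative; the config, mirror URL, repository and
# package names below are examples, not fixed values):
#
#   ./scripts/Download -cfg default -required      # files for a configured build
#   ./scripts/Download -all                        # everything referenced anywhere
#   ./scripts/Download bash                        # files for a single package
#   ./scripts/Download -mirror http://mirror.example.org/rock -repository base
#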
# -mk-cksum mode: display the ROCK package checksum, i.e. the value
# ROCK validates downloads against.
#
# Currently bz2, tbz2, gz, tgz and Z files are unpacked before checksumming.
#
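# Example (illustrative file name; the checksum value shown is made up):
#
#   $ ./scripts/Download -mk-cksum foo-1.0.tar.gz
#   foo-1.0.tar.gz: 2811016135
#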
if [ "$1" = -mk-cksum ] ; then
    shift
    for x ; do
        echo -n "$x: "
        if [ ! -f "$x" ] ; then
            echo "No such file."
        elif [ "${x%.bz2}" != "$x" -o "${x%.tbz2}" != "$x" ] ; then
            bunzip2 < "$x" | cksum | cut -f1 -d' '
        elif [ "${x%.gz}" != "$x" -o "${x%.tgz}" != "$x" ] ; then
            gunzip < "$x" | cksum | cut -f1 -d' '
        elif [ "${x%.Z}" != "$x" ] ; then
            uncompress < "$x" | cksum | cut -f1 -d' '
        else
            cksum < "$x" | cut -f1 -d' '
        fi
    done
    exit 1
fi
# Handle options passed on the command line
#
mkdir -p src/ download/ ; config=default
this_is_the_2nd_run=0
mirror='' ; checkonly=0 ; altdir='' ; loop=1
tryques=0 ; nocheck=0 ; options='-this_is_the_2nd_run '
notimeout=0 ; curl_options='--disable-epsv --location'
altcopy=link
#
while [ $loop -eq 1 ] ; do
    case "$1" in
        -this_is_the_2nd_run)
            this_is_the_2nd_run=1
            shift ;;
        -cfg)
            options="$options -cfg $2"
            config="$2" ; shift ; shift ;;
        -nock)
            # -nock skips checksum checking (don't use lightly)
            options="$options -nock"
            nocheck=1 ; shift ;;
        -mirror)
            # -mirror uses a mirror for finding source files
            if [ "$2" = none ]; then
                echo
                echo "The option '-mirror none' is not supported anymore!"
                echo
                echo "You may 'echo none > download/Mirror' if you really"
                echo "want to use the original download resources. However, this"
                echo "is not supported and if such a download fails, this is not"
                echo "a bug in ROCK Linux and doesn't necessarily need fixing."
                echo
                exit 1
            else
                mkdir -p download
                echo "$2" > download/Mirror
                options="$options -mirror $2"
                mirror="$2"
            fi
            shift ; shift ;;
        -check)
            # -check just validates the file using the checksum
            options="$options -check"
            checkonly=1 ; shift ;;
        -notimeout)
            # don't add timeout curl options
            options="$options -notimeout"
            notimeout=2 ; shift ;;
        -longtimeout)
            # add long-timeout curl options
            options="$options -longtimeout"
            notimeout=1 ; shift ;;
        -curl-opt)
            # additional curl options
            options="$options -curl-opt $2"
            curl_options="$curl_options `echo $2 | tr : ' '`"
            shift ; shift ;;
        -proxy)
            # proxy option for curl
            mkdir -p download
            echo -n "$2" > download/Proxy
            options="$options -proxy $2"
            shift ; shift ;;
        -proxy-auth)
            # proxy authentication for curl - can be seen with ps!
            mkdir -p download
            echo -n "$2" > download/Proxy-auth
            chmod 600 download/Proxy-auth
            options="$options -proxy-auth $2"
            shift ; shift ;;
        -alt-dir)
            # check an alternative directory for package source tarballs
            options="$options -alt-dir $2"
            altdir=$2 ; shift ; shift ;;
        -try-questionable)
            # also try to download questionable URLs
            options="$options -try-questionable"
            tryques=1 ; shift ;;
        -move) altcopy=move ; shift ;;
        -copy) altcopy=copy ; shift ;;
        *)
            loop=0 ;;
    esac
done
if [ $notimeout -eq 0 ] ; then
    curl_options="$curl_options -y 10 -Y 10 --connect-timeout 60"
fi
if [ $notimeout -eq 1 ] ; then
    curl_options="$curl_options -y 60 -Y 1 --connect-timeout 300"
fi

# Disable checking for certificates on https downloads
curl_options="$curl_options -k"

# build descparser if needed and run it
descparser() {
    if [ ! -f src/descparser ]; then
        mkdir -p src
        cc -o src/descparser misc/tools-source/descparser.c
    fi
    cat "$@" | descparser_ign_xpkg=1 src/descparser
}
# cksum_chk filename cksum origfile
#
# This function verifies the checksum. If it fails, it renames the file
# to file.cksum-err and returns failure.
#
# It seems like the [ ] command has problems comparing high numbers,
# so a text comparison is used here.
#
# Nothing is done if the checksum is '0' or a string of 'X' characters.
#
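# Example call (the file name and checksum value are illustrative):
#
#   cksum_chk src/down.$$.dat 1996459178 download/mirror/f/foo-1.0.tar.bz2
#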
cksum_chk() {
    y="`echo $2 | sed 's,^0*,,;'`"
    [ $nocheck = 1 -o -z "$y" -o -z "${2//X/}" ] && return 0
    x="`cksum "$1" | cut -f1 -d' ' | sed 's,^0*,,;'`"
    if [ "$x" != "$y" ] ; then
        # Add .cksum-err extension to filename:
        echo "Cksum ERROR: $3.cksum-err ($x)"
        mv "$3" "$3.cksum-err" ; return 1
    fi
    return 0
}
# Auto-detect the best mirror and save its URL in $mirror
#
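# The result is cached in download/Mirror, which simply contains the mirror
# base URL (or the word "none"), e.g. (hypothetical mirror):
#
#   http://mirror.example.org/rock-linux
#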
detect_mirror() {
    if [ -f download/Mirror ] ; then
        mirror="`cat download/Mirror`"
        if [ -z "$mirror" -o "$mirror" = "none" ] ; then
            echo "INFO: Found download/Mirror: none" \
                 "(use the original download locations)" 1>&2
        else
            echo "INFO: Found cached mirror URL in download/Mirror:" 1>&2
            echo "INFO: $mirror" 1>&2
        fi
        echo "INFO: To force a new mirror auto-detection, remove download/Mirror." 1>&2
    else
        echo "INFO: Auto-detecting best mirror ..." 1>&2
        eval "$(egrep '^(rockver)=' scripts/parse-config)"
        echo "INFO: Downloading mirror-list from www.rocklinux.net." 1>&2
        curl -s -S $curl_options -o src/Download-Mirror-List \
             "http://www.rocklinux.net/mirrors.cgi?$rockver"
        bestval=0 ; result='No Mirror Found!'
        while read mirror_name ; do
            if [ "${mirror_name#=}" != "$mirror_name" ] ; then
                mirror_name="${mirror_name#= }"
                mirror_name="${mirror_name% =}"
                read mirror_url
                echo -n "INFO: Testing <$mirror_name> ..." 1>&2
                val="$(curl -s $curl_options -m 20 "${mirror_url%/}/DOWNTEST" \
                       -w "ok %{speed_download}" -o /dev/null)"
                if [ "$val" = "${val#ok }" -o "$val" = "ok 0.000" ] ; then
                    echo " error"
                else
                    xval=`echo ${val#ok } | tr -d .` ; echo " $val"
                    if [ "$xval" -gt "$bestval" ] ; then
                        bestval=$xval ; mirror="${mirror_url%/}"
                        result="Using mirror <$mirror>."
                    fi
                fi
            fi
        done < src/Download-Mirror-List
        echo $mirror > download/Mirror
        echo "INFO: $result" 1>&2
    fi
}
# download_file local-filename download-location cksum repo pkg
#
# This function decides whether to download directly or from a mirror,
# validates the checksum, etc.
# It calls download_file_now to do the actual download.
#
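# Example call (all values are illustrative):
#
#   download_file download/mirror/f/foo-1.0.tar.gz \
#       http://downloads.example.org/foo/foo-1.0.tar.gz 1996459178 base foo
#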
download_file() {
    # Init
    #
    local gzfile="$1" location="$2" cksum="$3" repo="$4" pkg="$5"
    # Make src directory for creating tarballs
    mkdir -p src/
    # Tarball file name (if you change this, also adapt Create-ISO):
    bzfile="`echo "$gzfile" | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,'`"
    # Lock file name:
    lkfile="src/down.lockfile.`echo $bzfile | tr / -`"
    # Check if it's already there
    #
    [ -s "$bzfile" -a $checkonly != 1 ] && return 0
    # Take the lock
    #
    if [ -s "$lkfile" ]; then
        echo "Found $lkfile -> skip download."
        return 0
    fi
    trap 'rm -f "$lkfile"' INT
    echo $$ > "$lkfile"
    # Check if we only want to test the checksum(s)
    #
    if [ $checkonly = 1 ] ; then
        gzfile="$bzfile"
        if [ ! -f "$bzfile" ] ; then
            echo "File missing: $bzfile"
            rm -f "$lkfile" ; trap INT ; return 1
        fi
        if [ -z "${cksum##X*}" ] ; then
            echo "No checksum (ignore): $bzfile"
            rm -f "$lkfile" ; trap INT ; return 1
        fi
        if [ "$cksum" -eq 0 ] ; then
            echo "No checksum (missing): $bzfile"
            rm -f "$lkfile" ; trap INT ; return 1
        fi
    elif [ -s "$gzfile" ] ; then
        echo ; echo "Already downloaded $gzfile ..."
    else
        echo ; echo "Downloading $gzfile ..."
        # Existing *.cksum-err
        #
        if [ -s "$gzfile.cksum-err" ] ; then
            # cksum-err file already exists:
            echo "ERROR: Found $gzfile.cksum-err."
            echo "ERROR: That means that we downloaded the" \
                 "file already and it had an"
            echo "ERROR: incorrect checksum. Remove the" \
                 "*.cksum-err file to force a"
            echo "ERROR: new download of that file."
            rm -f "$lkfile" ; trap INT ; return 1
        fi
        # Existing *.extck-err
        #
        if [ -s "$gzfile.extck-err" ] ; then
            # extck-err file already exists:
            echo "ERROR: Found $gzfile.extck-err."
            echo "ERROR: That means that we downloaded the" \
                 "file already and its content"
            echo "ERROR: did not match its filename extension." \
                 "Remove the *.extck-err file"
            echo "ERROR: to force a new download of that file."
            rm -f "$lkfile" ; trap INT ; return 1
        fi
        # Questionable URL
        #
        if [ "$location" != "${location#\?}" ] ; then
            if [ "$tryques" = 0 ] ; then
                echo "ERROR: URL is marked as questionable." \
                     "Not downloading this file."
                rm -f "$lkfile" ; trap INT ; return 1
            else
                echo "WARNING: URL is marked as questionable." \
                     "Downloading it anyway."
                location="${location#\?}"
            fi
        fi
        # Make directory (if required)
        #
        if [ ! -d `dirname "$bzfile"` ] ; then
            mkdir -p `dirname "$bzfile"`
        fi
        # Alternative Directory
        #
        if [ "$altdir" ] ; then
            altfile=$(find $altdir/ -name `basename $bzfile` | head -n 1)
        else
            altfile=""
        fi
        # FIXME: compatibility, can be removed sooner or later...
        # Check the old download directory layout
        if [ -z "$altfile" ]; then
            if [ -f "download/$repo${pkg:+/}$pkg/`basename $bzfile`" ]; then
                altfile="download/$repo${pkg:+/}$pkg/`basename $bzfile`"
            fi
        fi
        if [ "$altfile" ] ; then
            echo "Found `basename $bzfile` as $altfile."
            if [ "$altcopy" = 'link' ]; then
                cp -lv $altfile $bzfile
            elif [ "$altcopy" = 'copy' ]; then
                cp -v $altfile $bzfile
            elif [ "$altcopy" = 'move' ]; then
                mv -v $altfile $bzfile
            fi
            gzfile="$bzfile"
        else
            # Mirroring
            #
            mirror="$( cat download/Mirror )"
            if [ -n "$mirror" -a "$mirror" != "none" -a -z "${bzfile##download/mirror/*}" ] ; then
                # try to use the mirror
                if ! download_file_now "!$mirror/${bzfile#download/mirror/}" $bzfile $bzfile; then
                    echo "INFO: download from mirror failed, trying original URL." 1>&2
                    download_file_now $location $gzfile $bzfile
                else
                    gzfile="$bzfile"
                fi
            else
                # don't want to use a mirror
                download_file_now $location $gzfile $bzfile
            fi
        fi
        if [ ! -s "$gzfile" ]; then
            rm -f "$lkfile" ; trap INT ; return 1
        fi
    fi
    # Convert a .gz to .bz2 and test checksum
    #
    if [ "$gzfile" != "$bzfile" ] ; then
        echo "bzip'ing + cksum-test: $gzfile"
        gunzip < "$gzfile" > src/down.$$.dat
        if cksum_chk src/down.$$.dat $cksum "$gzfile" ; then
            bzip2 < src/down.$$.dat > "$bzfile" ; rm -f "$gzfile"
        fi
        rm -f src/down.$$.dat
    # Execute a cksum test on a bzip2 file
    #
    elif [ "${gzfile%.bz2}" != "$gzfile" -o \
           "${gzfile%.tbz2}" != "$gzfile" ]
    then
        echo "cksum-test (bzip2): $bzfile"
        if [ $nocheck = 0 ] ; then
            bunzip2 < "$bzfile" > src/down.$$.dat
            cksum_chk src/down.$$.dat $cksum "$bzfile"
        fi
        rm -f src/down.$$.dat
    # Execute a cksum test on a raw data file
    #
    else
        echo "cksum-test (raw): $gzfile"
        cksum_chk "$gzfile" $cksum "$gzfile"
    fi
    # Free Lock and finish
    #
    rm -f "$lkfile" ; trap INT ; return 0
}
# download_file_now location remote_filename local_filename
#
# This function executes the actual download using curl.
#
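# A location starting with '!' is used (minus the '!') as the complete URL;
# otherwise the URL is built from the directory part of the location plus the
# basename of remote_filename (see the "Create URL" step below).
#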
download_file_now() {
    local location="$1" gzfile="$2" bzfile="$3"
    # Create URL
    #
    if [ "${location#!}" != "$location" ] ; then
        url="`echo "$location" | sed 's,!,,'`"
    else
        url="`echo "$location" | \
              sed 's,/[^/]*$,,'`/`echo $gzfile | sed 's,.*/,,'`"
    fi
    # Check for an existing error log entry
    #
    if test -s src/Download-Errors &&
       grep -q " $url\$" src/Download-Errors ; then
        echo "ERROR: According to src/Download-Errors" \
             "we already had an error for the URL"
        echo "ERROR: $url"
        echo "ERROR: So I'm not trying to download" \
             "it again (remove src/Download-Errors"
        echo "ERROR: if you want to force a retry)."
        return 1
    fi
    # Download
    #
    if [[ $url = cvs://* ]] ; then
        # cvs://mode:[login[:password]@]server[:port]:/path::module!revision/
        # btw, at least current cvs supports a password at CVSROOT.
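        # A hypothetical example of such a URL (the revision may be a date):
        #   cvs://pserver:anonymous@cvs.example.org:/cvsroot::mymodule!2004-01-01/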
        url="${url#cvs://}"; url="${url%/*}"
        # cvs://mode:loc::module!date/
        #
        mode="${url%%:*}"; loc="${url#*:}"
        module="${loc##*::}"; loc="${loc%%::*}"
        revision="${module#*!}"; module="${module%%!*}"
        [[ $loc != *@* ]] && loc="anonymous@$loc"
        # everything after the first 'bang' (!) is analysed here
        # someday we could add more cvs options.
        #
        dat="$( echo $revision | \
                sed -n -e 's,\([0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\}\),-D \1,p' )"
        cvsdir="src/down.cvsdir.`echo $bzfile | tr / -`"
        saved_pwd=$PWD ; mkdir -p $cvsdir ; cd $cvsdir
        echo CVS $mode $loc $dat $module
        { [ $mode = ssh ] && export CVS_RSH=ssh
          [ $mode = pserver ] && loc=":pserver:$loc"
          # sometimes cvs wants to read ~/.cvspass just for fun ..
          touch $HOME/.cvspass
          # for ssh we need some way to quietly accept the key ...
          echo cvs -z9 -Q -d $loc checkout $dat -P $module
          if ! cvs -z9 -Q -d $loc checkout $dat -P $module
          then touch .cvs_error ; fi
        } &> .cvs_output &
        while fuser .cvs_output &> /dev/null ; do
            echo -ne `nice du -sh 2> /dev/null | cut -f1` 'downloaded from' \
                     'CVS archive so far...\r'
            sleep 3
        done
        echo `du -sh 2> /dev/null | cut -f1` 'downloaded from' \
             'CVS archive (download finished).'
        if [ ! -f .cvs_error ] ; then
            cd `dirname $module`
            dir="`echo "$bzfile" | sed s/\.tar\.bz2$//`"
            dir="`basename $dir`"
            mv `basename $module` $dir
            tar --owner root --group root \
                --use-compress-program=bzip2 \
                -cf $dir.tar.bz2 $dir
            mv $dir.tar.bz2 $saved_pwd/$bzfile
            cd $saved_pwd ; rm -rf $cvsdir
        else
            cat .cvs_output
            cd $saved_pwd ; rm -rf $cvsdir
            echo ERROR: CVS $dat $loc $module \
                 returned an error.
            echo "0 $gzfile $url" >> src/Download-Errors
        fi
    else
        if [ -s "$gzfile.incomplete" ] ; then
            echo "INFO: Trying to resume previous download .." 1>&2
            resume="-C -"
        else
            resume=""
        fi
        curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' -f --progress-bar $resume $curl_options "$url" -o "$gzfile.incomplete"
        curlret="$?"
        if [ "$resume" ] && \
           [ $curlret -eq 33 -o $curlret -eq 36 ] ; then
            echo "INFO: Resuming download not possible. ->" \
                 "Overwriting old file." 1>&2
            rm -f "$gzfile.incomplete"
            curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' -f --progress-bar $curl_options "$url" -o "$gzfile.incomplete"
            curlret="$?"
        fi
        if [ $curlret -ne 0 ] ; then
            case "$curlret" in
                18)
                    echo "WARNING: Got only some of the" \
                         "file. A re-run of $0"
                    echo "WARNING: is required to complete" \
                         "the download." ;;
                130)
                    echo -e '\rWARNING: CURL got a SIGINT' \
                         "(someone pressed Ctrl-C). A re-run of"
                    echo "WARNING: $0 is required to complete" \
                         "the download." ; sleep 1 ;;
                *)
                    echo "$curlret $gzfile $url" \
                         >> src/Download-Errors
                    echo -e '\rERROR: CURL returned error' \
                         "$curlret. Please read" \
                         "the curl manpage." ;;
            esac
            return 1
        elif [ ! -s "$gzfile.incomplete" ] ; then
            echo "0 $gzfile $url" >> src/Download-Errors
            echo "ERROR: CURL returned success but" \
                 "we have no data!"
            curlret=1
        else
            case "$gzfile" in
                *.gz|*.tgz)
                    typeexpr="gzip compressed data" ;;
                *.bz2|*.tbz2)
                    typeexpr="bzip2 compressed data" ;;
                *.Z|*.tZ)
                    typeexpr="compress'd data" ;;
                *.zip|*.jar)
                    typeexpr="Zip archive data" ;;
                *.tar)
                    typeexpr="tar archive" ;;
                *)
                    echo "WARNING: Unknown file extension: $gzfile"
                    typeexpr="." ;;
            esac
            if file "$gzfile.incomplete" | grep -v "$typeexpr"
            then
                echo "ERROR: File type does not match" \
                     "filename ($typeexpr)!"
                mv "$gzfile.incomplete" "$gzfile.extck-err"
            else
                mv "$gzfile.incomplete" "$gzfile"
            fi
        fi
    fi
}
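
# The [D] lines collected below have the form "[D] <cksum> <filename> <url> [flags]"
# in the package *.desc files; target/*/download.txt uses the same fields without
# the leading "[D]" tag.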
list_dtags() {
    {
        descparser package/*/*/*.desc | grep '^\[D\] '
        grep -h '^[X0-9]' target/*/download.txt | sed 's,^,[D] ,'
        grep -h '^[X0-9]' target/*/*/download.txt | sed 's,^,[D] ,'
    } | column_clean
}
list_cksums() {
    trap '' INT
    list_dtags | sed \
        -e "s,^$D2re[ ].*\($NODISTre\).*$,\2 download/nodist/\4/\3,;" \
        -e "s,^$D2re$,\2 download/mirror/\4/\3,;" \
        | sed 's,^\(.*/\)[^/:]*:[^ ]* \([X0-9]*\) ,\2 \1,;' | cut -f1,2 -d' '
    trap INT
}
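# Note: list() below maps each file name to
# download/mirror/<first character of the file name>/<file name>.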
list() {
    trap '' INT
    list_dtags | awk '{print $3;}' | \
    while read ft; do
        echo "download/mirror/${ft:0:1}/$ft"
    done
    trap INT
}
list_unknown() {
    trap '' INT
    mkdir -p src/ ; list | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,' > src/down.$$.lst
    ls download/{INDEX,README,DOWNTEST,LAST-UPDATE} \
       >> src/down.$$.lst 2> /dev/null
    find download/* -type f -o -type l | grep -v -e download/Mirror \
         -e download/Proxy -e download/Proxy-auth | \
    while read fn ; do
        grep -qx "$fn" src/down.$$.lst || echo "Unknown file: $fn"
    done
    rm -f src/down.$$.lst
    trap INT
}
list_missing() {
    trap '' INT
    list | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,' | \
    while read fn ; do
        [ -f "$fn" ] || echo "$fn"
    done
    trap INT
}
repository() {
    for repository ; do
        packages `echo package/$repository/*/*.desc`
    done
}
smpatches() {
    submaster_url="http://www.rocklinux.net/submaster"
    id2url="s#\([0-9]\{4,4\}\)\([0-9]\{2,2\}\)\([0-9]*\)#$submaster_url/data/\1/\2/\3.patch#"
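    # e.g. (illustrative ID) 2004051234 -> $submaster_url/data/2004/05/1234.patch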
    {
        while [ -n "$1" ]
        do
            # convert $1 (smid) via $id2url to a submaster url;
            # convert the url to curl -K input (url = "XXX")
            echo "$1" | sed "/[0-9]\{10\}/ {$id2url ; s/^\(.*\)$/url = \"\1\"/; p; }; d; "
            shift
        done
    } | curl --progress-bar $curl_options -K -
}
required() {
    # The chosen config must exist
    #
    if [ ! -f config/$config/packages ]; then
        echo "ERROR: Config $config doesn't exist."
        echo "ERROR: Try ./scripts/Config -cfg $config first."
        exit 1
    fi
    local forkedpkg
    while read on a b repo pkg c ; do
        forkedpkg=${pkg%=*}
        [ "$forkedpkg" = "$pkg" ] || pkg=$forkedpkg
        if [ "$on" = "X" ] ; then
            descparser package/$repo/$pkg/$pkg.desc | grep '^\[D\] ' > src/down.$$.lst
            while read tag cksum file url flags ; do
                download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$repo" "$pkg"
            done < src/down.$$.lst ; rm -f src/down.$$.lst
        fi
    done < config/$config/packages
    target=`grep '^export ROCKCFG_TARGET=' config/$config/config | \
            cut -f2 -d= | tr -d "'"`
    arch=`grep '^export ROCKCFG_ARCH=' config/$config/config | \
          cut -f2 -d= | tr -d "'"`
    for targetdl in target/$target/{,$arch/}download.txt ; do
        if [ -f $targetdl ] ; then
            while read cksum file url flags ; do
                download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$target"
            done < $targetdl
        fi
    done
}
all() {
    local each repo pkg
    for each in package/*/*/*.desc; do
        pkg="`echo $each | cut -f3 -d/`"
        repo="`echo $each | cut -f2 -d/`"
        while read tag cksum file url flags ; do
            download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$repo" "$pkg"
        done < <(descparser package/$repo/$pkg/$pkg.desc | grep '^\[D\] ')
    done
    for each in target/*/download.txt; do
        target="`echo $each | cut -f2 -d/`"
        while read cksum file url flags ; do
            download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$target"
        done < <(cat $each)
    done
}
import() {
    local import
    for import ; do
        mapped_packages $( grep '#if ' package/import/$import/*.desc | \
                           column_clean | cut -d' ' -f4)
    done
}
packages() {
    local descfile
    for arg; do
        case "$arg" in
            target/*)
                if [ ! -f $arg ]; then
                    echo "Skipping \"$arg\" (not found)!"
                    continue
                fi
                target="`echo $arg | cut -f2 -d/`"
                while read cksum file url flags ; do
                    download_file "`source_file cksum $file url $flags`" \
                                  "$url" "$cksum" "$target"
                done < <(cat $arg)
                ;;
            *)
                if [ ! "${arg%.desc}" = "$arg" ]; then
                    descfile=$arg
                else
                    descfile="`echo package/*/$arg/$arg.desc`"
                fi
                if [ ! -f $descfile ]; then
                    echo "Skipping \"$arg\" (not found)!"
                    continue
                fi
                pkg="`echo $descfile | cut -f3 -d/`"
                repo="`echo $descfile | cut -f2 -d/`"
                while read tag cksum file url flags ; do
                    download_file "`source_file cksum $file url $flags`" \
                                  "$url" "$cksum" "$repo" "$pkg"
                done < <(descparser package/$repo/$pkg/$pkg.desc | grep '^\[D\] ')
                ;;
        esac
    done
}
mapped_packages() {
    if [ ! -f src/pkgmapper ]
    then
        mkdir -p src
        bash scripts/xfind.sh package/. -type f -name 'pkgmapper.in' \
             -printf '%f\t%p\n' | sort | awk '{ $1="."; print; }' > src/pkgmapper
    fi
    for pkg; do
        xpkg=$pkg
        . src/pkgmapper
        packages $pkg
    done
}
# Things to do only for downloading
#
if [ "${1:0:5}" != "-list" -a $checkonly = 0 ]; then
    # Set proxy information
    if [ -f download/Proxy ]; then
        proxy="`cat download/Proxy`"
        if [ "$proxy" ]; then
            curl_options="$curl_options --proxy $proxy"
        else
            echo "INFO: No proxy information... removing download/Proxy." 1>&2
            rm download/Proxy
        fi
    fi
    if [ -f download/Proxy-auth ]; then
        proxyauth="`cat download/Proxy-auth`"
        if [ "$proxyauth" ]; then
            curl_options="$curl_options --proxy-user $proxyauth"
        else
            echo "INFO: No proxy-auth information... removing download/Proxy-auth." 1>&2
            rm download/Proxy-auth
        fi
    fi
    # Things to do only once
    #
    if [ $this_is_the_2nd_run = 0 ]; then
        # am I using a proxy?
        # -- say I'm doing it even when I already did ;-)
        if [ "$proxy" ]; then
            echo "INFO: Setting proxy to $proxy." 1>&2
        fi
        if [ "$proxyauth" ]; then
            echo "INFO: Setting proxy authentication information." 1>&2
        fi
        # do mirror detection
        detect_mirror
    fi
fi
case "$1" in
    -list)          list ;;
    -list-dtags)    list_dtags ;;
    -list-unknown)  list_unknown ;;
    -list-missing)  list_missing ;;
    -list-cksums)   list_cksums ;;
    -required)      required ;;
    -all)           all ;;
    -import)        shift ; import "$@" ;;
    -repository)    shift ; repository "$@" ;;
    -sm)            shift ; smpatches "$@" ;;
    -*|"")          exec $0 -help ;;
    *)              mapped_packages "$@" ;;
esac
exit 0