#!/bin/bash
#
# --- ROCK-COPYRIGHT-NOTE-BEGIN ---
# 
# This copyright note is auto-generated by ./scripts/Create-CopyPatch.
# Please add additional copyright information _after_ the line containing
# the ROCK-COPYRIGHT-NOTE-END tag. Otherwise it might get removed by
# the ./scripts/Create-CopyPatch script. Do not edit this copyright text!
# 
# ROCK Linux: rock-src/scripts/Download
# ROCK Linux is Copyright (C) 1998 - 2006 Clifford Wolf
# 
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version. A copy of the GNU General Public
# License can be found at Documentation/COPYING.
# 
# Many people helped and are helping developing ROCK Linux. Please
# have a look at http://www.rocklinux.org/ and the Documentation/TEAM
# file for details.
# 
# --- ROCK-COPYRIGHT-NOTE-END ---
#
#   Run this command from the ROCK directory as ./scripts/Download [ options ]
#
#   It enables you to download source files as described in the package 
#   definitions (optionally using a mirroring 'cache' server). 
#
#   This script also allows for checksum display/validation.

umask 022

. scripts/functions

# -help mode: print the full usage text to stderr and exit non-zero
# (no download work was performed).
if [ "$1" = '-help' ] ; then
	{ echo
	echo "Usage:"
	echo
	echo " ./scripts/Download [ options ] [ <Package(s)> ]"
	echo " ./scripts/Download [ options ] [ <Desc file(s)> ]"
	echo " ./scripts/Download [ options ] -repository <Repositories>"
	echo " ./scripts/Download [ options ] -sm <SM-PATCH-ID(s)>"
	echo " ./scripts/Download [ options ] { -all | -required }"
	echo 
	echo " Download files required for given packages, package description files, package"
	echo " repositories, import packages, or build configurations."
 	echo " On default, this script auto-detects the best ROCK Linux mirror."
	echo
	echo "	-all		download all files for a build configuration"
	echo "	-required	download only files for packages that will be built in"
	echo "			the given configuration" 
	echo
	echo " Where [ options ] is an alias for:"
	echo "    [ -cfg <config> ] [ -nock ] [ -alt-dir <AlternativeDirectory> ]"
	echo "    [ -mirror <URL> | -check ] [ -try-questionable ]  [ -notimeout ]"
	echo "    [ -longtimeout ] [ -curl-opt <curl-option>[:<curl-option>[:..]] ]"
	echo "    [ -proxy <server>[:<port>] ] [ -proxy-auth <username>[:<password>] ]"
	echo "    [ -copy ] [ -move ] [ -cksumpatch ]"
	echo
	echo "	-cfg <config>	download files for the given configuration"
	echo "	-nock		skip checksum checks (don't use lightly)"
	echo "	-alt-dir <AlternativeDirectory>"
	echo "			check for files to download also in the given directory"
	echo "	-mirror <URL>	set the download mirror to use"
	echo "			Mirrors can also be local directories in the form"
	echo "			of 'file:///<dir>'"
	echo "			Use '-mirror auto' to autodetect a new best mirror server"
	echo " 			Use '-mirror none' to bypass the official mirrors"
	echo "	-check		check checksums only; don't download files"
	echo "	-try-questionable	also try to download from URLs marked as"
	echo "				questionable"
	echo "	-notimeout	don't apply timeout settings to curl"
	echo "	-longtimeout	apply long timeout settings"
	echo " 			By default, timeouts for connection and speed-limit"
	echo "			are set"
	echo "	-curl-opt <curl-option>		pass option(s) to curl"
	echo "	-proxy <server>[:<port>]"
	echo "	-proxy-auth <username>[:<password>]" 
	echo "			pass proxy and proxy authentication to curl"
	echo " 			Warning: authentication can be seen with ps!"    
	echo "	-copy		copy files from the old download directory layout,"
	echo "			if existent"
	echo "	-move		move files instead"
	echo "			Default is to link files, if existent, from the old"
	echo "			layout to the new one"
	echo "	-cksumpatch	Patch the checksum in the .desc file after downloading."
	echo
	echo " ./scripts/Download -mk-cksum <Filename(s)>"
	echo " ./scripts/Download [ -list | -list-unknown | -list-missing | -list-cksums ]"
	echo
	echo "	-mk-cksum <Filenames>	calculate checksums on files as used in package"
	echo "				descriptions"
	echo "	-list		all files a complete download directory would contain"
	echo "	-list-cksums	as -list, with a checksum for each file"
	echo "	-list-unknown	files in the download directory that are not in any"
	echo "			package description, e.g. old files"
	echo "	-list-missing	files in package descriptions that are not"
	echo "			downloaded (yet)" 
	echo ; } >&2
	exit 1
fi

# -mk-cksum mode (display ROCK type package checksum): it
# displays the checksum ROCK validates against.
#
# Compressed files (bz2, tbz2, gz, tgz, Z) are checksummed on their
# *uncompressed* content, so the value is stable across recompression.
#
if [ "$1" = -mk-cksum ] ; then
    shift
    for x ; do
	echo -n "$x: "
	if [ ! -f "$x" ] ; then
	    echo "No such file."
	elif [ "${x%.bz2}" != "$x" -o "${x%.tbz2}" != "$x" ] ; then
	    bunzip2 < "$x" | cksum | cut -f1 -d' '
	elif [ "${x%.gz}"  != "$x" -o "${x%.tgz}"  != "$x" ] ; then
	    gunzip < "$x" | cksum | cut -f1 -d' '
	elif [ "${x%.Z}" != "$x" ] ; then
	    uncompress < "$x" | cksum | cut -f1 -d' '
	else
	    cksum < "$x" | cut -f1 -d' '
	fi
    done
    # Exit 0 on success: the old unconditional 'exit 1' made this mode
    # always look like a failure to calling scripts and && chains.
    exit 0
fi

# Handle options passed on the command line
#
# Defaults for all option flags. 'options' accumulates already-parsed
# switches so they can be forwarded to a recursive second run.
mkdir -p src/ download/ ; config=default
this_is_the_2nd_run=0
# mirror: '' = not chosen yet (autodetect), 'none' = use original URLs,
# anything else = mirror base URL. checkonly: 1 = -check (verify only).
mirror='' ; checkonly=0 ; altdir='' ; loop=1
tryques=0 ; nocheck=0 ; options='-this_is_the_2nd_run '
# notimeout: 0 = default timeouts, 1 = long timeouts, 2 = no timeouts
notimeout=0 ; curl_options='--disable-epsv --location'
# altcopy: how to take files from the old download layout (link/copy/move)
altcopy=link ; cksumpatch=0
ignore_xpkg=1

# Reuse the mirror selected by a previous run, if one was recorded.
if [ -f download/Mirror ]; then mirror="`cat download/Mirror`" ; fi

# Consume leading command-line options; the first non-option argument
# stops the loop and is handled by the mode dispatch at the bottom.
while [ $loop -eq 1 ] ; do
	case "$1" in

	    -this_is_the_2nd_run)
		this_is_the_2nd_run=1 
		shift ;;

	    -cfg)
		options="$options -cfg $2"
		config="$2" ; shift ; shift ;;

	    -nock)
		# -nock skips checksum checking (don't use lightly)
		options="$options -nock"
		nocheck=1 ; shift ;;

	    -mirror)
		# -mirror uses a mirror for finding source files
		# ('none' is accepted but unsupported, and is deliberately
		# not persisted to download/Mirror)
		if [ "$2" = none ]; then
			echo
			echo "WARNING: The option '-mirror none' is not supported anymore!"
			echo
			echo "WARNING: You may use '-mirror none' if you really want to use the"
			echo "WARNING: original download resources. However, this is not"
			echo "WARNING: supported and if such a download fails, this is not a bug"
			echo "WARNING: in ROCK Linux and doesn't necessarily need fixing."
			echo
		else
			echo "$2" > download/Mirror
		fi
		options="$options -mirror $2"
		mirror="$2"
		shift 2 ;;

	    -check)
		# -check just validates the file using the checksum
		options="$options -check"
		checkonly=1 ; shift ;;

	    -notimeout)
		# don't add timeout curl options
		options="$options -notimeout"
		notimeout=2 ; shift ;;

	    -longtimeout)
		# use the long (lenient) timeout curl options
		options="$options -longtimeout"
		notimeout=1 ; shift ;;

	    -curl-opt)
		# additional curl options, colon-separated
		options="$options -curl-opt $2"
		curl_options="$curl_options `echo $2 | tr : ' '`"
		shift ; shift ;;

	    -proxy)
		# proxy option for curl; persisted in download/Proxy
		mkdir -p download
		echo -n "$2" > download/Proxy
		options="$options -proxy $2"
		shift ; shift ;;

	    -proxy-auth)
		# proxy authentication for curl - can be seen with ps!
		mkdir -p download
		echo -n "$2" > download/Proxy-auth
		chmod 600 download/Proxy-auth
		options="$options -proxy-auth $2"
		shift ; shift ;;

	    -alt-dir)
		# check for an alternative directory where to search for
		# package source tarballs
		options="$options -alt-dir $2"
		altdir=$2 ; shift ; shift ;;

	    -try-questionable)
		# also try to download questionable URLs
		options="$options -try-questionable"
		tryques=1 ; shift ;;
	
	    -move) altcopy=move ; shift ;;
	    -copy) altcopy=copy ; shift ;;
	    -cksumpatch) cksumpatch=1 ; shift ;;

	    *)
		loop=0 ;;
	esac
done

# Apply the requested curl timeout profile (see -notimeout/-longtimeout):
#   0 = default: 10s speed-limit window, 60s connect timeout
#   1 = long:    60s speed-limit window, 300s connect timeout
#   2 = none:    no timeout options at all
case $notimeout in
	0) curl_options="$curl_options -y 10 -Y 10 --connect-timeout 60" ;;
	1) curl_options="$curl_options -y 60 -Y 1  --connect-timeout 300" ;;
esac

# Disable checking for certificates on https downloads
curl_options="$curl_options -k"

# build descparser if needed and run it
# name=value pairs as arguments to this function are passed to the 
# descparser binary as environment variables
# Build the descparser helper on first use, then run it over the given
# .desc files. Arguments of the form name=value are placed into the
# helper's environment; all other arguments are treated as input files.
descparser() {
	local envvars descfiles a

	# Compile the helper binary lazily.
	if [ ! -f src/descparser ]; then
		mkdir -p src
		cc -o src/descparser misc/tools-source/descparser.c
	fi

	# By default tell descparser to ignore xpkg handling; callers that
	# need it set ignore_xpkg=0 and pass an explicit xpkg=... argument.
	case "$ignore_xpkg" in
		0) envvars="" ;;
		*) envvars="descparser_ign_xpkg=1" ;;
	esac

	for a ; do
		case $a in
			*=*) envvars="$envvars $a" ;;
			*)   descfiles="$a $descfiles" ;;
		esac
	done

	cat $descfiles | eval $envvars src/descparser
}

# cksum_chk filename cksum origfile
#
# This function verifies the checksum. If it fails it renames the file
# to file.cksum-err and returns failure.
#
# It seems like the [ ] command has problems with comparing high numbers.
# That's why I'm using a text comparison here.
#
# Not doing anything if checksum is '0' or a text of 'X'.
#
cksum_chk() {
	local file="$1" cksum="$2" orig="$3"
	# x/y were global before; keep the comparison values function-local
	# so repeated calls cannot clobber a caller's variables.
	local x y

	# Checksums of '0' or all-'X' are "unknown" -> accept silently.
	do_cksum_chk $cksum || return 0

	# Compare as text with leading zeros stripped (see comment above).
	x="`cksum "$file" | cut -f1 -d' ' | sed 's,^0*,,;'`"
	y="`echo $cksum | sed 's,^0*,,;'`"
	if [ "$x" != "$y" ] ; then
	    # Add .cksum-err extension to filename:
		echo "Cksum ERROR: $orig.cksum-err ($x)"
		mv "$orig" "$orig.cksum-err" ; return 1
	fi
	return 0
}

# Determine if a check should be done on files with the given cksum.
#
# Decide whether a file with the given cksum should be verified at all.
# Returns 1 (skip verification) when -nock was given, when the checksum
# is 0 (possibly zero-padded), or when it consists only of 'X's.
do_cksum_chk() {
	local sum="$1"
	local stripped="`echo $sum | sed 's,^0*,,;'`"

	[ "$nocheck" = 1 ] && return 1
	[ -z "$stripped" ] && return 1
	[ -z "${sum//X/}" ] && return 1
	return 0
}

# Autodetect the best mirror and save its URL in $mirror
#
# Pick a download mirror. If $mirror is already set (and not 'auto'),
# just report it; otherwise fetch the mirror list, speed-test each entry
# via its DOWNTEST file and keep the fastest in $mirror and
# download/Mirror. All progress chatter goes to stderr.
detect_mirror() {
    if [ -n "$mirror" -a "$mirror" != "auto" ] ; then
	if [ "$mirror" = "none" ] ; then
		echo "INFO: Using download mirror: none" \
			"(use the original download locations)" 1>&2
	else
		echo "INFO: Using download mirror:" 1>&2
		echo "INFO: $mirror" 1>&2
	fi
	echo "INFO: To force a new mirror auto-detection, use '-mirror auto'." 1>&2
    else
	echo "INFO: Auto-detecting best mirror ..." 1>&2
	# Pull only the rockver= assignment out of parse-config.
	eval "$(egrep '^(rockver)=' scripts/parse-config)"

	echo "INFO: Downloading mirror-list from www.rocklinux.net." 1>&2
	curl -s -S $curl_options -o src/Download-Mirror-List \
		"http://www.rocklinux.net/mirrors.cgi?$rockver"

	bestval=0 ; result='No Mirror Found!'
	# The list alternates '= Name =' header lines and URL lines.
	while read mirror_name ; do
	  if [ "${mirror_name#=}" != "$mirror_name" ] ; then
		mirror_name="${mirror_name#= }"
		mirror_name="${mirror_name% =}"
		read mirror_url
		echo -n "INFO: Testing <$mirror_name> ..." 1>&2
		# Measure download speed of the mirror's DOWNTEST file.
		val="$(curl -s $curl_options -m 20 "${mirror_url%/}/DOWNTEST" \
			-w "ok %{speed_download}" -o /dev/null)"
		if [ "$val" = "${val#ok }" -o "$val" = "ok 0.000" -o "$val" = "ok 0,000" ] ; then
			echo " error"
		else
			# Strip the decimal separator so the speed can be
			# compared as a plain integer (locale-independent).
			xval=`echo ${val#ok } | tr -d .,` ; echo " $val"
			if [ "$xval" -gt "$bestval" ] ; then
				bestval=$xval ; mirror="${mirror_url%/}"
				result="Using mirror <$mirror>."
			fi
		fi
	  fi
	done < src/Download-Mirror-List
	echo $mirror > download/Mirror
	echo "INFO: $result" 1>&2
    fi
}


# Check if static download location is available
#
# Check whether the file is marked NOAUTO in the package's .desc file,
# i.e. it must be fetched manually. Prints instructions and returns 1
# in that case; returns 0 when automatic download is allowed.
is_static() {
	local repo="$1" pkg="$2" gzfile="$3" url="$4"

	local descfile="package/$repo/$pkg/$pkg.desc"
	local noauto="^\[D\].*${gzfile##*/}.*NOAUTO"

	if grep -q -e "$noauto" "$descfile" ; then
		mkdir -p "${gzfile%/*}"
		echo "INFO: File ${gzfile##*/} needs to be downloaded manually." 
		echo "INFO: Download the file from"
		echo "INFO:     $url"
		echo "INFO: and place it in ${gzfile%/*}"
		return 1
	fi
	return 0
}

# download_file local-filename download-location cksum repo pkg
#
# This function decides whether to download directly or from a mirror,
# validates the checksum, etc.
# Calls download_file_now to do the actual download.
#
download_file() {

	# Init
	#
	local gzfile="$1" location="$2" cksum="$3" repo="$4" pkg="$5"
	# Make src directory for creating tar balls
	mkdir -p src/
	# Tarball file name: (if you change this one - also adapt Create-ISO)
	# .gz/.tgz/.Z files are stored recompressed as .bz2/.tbz2.
	bzfile="`echo "$gzfile" | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,'`"
	# Lock file name:
	lkfile="src/down.lockfile.`echo $bzfile | tr / -`"

	# Check if it's already there
	#
	[ -s "$bzfile" -a $checkonly != 1 ] && return 0

	# Make locking
	#
	if [ -s "$lkfile" ]; then
		echo "Found $lkfile -> skip download."
		return 0
	fi
	# Remove the lock if the user interrupts; every early return below
	# removes the lock and resets this trap again.
	trap 'rm -f "$lkfile"' INT
	echo $$ > "$lkfile"

	# Check if we only like to test the cksum(s)
	#
	if [ $checkonly = 1 ] ; then
		gzfile="$bzfile"
		if [ ! -f "$bzfile" ] ; then
			echo "File missing: $bzfile"
			rm -f "$lkfile" ; trap INT ; return 1
		fi
		if [ -z "${cksum##X*}" ] ; then
			echo "No checksum (ignore): $bzfile"
			rm -f "$lkfile" ; trap INT ; return 1
		fi
		if [ "$cksum" -eq 0 ] ; then
			echo "No checksum (missing): $bzfile"
			rm -f "$lkfile" ; trap INT ; return 1
		fi

	elif [ -s "$gzfile" ] ; then

		echo ; echo "Already downloaded $gzfile ..."

	else

		echo ; echo "Downloading $gzfile ..."

		# Existing *.cksum-err
		#
		if [ -s "$gzfile.cksum-err" ] ; then
			# cksum-err file already exists:
			echo "ERROR: Found $gzfile.cksum-err."
			echo "ERROR: That means that we downloaded the" \
			     "file already and it had an"
			echo "ERROR: incorrect checksum. Remove the" \
			     "*.cksum-err file to force a"
			echo "ERROR: new download of that file."
			rm -f "$lkfile" ; trap INT ; return 1
		fi

		# Existing *.extck-err
		#
		if [ -s "$gzfile.extck-err" ] ; then
			# extck-err file already exists:
			echo "ERROR: Found $gzfile.extck-err."
			echo "ERROR: That means that we downloaded the" \
			     "file already and it's content"
			echo "ERROR: did not match it's filename extension." \
			     "Remove the *.extck-err file"
			echo "ERROR: to force a new download of that file."
			rm -f "$lkfile" ; trap INT ; return 1
		fi

		# Questionable URL (URLs prefixed with '?' in the .desc);
		# only downloaded when -try-questionable was given.
		#
		if [ "$location" != "${location#\?}" ] ; then
			if [ "$tryques" = 0 ] ; then
				echo "ERROR: URL is marked as questionable." \
					"Not downloading this file."
				rm -f "$lkfile" ; trap INT ; return 1
			else
				echo "WARNING: URL is marked as questionable." \
					"Downloading it anyways."
				location="${location#\?}"
			fi
		fi

		# Make directory (if required)
		#
		if [ ! -d `dirname "$bzfile"` ] ; then
			mkdir -p `dirname "$bzfile"`
		fi
		
		# Alternative Directory
		#
		if [ "$altdir" ] ; then
		    altfile=$(find $altdir/ -name `basename $bzfile` | head -n 1)
		else
		    altfile=""
		fi

		#FIXME: compatibility, can be removed sooner or later...
		# Check old download dir layout
		if [ -z "$altfile" ]; then
			if [ -f "download/$repo${pkg:+/}$pkg/`basename $bzfile`" ]; then
				altfile="download/$repo${pkg:+/}$pkg/`basename $bzfile`"
			fi
		fi

		if [ "$altfile" ] ; then

			# Take the file from the alternative location instead
			# of downloading; method chosen via -copy/-move
			# (default: hard-link).
			echo "Found `basename $bzfile` as $altfile."
			if [ "$altcopy" = 'link' ]; then
				cp -lv $altfile $bzfile
			elif [ "$altcopy" = 'copy' ]; then
				cp -v $altfile $bzfile
			elif [ "$altcopy" = 'move' ]; then
				mv -v $altfile $bzfile
			fi
			gzfile="$bzfile"

		else

		    # Mirroring
		    #
		    # Only mirror-eligible files live under download/mirror/.

		    if [ -n "$mirror" -a "$mirror" != "none" -a -z "${bzfile##download/mirror/*}" ] ; then
			# try to use mirror

			if ! download_file_now "!$mirror/${bzfile#download/mirror/}" $bzfile $bzfile; then
				echo "INFO: download from mirror failed, trying original URL." 1>&2
				if ! is_static $repo $pkg "$gzfile" "$location"; then
					rm -f "$lkfile" ; trap INT ; return 1
				fi
				download_file_now $location $gzfile $bzfile
			else
				gzfile="$bzfile"
			fi
		    else
			# don't want to use mirror
			if ! is_static $repo $pkg "$gzfile" "$location"; then
				rm -f "$lkfile" ; trap INT ; return 1
			fi
			download_file_now $location $gzfile $bzfile
		    fi
		fi

		if [ ! -s "$gzfile" ]; then
			rm -f "$lkfile" ; trap INT ; return 1
		fi
	fi

	# Convert a .gz to .bz2 and test checksum
	#
	if [ "$gzfile" != "$bzfile" ] ; then
		do_cksum_chk $cksum \
		&& echo "bzip'ing + cksum-test: $gzfile" \
		|| echo "bzip'ing: $gzfile"
		gunzip < "$gzfile" > src/down.$$.dat
		# Only replace the original with the .bz2 when the checksum
		# of the uncompressed data is correct.
		if cksum_chk src/down.$$.dat $cksum "$gzfile" ; then
			bzip2 < src/down.$$.dat > "$bzfile" ; rm -f "$gzfile"
		fi
		rm -f src/down.$$.dat

	# Execute a cksum test on a bzip2 file
	#
	elif [ "${gzfile%.bz2}"  != "$gzfile" -o \
	       "${gzfile%.tbz2}" != "$gzfile" ]
	then
		if [ $nocheck = 0 ] && do_cksum_chk $cksum ; then
			echo "cksum-test (bzip2): $bzfile"
			bunzip2 < "$bzfile" > src/down.$$.dat
			cksum_chk src/down.$$.dat $cksum "$bzfile"
			rm -f src/down.$$.dat
		fi

	# Execute a cksum test on a raw data file
	#
	elif [ $nocheck = 0 ] ; then
		echo "cksum-test (raw): $gzfile"
		cksum_chk "$gzfile" $cksum "$gzfile"
	fi

	# Free Lock and finish
	#
	rm -f "$lkfile" ; trap INT ; return 0
}

# download_file_now location remote_filename local_filename
#
# This function executes the actual download using curl (or an svn/cvs
# checkout for svn:// and cvs:// locations).
#
download_file_now() {
	local location="$1" gzfile="$2" bzfile="$3"

	# Create URL
	#
	# A leading '!' means the location is already the full file URL;
	# otherwise the basename of $gzfile is appended to the directory
	# part of the location.
	if [ "${location#!}" != "$location" ] ; then
		url="`echo "$location" | sed 's,!,,'`"
	else
		url="`echo "$location" | \
			sed 's,/[^/]*$,,'`/`echo $gzfile | sed 's,.*/,,'`"
	fi

	# Check for existing Error Log
	#
	if test -s src/Download-Errors &&
 	   grep -q " $url\$" src/Download-Errors ; then
		echo "ERROR: According to src/Download-Errors" \
		     "we had already an error for the URL"
		echo "ERROR:    $url"
		echo "ERROR: So I'm not trying to download" \
		     "it again (remove src/Download-Errors"
		echo "ERROR: if you want to force a retry)."
		return 1
	fi

	# Download
	#
	if [[ $url = svn://* ]] ; then
		# svn://mode:[login[:password]@]server[:port]:/path::revision/
		# Exports the repository and packs it into $bzfile.
		urlorig=${url}
		url=${location#!}
		url="${url#svn://}"; url="${url%/}"
		mode="${url%%:*}"
		url="${url#*:}"
		# Optional login[:password]@ prefix -> svn credentials.
		if [ "${url%@*}" = "${url}" ] ; then
			username=""
			password=""
		else
			username="${url%%@*}"
			if [ "${username%:*}" != "${username}" ] ; then
				password="--password ${username#*:}"
				username="${username%%:*}"
			fi
			username="--username ${username}"
		fi
		url="${url##*@}"
		# Optional ::revision suffix -> svn -r option.
		rev="${url##*::}"
		if [ -z "${rev}" -o "${rev}" == "${url}" ] ; then
			rev=""
		else
			rev="-r ${rev}"
		fi
		url="${url%::*}"
		
		old=${PWD}
		tmp="`mktemp`"
		rm -rf ${tmp}
		dir=${bzfile%.tar.bz2}
		dir="`basename ${dir}`"
		mkdir -p ${tmp}
		cd ${tmp}
		echo SVN ${username} ${password} ${rev} ${mode}://${url}
		# Run the export in the background and poll its output file
		# so we can print a progress indicator every few seconds.
		{ echo svn export ${username} ${password} ${rev} ${mode}://${url} ${dir}
		  if ! svn export ${username} ${password} ${rev} ${mode}://${url} ${dir}
		  then touch .svn_error ; fi
		} &> .svn_output &
		while fuser .svn_output &> /dev/null ; do
			echo -ne `nice du -sh 2> /dev/null | cut -f1` 'downloaded from' \
				'SVN archive so far...\r'
			sleep 3
		done
		echo `du -sh 2> /dev/null | cut -f1` 'downloaded from' \
			'SVN archive (download finished).'
		if [ ! -f .svn_error ] ; then
			rm -f .svn_output
			tar --owner root --group root \
			    --use-compress-program=bzip2 \
			    -cf ${dir}.tar.bz2 ${dir}
			mv ${dir}.tar.bz2 ${old}/${bzfile}
			cd ${old} ; rm -rf ${tmp}
		else
			cat .svn_output
			cd $old ; rm -rf $tmp
			echo ERROR: SVN export ${username} ${password} ${mode}://${url} ${dir} \
			            returned an error.
			echo "0 $gzfile $urlorig" >> src/Download-Errors
		fi
	elif [[ $url = cvs://* ]] ; then

		# cvs://mode:[login[:password]@]server[:port]:/path::module!revision/
		# btw, at least current cvs supports password at CVSROOT.

		url="${url#cvs://}"; url="${url%/*}"

		# cvs://mode:loc::module!date/
		#
		mode="${url%%:*}"; loc="${url#*:}"
		module="${loc##*::}"; loc="${loc%%::*}"
		revision="${module#*!}"; module="${module%%!*}"
		[[ $loc != *@* ]] && loc="anonymous@$loc"

		# everything after the first 'bang' (!) is analysed here
		# someday we could add more cvs options.
		#
		# A YYYY-MM-DD revision becomes a cvs -D date option.

		dat="$( echo $revision | \
			sed -n -e 's,\([0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\}\),-D \1,p' )"

		cvsdir="src/down.cvsdir.`echo $bzfile | tr / -`"
		saved_pwd=$PWD ; mkdir -p $cvsdir ; cd $cvsdir

		echo CVS $mode $loc $dat $module
		{ [ $mode = ssh ] && export CVS_RSH=ssh
		  [ $mode = pserver ] && loc=":pserver:$loc"
		  # sometimes cvs wants to read ~/.cvspass just for fun ..
		  touch $HOME/.cvspass
		  # for ssh we need some way to quitely accept the key ...
		  echo cvs -z9 -Q -d $loc checkout $dat -P $module
		  if ! cvs -z9 -Q -d $loc checkout $dat -P $module
		  then touch .cvs_error ; fi
		} &> .cvs_output &

		# Same progress-polling trick as for the svn case above.
		while fuser .cvs_output &> /dev/null ; do
			echo -ne `nice du -sh 2> /dev/null | cut -f1` 'downloaded from' \
				'CVS archive so far...\r'
			sleep 3
		done
		echo `du -sh 2> /dev/null | cut -f1` 'downloaded from' \
			'CVS archive (download finished).'

		if [ ! -f .cvs_error ] ; then
			cd `dirname $module`
			dir="`echo "$bzfile" | sed s/\.tar\.bz2$//`"
			dir="`basename $dir`"

			mv `basename $module` $dir

			tar --owner root --group root \
			    --use-compress-program=bzip2 \
			    -cf $dir.tar.bz2 $dir
			mv $dir.tar.bz2 $saved_pwd/$bzfile

			cd $saved_pwd ; rm -rf $cvsdir
		else
			cat .cvs_output
			cd $saved_pwd ; rm -rf $cvsdir
			echo ERROR: CVS $dat $loc $module \
			            returned an error.
			echo "0 $gzfile $url" >> src/Download-Errors
		fi
	else
		# Plain download via curl; partial downloads are kept as
		# *.incomplete and resumed on the next run if possible.
		if [ -s "$gzfile.incomplete" ] ; then
		    echo "INFO: Trying to resume previous download .." 1>&2
		    resume="-C -"
		else
		    resume=""
		fi

		curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' -f --progress-bar $resume $curl_options "$url" -o "$gzfile.incomplete"
		curlret="$?"

		# curl 33 = server doesn't support ranges, 36 = bad resume
		# offset -> retry once from scratch.
		if [ "$resume" ] && \
		   [ $curlret -eq 33 -o $curlret -eq 36 ] ; then
		    echo "INFO: Resuming download not possible. ->" \
		         "Overwriting old file." 1>&2
		    rm -f "$gzfile.incomplete"
		    curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' -f --progress-bar $curl_options "$url" -o "$gzfile.incomplete"
		    curlret="$?"
		fi

		if [ $curlret -ne 0 ] ; then
		    case "$curlret" in
		      18)
		      echo "WARNING: Got only some of the" \
			   "file. A re-run of $0"
		      echo "WARNING: is required to complete" \
			   "the download." ;;
		      130)
		      echo -e '\rWARNING: CURL got a SIGINT' \
			   "(someone pressed Ctrl-C). A re-run of"
		      echo "WARNING: $0 is required to complete" \
			   "the download." ; sleep 1 ;;
		      *)
		      echo "$curlret $gzfile $url" \
					>> src/Download-Errors
		      echo -e '\rERROR: CURL Returned Error' \
			   "$curlret. Please read" \
			   "the curl manpage." ;;
		    esac
		    return 1
		elif [ ! -s "$gzfile.incomplete" ] ; then
		    echo "0 $gzfile $url" >> src/Download-Errors
		    echo "ERROR: CURL returned success but" \
		         "we have no data!"
		    curlret=1
		else
		    # Sanity check: the downloaded content must match the
		    # filename extension (catches HTML error pages etc.).
		    case "$gzfile" in
		      *.gz|*.tgz)
			  typeexpr="gzip compressed data" ;;
		      *.bz2|*.tbz2)
			  typeexpr="bzip2 compressed data" ;;
		      *.Z|*.tZ)
			  typeexpr="compress'd data" ;;
		      *.zip|*.jar)
			  typeexpr="Zip archive data" ;;
		      *.tar)
			  typeexpr="tar archive" ;;
		      *)
			  echo "WARNING: Unkown file extension: $gzfile"
			  typeexpr="." ;;
		    esac
		    if file "$gzfile.incomplete" | grep -v "$typeexpr"
		    then
			echo "ERROR: File type does not match" \
			     "filename ($typeexpr)!"
			mv "$gzfile.incomplete" "$gzfile.extck-err"
		    else
			mv "$gzfile.incomplete" "$gzfile"
		    fi
		fi
	fi
}

# Emit every [D] download tag known to the tree: from all package .desc
# files plus the per-target download.txt files (which are prefixed with
# '[D] ' so they share the same format). Output is normalized via
# column_clean (from scripts/functions).
list_dtags() {
	{ 
		descparser package/*/*/*.desc | grep '^\[D\] '
		grep -h '^[X0-9]' target/*/download.txt | sed 's,^,[D] ,' 
		grep -h '^[X0-9]' target/*/*/download.txt | sed 's,^,[D] ,' 
	} | column_clean
}

# Print "<cksum> <local-filename>" for every downloadable file.
# $D2re and $NODISTre come from scripts/functions; NOMIRROR/NODIST
# entries map to download/nodist/, the rest to download/mirror/.
list_cksums() {
	# Ignore Ctrl-C while the pipeline runs, restore afterwards.
	trap '' INT

	list_dtags | sed \
			-e "s,^$D2re[ ].*\($NODISTre\).*$,\2 download/nodist/\4/\3,;" \
			-e "s,^$D2re$,\2 download/mirror/\4/\3,;" \
		| sed 's,^\(.*/\)[^/:]*:[^ ]* \([X0-9]*\) ,\2 \1,;' | cut -f1,2 -d' '

	trap INT
}

# Print the local filename of every file a complete download directory
# would contain (same mapping as list_cksums, without the checksum).
list() {
	# Ignore Ctrl-C while the pipeline runs, restore afterwards.
	trap '' INT

	list_dtags | sed \
			-e "s,^$D2re[ ].*\($NODISTre\).*$,\2 download/nodist/\4/\3,;" \
			-e "s,^$D2re$,\2 download/mirror/\4/\3,;" | awk '{print $2;}'

	trap INT
}

# Report files present under download/ that no package description (or
# known housekeeping file) accounts for, e.g. leftovers of old versions.
list_unknown() {
	trap '' INT
	# Build the list of expected files (.gz/.Z names are stored as .bz2).
	mkdir -p src/ ; list | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,' > src/down.$$.lst
	ls download/{INDEX,README,DOWNTEST,LAST-UPDATE} \
						>> src/down.$$.lst 2> /dev/null
	# Anything on disk that is not in the expected list is "unknown".
	find download/* -follow -type f -o -type l | \
	egrep -v '^download/([^/]*(/.)?|mirror/(DOWNTEST|LAST-UPDATE|README))$' | \
	while read fn ; do
		grep -qx "$fn" src/down.$$.lst || echo "Unknown file: $fn"
	done
	rm -f src/down.$$.lst
	trap INT
}

# List files that are referenced by the selected configuration's packages
# but not downloaded yet. Without a configuration, fall back to checking
# every package in the tree (list_missing_all).
list_missing() {
	if [ -z "${config}" ] ; then
		list_missing_all
		return
	fi
	if [ ! -f config/$config/packages ]; then
		echo "ERROR: Config $config doesn't exist."
		echo "ERROR: try ./scripts/Config -cfg $config first."
		exit 1
	fi
	# Let descparser evaluate the xpkg we pass per package.
	ignore_xpkg=0
	# config/*/packages lines: "X <a> <b> <repo> <pkg> ...", where a
	# leading X marks packages selected for the build.
	while read on a b repo pkg c ; do
		forkedpkg=${pkg#*=}
		[ "$forkedpkg" = "$pkg" ] || pkg=${pkg%=*}
		if [ "${on}" = "X" ] ; then
			# Map each [D] tag to its local filename (.gz/.Z are
			# stored as .bz2) and print those not on disk.
			descparser "xpkg=${forkedpkg}" "package/${repo}/${pkg}/${pkg}.desc" | column_clean | grep '^\[D\]' | \
				sed -e "s,^$D2re[ ].*\($NODISTre\).*$,\2 download/nodist/\4/\3,;" \
				    -e "s,^$D2re$,\2 download/mirror/\4/\3,;" | awk '{print $2;}' | \
				    sed -e 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,' | \
				while read fn ; do
					[ -f "${fn}" ] || echo "${fn}"
				done
		fi
	done < config/$config/packages
}

# Print every expected download file (from all packages in the tree)
# that is not present on disk. .gz/.Z names are checked as .bz2 since
# that is how files are stored locally.
list_missing_all() {
	trap '' INT
	list | sed 's,\.\(t\?\)\(gz\|Z\)$,.\1bz2,' | \
	while read fname ; do
		if [ ! -f "$fname" ] ; then
			echo "$fname"
		fi
	done
	trap INT
}

# Download everything for one or more whole repositories by expanding
# each repository's package descriptions and handing them to packages().
repository() {
	local repo
	for repo in "$@" ; do
		packages package/$repo/*/*.desc
	done
}

# Download submaster patches by SM-PATCH-ID. Each 10-digit id is split
# into YYYY/MM/rest and rewritten into a patch URL on the submaster
# server; all URLs are fed to a single curl invocation via -K on stdin.
smpatches() {
	submaster_url="http://www.rocklinux.net/submaster"
	id2url="s#\([0-9]\{4,4\}\)\([0-9]\{2,2\}\)\([0-9]*\)#$submaster_url/data/\1/\2/\3.patch#"

	{
	while [ -n "$1" ]
	do
		# convert $1 (smid) via $id2url to submaster url;
		# convert url to curl input for -K (url = "XXX");
		# ids that are not 10+ digits are silently dropped ('d').
		echo "$1" | sed "/[0-9]\{10\}/ {$id2url ; s/^\(.*\)$/url = \"\1\"/; p; }; d;  "
		shift
	done
	} | curl --progress-bar $curl_options -K -
}

# Download all files needed by the packages selected ('X') in the chosen
# build configuration, plus the target-specific download.txt files.
required() {
    # Choosen config must exist
    #
    if [ ! -f config/$config/packages ]; then
        echo "ERROR: Config $config doesn't exist."
        echo "ERROR: try ./scripts/Config -cfg $config first."
        exit 1
    fi
    
    # Let descparser evaluate the xpkg we pass per package.
    ignore_xpkg=0

    local forkedpkg
    # config/*/packages lines: "X <a> <b> <repo> <pkg> ..."; a leading X
    # marks packages enabled in this configuration.
    while read on a b repo pkg c ; do
	forkedpkg=${pkg#*=}
	[ "$forkedpkg" = "$pkg" ] || pkg=${pkg%=*}

	if [ "$on" = "X" ] ; then
	    descparser "xpkg=$forkedpkg" "package/$repo/$pkg/$pkg.desc" | grep '^\[D\] ' > src/down.$$.lst
	    while read tag cksum file url flags ; do
		download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$repo" "$pkg"
	    done < src/down.$$.lst ; rm -f src/down.$$.lst
	fi
    done < config/$config/packages

    # Also fetch the files listed for the configured target/arch.
    target=`grep '^export ROCKCFG_TARGET=' config/$config/config | \
						cut -f2 -d= | tr -d "'"`

    arch=`grep '^export ROCKCFG_ARCH=' config/$config/config | \
						cut -f2 -d= | tr -d "'"`
    for targetdl in target/$target/{,$arch/}download.txt ; do
        if [ -f $targetdl ] ; then
            while read cksum file url flags ; do
                download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$target"
            done < $targetdl
        fi
    done
}

# Download every file referenced by any package .desc in the tree and by
# every target download.txt (i.e. a complete download directory).
all() {
    local each repo pkg

    for each in package/*/*/*.desc; do
	pkg="`echo $each | cut -f3 -d/`"
	repo="`echo $each | cut -f2 -d/`"

	# Process substitution keeps the while loop in this shell.
	while read tag cksum file url flags ; do
	    download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$repo" "$pkg"
	done < <(descparser package/$repo/$pkg/$pkg.desc | grep '^\[D\] ')
    done
    
    for each in target/*/download.txt target/*/*/download.txt; do
	target="`echo $each | cut -f2 -d/`"

	while read cksum file url flags ; do
	    download_file "`source_file cksum $file url $flags`" "$url" "$cksum" "$target"
	done < <(cat $each)
    done
}

# Download the files for the given arguments, each being either a
# target/*/download.txt path, a .desc file path, a package name, or a
# forked package of the form <pkg>=<xpkg>.
packages() {
	local descfile
	local forkedpkg
	for arg; do
		# Check if this is a forked package name
		case "$arg" in
		*=*) ignore_xpkg=0; forkedpkg="xpkg=${arg#*=}"; arg=${arg%=*};;
		*) ignore_xpkg=1; forkedpkg="";;
		esac
		case "$arg" in
		target/*)
			if [ ! -f $arg ]; then
			    echo "Skipping \"$arg\" (not found)!"
			    continue
			fi

			target="`echo $arg | cut -f2 -d/`"

			while read cksum file url flags ; do
			    download_file "`source_file cksum $file url $flags`" \
				"$url" "$cksum" "$target"
			done < <(cat $arg)
			;;
		*)
			# Accept either a .desc path or a bare package name.
			if [ ! "${arg%.desc}" = "$arg" ]; then
			    descfile=$arg
			else
			    descfile="`echo package/*/$arg/$arg.desc`"
			fi
			
			if [ ! -f $descfile ]; then
			    echo "Skipping \"$arg\" (not found)!"
			    continue
			fi
			
			pkg="`echo $descfile | cut -f3 -d/`"
			repo="`echo $descfile | cut -f2 -d/`"
			
			while read tag cksum file url flags ; do
			    download_file "`source_file cksum $file url $flags`" \
				"$url" "$cksum" "$repo" "$pkg"
			done < <(descparser $forkedpkg package/$repo/$pkg/$pkg.desc |
				grep '^\[D\] ')

			# Optionally write the freshly computed checksums
			# back into the .desc file (-cksumpatch).
			if [ $cksumpatch = 1 ]; then
				./scripts/Create-CkSumPatch $pkg | patch -p0
			fi
			;;
		esac
	done
}

# Run each package name through the package mapper (which may rewrite
# $xpkg, e.g. for aliases) before downloading it via packages().
mapped_packages() {
	# Build the pkgmapper script lazily from all pkgmapper.in snippets.
	if [ ! -f src/pkgmapper ]
	then
		mkdir -p src
		bash scripts/xfind.sh package/. -type f -name 'pkgmapper.in' \
			-printf '%f\t%p\n' | sort | awk '{ $1="."; print; }' > src/pkgmapper
	fi
	for pkg; do
 		export xpkg=${pkg#*=}
		. src/pkgmapper
 		packages ${pkg%=*}=$xpkg
	done
}

# Things to do only for downloading
# (skipped for the -list* modes and for -check).
#
if [ "${1:0:5}" != "-list" -a $checkonly = 0 ]; then
	# Set proxy information from the files written by -proxy/-proxy-auth
	if [ -f download/Proxy ]; then
		proxy="`cat download/Proxy`"
		if [ "$proxy" ]; then
			curl_options="$curl_options --proxy $proxy"
		else
			# An empty file is stale state; drop it.
			echo "INFO: No proxy information... removing download/Proxy." 1>&2
			rm download/Proxy
		fi
	fi
	if [ -f download/Proxy-auth ]; then
		proxyauth="`cat download/Proxy-auth`"
		if [ "$proxyauth" ]; then
			curl_options="$curl_options --proxy-user $proxyauth"
		else
			echo "INFO: No proxy-auth information... removing download/Proxy-auth." 1>&2
			rm download/Proxy-auth
 		fi
 	fi

	# Thing to do only once 
	# (skipped in the recursive 2nd run, see -this_is_the_2nd_run)
	#
	if [ $this_is_the_2nd_run = 0 ]; then
		# am i using a proxy?
		# -- say i'm doing it even when i already did ;-)
		if [ "$proxy" ]; then
			echo "INFO: Setting proxy to $proxy." 1>&2
		fi
		if [ "$proxyauth" ]; then
			echo "INFO: Setting proxy authentication information." 1>&2
		fi

		# do mirror detection
		detect_mirror
	fi
fi

# Main mode dispatch: list modes, bulk modes, repositories, submaster
# patches; anything unrecognized starting with '-' shows the help text,
# and plain arguments are treated as package names (via the pkg mapper).
case "$1" in
	-list)		list ;;
	-list-dtags)	list_dtags ;;
	-list-unknown)	list_unknown ;;
	-list-missing)	list_missing ;;
	-list-cksums)	list_cksums ;;

	-required)	required ;;
	-all)		all ;;
	-repository)	shift ; repository "$@" ;;

	-sm) shift ; smpatches "$@" ;;

	-*|"")		exec $0 -help ;;

	*)		mapped_packages "$@" ;;
esac

exit 0