#!/bin/bash
# --- SDE-COPYRIGHT-NOTE-BEGIN ---
# This copyright note is auto-generated by ./scripts/Create-CopyPatch.
#
# Filename: bin/sde-download
# Copyright (C) 2006 - 2011 The OpenSDE Project
# Copyright (C) 2004 - 2006 The T2 SDE Project
# Copyright (C) 1998 - 2003 Clifford Wolf
#
# More information can be found in the files COPYING and README.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License. A copy of the
# GNU General Public License can be found in the file COPYING.
# --- SDE-COPYRIGHT-NOTE-END ---

#Description: Download sources
#Alias: get

set -e

[ -n "$SDEROOT" ] ||
	export SDEROOT=$( cd "${0%/*}/.."; pwd -P )

. $SDEROOT/lib/libsde.in
. $SDEROOT/lib/functions.in

download_usage() {
	cat <<EOT
Usage:
 sde download <options> [ Package(s) ]
 sde download <options> [ Desc file(s) ]
 sde download <options> -repository Repositories
 sde download <options> [ -all | -required ]

Where <options> is an alias for:
 [ -cfg <config> ] [ -nock ] [ -alt-dir <AlternativeDirectory> ]
 [ -mirror <URL> | -check ] [ -try-questionable ] [ -notimeout ]
 [ -longtimeout ] [ -curl-opt <curl-option>[:<curl-option>[:..]] ]
 [ -copy ] [ -move ]

By default, this script auto-detects the best OpenSDE mirror.

Mirrors can also be local directories in the form 'file:///<dir>'.

 sde download [ -list | -list-missing | -list-cksums ]

See '-mirror none' output for help on bypassing the official mirrors.
EOT
}

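# Typical invocations (illustrative, based on the usage text above):
#   sde download -all                     # fetch the sources of every package
#   sde download -cfg default -required   # fetch what the chosen config needs
#   sde download -repository base         # fetch a whole repository
#   sde download bash                     # fetch a single package
#   sde download -list-missing            # show files not downloaded yet
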
umask 022

cd "$SDEROOT"

# Handle options passed on the command line
#
mkdir -p tmp/ download/

# determine version of the package tree
pkgver=$("$SDEROOT/lib/version.sh" -p)
xpkgver=$(echo "$pkgver" | tr '.' '_')

# Load system wide configuration for this tool
#
config=default mirror= mirror_last= altdir= proxy= proxyauth= notimeout=0

if [ -s "$SDESETTINGS" ]; then
	eval $( $SDEROOT/bin/sde-config-ini -F "$SDESETTINGS" download )
	eval $( $SDEROOT/bin/sde-config-ini -F "$SDESETTINGS" download-$xpkgver )
fi

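# For reference, a rough sketch of the settings this tool reads and writes via
# sde-config-ini (the key names are taken from this script; the values and the
# INI layout shown here are only assumed examples):
#
#   [download]
#   altdir = /srv/distfiles
#   proxy = proxy.example.org:3128
#   proxyauth = user:password
#
#   [download-<pkgver, dots replaced by underscores>]
#   mirror = http://mirror.example.org/opensde
#   mirror_last = 1300000000
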
this_is_the_2nd_run=0
checkonly=0 tryques=0 nocheck=0
options='-this_is_the_2nd_run '
curl_options='-A opensde-downloader --disable-epsv --location -f'
altcopy=link ; verbose=1
downloaderror=0

#
while [ $# -gt 0 ]; do
	case "$1" in

	-this_is_the_2nd_run)
		this_is_the_2nd_run=1
		;;

	-cfg)
		options="$options -cfg $2"
		config="$2" ; shift ;;

	-q)
		options="$options -q"
		verbose=0 ;;

	-nock)
		# -nock skips checksum checking (don't use lightly)
		options="$options -nock"
		nocheck=1 ;;

	-mirror)
		# -mirror uses a mirror for finding source files
		if [ "$2" = none ]; then
			echo
			echo "The option '-mirror none' is not supported anymore!"
			echo
			echo "You may edit \$HOME/.sde/settings if you really"
			echo "want to use the original download resources. However, this"
			echo "is not supported and if such a download fails, this is not"
			echo "a bug in OpenSDE and doesn't necessarily need fixing."
			echo
			exit 1;
		elif [ "$2" = auto ]; then
			mirror=
			mirror_last=
		else
			options="$options -mirror $2"
			mirror="$2"
			mirror_last=$(date +%s)
			$SDEROOT/bin/sde-config-ini -F "$SDESETTINGS" \
				"download-$xpkgver.mirror=$mirror" \
				"download-$xpkgver.mirror_last=$mirror_last"
		fi
		shift ;;

	-check)
		# -check just validates the file using the checksum
		options="$options -check"
		checkonly=1 ;;

	-notimeout)
		# don't add timeout curl options
		options="$options -notimeout"
		notimeout=2 ;;

	-longtimeout)
		# use more generous curl timeouts
		options="$options -longtimeout"
		notimeout=1 ;;

	-curl-opt)
		# additional curl options
		options="$options -curl-opt $2"
		curl_options="$curl_options `echo $2 | tr : ' '`"
		shift ;;

	-alt-dir)
		# check for an alternative directory where to search for
		# package source tarballs
		altdir=$( cd "$2" && pwd -P )
		$SDEROOT/bin/sde-config-ini -F "$SDESETTINGS" \
			"download.altdir=$altdir"
		shift ;;

	-try-questionable)
		# also try to download questionable URLs
		options="$options -try-questionable"
		tryques=1 ;;

	-move) altcopy=move ;;
	-copy) altcopy=copy ;;

	*) break ;;
	esac
	shift
done

if [ $notimeout -eq 0 ] ; then
	curl_options="$curl_options -y 10 -Y 10 --connect-timeout 60"
fi
if [ $notimeout -eq 1 ] ; then
	curl_options="$curl_options -y 60 -Y 1 --connect-timeout 300"
fi

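# Note: curl's -Y/--speed-limit and -y/--speed-time abort a transfer whose rate
# stays below the given bytes/sec for the given number of seconds, so the
# defaults above drop stalled downloads quickly, -longtimeout is much more
# patient, and -notimeout disables the stall detection entirely.
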
# proxy (server[:port])
if [ -n "$proxy" ]; then
	curl_options="$curl_options --proxy $proxy"

	# proxy needs auth (username[:password])
	[ -z "$proxyauth" ] || curl_options="$curl_options --proxy-user $proxyauth"

	# only show once
	[ $this_is_the_2nd_run = 1 ] || echo_info "Using <$proxy> as ${proxyauth:+authenticated }http proxy."
fi

# Disable certificate checking for https downloads
curl_options="$curl_options -k"

# Auto-detect the best mirror and save its URL in $mirror
#
detect_mirror() {
	local age=

	if [ "$mirror" = "none" ] ; then
		echo_info "Using original download locations only."
		return
	elif [ "$mirror" = "broken" ]; then
		echo_warning "Previous detection of the mirror failed, trying again."
	elif [ -n "$mirror" ]; then
		age=$(expr `date +%s` - ${mirror_last:-0})
		age=$(expr $age / 3600)
		if [ $age -gt 24 ]; then
			echo_warning "Mirror choice <$mirror> is old, checking again."
			mirror=
			mirror_last=
		else
			echo_info "Using mirror <$mirror>."
			return
		fi
	fi

	echo_warning "Auto-detecting best mirror ..."

	echo_info "Downloading mirror-list from opensde.net."
	curl -s -S $curl_options -o tmp/Download-Mirror-List \
		"http://opensde.net/opensde-download-mirrors/$pkgver"

	if [ -r tmp/Download-Mirror-List ]; then
		bash lib/sde-download/mirror-test.sh < tmp/Download-Mirror-List
	fi 2>&1 | echo_info

	# read new mirror info
	mirror=
	eval $( $SDEROOT/bin/sde-config-ini -F "$SDESETTINGS" download-$xpkgver )

	if [ -z "$mirror" ]; then
		echo_error "Mirror detection loop hit a bug!"
	elif [ "$mirror" = "broken" ]; then
		echo_warning "No Mirror Found!"
	else
		echo_info "Using mirror <$mirror>."
	fi
}

download_file_desc() {
	sed -n -e 's|^\[D\][ \t]\+||p' "package/$1/$2/$2.desc" |
		download_file_pipe "$1" "$2"
}

download_file_pipe() {
	local filename=
	while read cksum file url; do
		filename=$(source_file cksum $file "$url")
		download_file "$filename" "$url" "$cksum" "$@"
	done
}

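# Every line read above has the form "<cksum> <file> <url>", e.g. (values are
# purely illustrative):
#   1234567890 bash-4.2.tar.gz http://ftp.gnu.org/gnu/bash/
# source_file (provided by the sourced helper libraries) maps that to the local
# path the tarball is kept under, normally below download/mirror/ or
# download/local/.
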
# download_file local-filename download-location cksum repo pkg
#
# This function decides whether to download directly or from a mirror,
# validates the checksum, etc.
# It calls download_file_now to do the actual download.
#
download_file() {

	# Init
	#
	local gzfile="$1" location="$2" cksum="$3" repo="$4" pkg="$5"
	# Make tmp directory (for lock files and scm checkout scratch space)
	mkdir -p tmp/
	# Tarball file name:
	bzfile="`bz2filename "$gzfile"`"
	# Remove optional '-' prefix from $location
	[ "${location:0:1}" = '-' ] && location="${location:1}"
	# Lock file name:
	lkfile="tmp/down.lockfile.`echo $bzfile | tr / -`"

	# Check if it's already there
	#
	[ -s "$bzfile" -a $checkonly != 1 ] && return 0

	# Acquire the lock
	#
	if [ -s "$lkfile" ]; then
		echo "Found $lkfile -> skip download."
		return 0
	fi
	trap 'rm -f "$lkfile"' INT
	echo $$ > "$lkfile"

	# Check if we only want to test the cksum(s)
	#
	if [ $checkonly = 1 ] ; then
		gzfile="$bzfile"
		if [ ! -f "$bzfile" ] ; then
			echo "File missing: $bzfile"
			rm -f "$lkfile" ; trap INT ; return 1
		fi
		if [ -z "${cksum##X*}" ] ; then
			echo "No checksum (ignore): $bzfile"
			rm -f "$lkfile" ; trap INT ; return 1
		fi
		if [ "$cksum" -eq 0 ] ; then
			echo "No checksum (missing): $bzfile"
			rm -f "$lkfile" ; trap INT ; return 1
		fi

	elif [ -s "$gzfile" ] ; then

		echo ; echo "Already downloaded $pkg:$gzfile ..."

	else

		echo ; echo "Downloading $pkg:$gzfile ..."

		# Existing *.cksum-err
		#
		if [ -s "$gzfile.cksum-err" ] ; then
			# cksum-err file already exists:
			echo "ERROR: Found $gzfile.cksum-err."
			echo "ERROR: That means that we downloaded the" \
				"file already and it had an"
			echo "ERROR: incorrect checksum. Remove the" \
				"*.cksum-err file to force a"
			echo "ERROR: new download of that file."
			rm -f "$lkfile" ; trap INT ; return 1
		fi

		# Existing *.extck-err
		#
		if [ -s "$gzfile.extck-err" ] ; then
			# extck-err file already exists:
			echo "ERROR: Found $gzfile.extck-err."
			echo "ERROR: That means that we downloaded the" \
				"file already and its content"
			echo "ERROR: did not match its filename extension." \
				"Remove the *.extck-err file"
			echo "ERROR: to force a new download of that file."
			rm -f "$lkfile" ; trap INT ; return 1
		fi

		# Questionable URL
		#
		if [ "$location" != "${location#\?}" ] ; then
			if [ "$tryques" = 0 ] ; then
				echo "ERROR: URL is marked as questionable." \
					"Not downloading this file."
				rm -f "$lkfile" ; trap INT ; return 1
			else
				echo "WARNING: URL is marked as questionable." \
					"Downloading it anyway."
				location="${location#\?}"
			fi
		fi

		# Make directory (if required)
		#
		if [ ! -d `dirname "$bzfile"` ] ; then
			mkdir -p `dirname "$bzfile"`
		fi

		# Alternative Directory
		#
		if [ -d "$altdir" ] ; then
			altfile=$(find -L "$altdir/" -name `basename $bzfile` 2> /dev/null |
				head -n 1)
		else
			altfile=
		fi

		if [ -s "$altfile" ] ; then

			echo "Found `basename $bzfile` as $altfile."
			case "$altcopy" in
			copy)
				cp "$altfile" "$bzfile" ;;
			move)
				mv "$altfile" "$bzfile" ;;
			*) #link
				cp -l "$altfile" "$bzfile" ;;
			esac
			gzfile="$bzfile"

		else

			# Mirroring
			#
			if [ -n "$mirror" -a "$mirror" != "none" -a "$mirror" != "broken" -a -z "${bzfile##download/mirror/*}" ] ; then
				# try to use mirror

				if ! download_file_now "!$mirror/${bzfile#download/mirror/}" $bzfile $bzfile; then
					echo "INFO: download from mirror failed, trying original URL."
					download_file_now "$location" $gzfile $bzfile ||
						downloaderror=1
				else
					gzfile="$bzfile"
				fi
			else
				# don't want to use mirror
				download_file_now "$location" $gzfile $bzfile ||
					downloaderror=1
			fi
		fi

		if [ ! -s "$gzfile" ]; then
			rm -f "$lkfile" ; trap INT ; return 1
		fi
	fi

	# unsign .gpg file
	if expr "$gzfile" : '.*\.gpg$' > /dev/null; then
		gzfile=${gzfile%.gpg}
		if [ -f $gzfile.gpg ]; then
			echo "unsigning GnuPG file: $gzfile.gpg"
			gpg $gzfile.gpg
		fi
		if [ ! -f $gzfile ]; then
			echo "unsigning failed"
			rm -f "$lkfile" ; trap INT ; return 1
		fi
	fi

	sh ./lib/sde-download/validate.sh "$gzfile" "$bzfile" "$cksum" || downloaderror=1

	# Free Lock and finish
	#
	rm -f "$lkfile" ; trap INT ; return 0
}

# download_file_now location remote_filename local_filename
#
# This function executes the actual download using curl.
#
download_file_now() {
	local location="$1" gzfile="$2" bzfile="$3" curlret=0

	# Create URL
	#
	case "$location" in
	manual://*) url="$location" ;;
	!*) url="${location#!}" ;;
	*) url="${location%/*}/${gzfile##*/}" ;;
	esac

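	# The location forms handled above, with illustrative values:
	#   manual://www.example.com/get.html     -> file must be fetched by hand
	#   !http://example.com/pub/file.tar.bz2  -> use this URL verbatim
	#   http://example.com/pub/file.tar.gz    -> directory part of the URL plus
	#                                            the basename of the local file
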
	# Download
	#
	case "$url" in
	manual://*)
		# Determine if the file has already been downloaded
		# manually. For this we look in the -alt-dir directory
		# (if given), otherwise in $HOME.
		downloadpath=${altdir:-$HOME}
		downloadfile="${gzfile##*/}"
		if [ -e $downloadpath/$downloadfile ]; then
			location="file://$downloadpath/"
		else
			location="http://${url#manual://}"
			# No manual download has taken place yet.
			# So inform the user to do so.
			cat <<-EOT
The file $downloadfile cannot be fetched automatically,
please visit: $location
and download it manually into $HOME or somewhere else using -alt-dir.
			EOT
			return 1;
		fi

		# I am too lazy to do the copy and conversion myself,
		# so I use this function again with a modified
		# download location.
		download_file_now "$location" $gzfile $bzfile
		return "$?"
		;;

	http://*|https://*|ftp://*|file://*)
		if [ -s "$gzfile.incomplete" ] ; then
			echo "INFO: Trying to resume previous download .."
			resume="-C -"
		else
			resume=
		fi

		if [ -s download/translations.sed ]; then
			trfile=download/translations.sed
		else
			trfile=etc/download.sed
		fi

		trurl="$( echo "$url" | sed -f $trfile )"
		if [ -n "$trurl" -a "$trurl" != "$url" ]; then
			echo "INFO: url translated."
			url="$trurl"
		fi
		unset trurl trfile

		curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' --progress-bar $resume $curl_options "$url" -o "$gzfile.incomplete"
		curlret="$?"

		if [ "$resume" ] && \
			[ $curlret -eq 33 -o $curlret -eq 36 ] ; then
			echo "INFO: Resuming download not possible. ->" \
				"Overwriting old file."
			rm -f "$gzfile.incomplete"
			curl -w '\rFinished downloading %{size_download} bytes in %{time_total} seconds (%{speed_download} bytes/sec). \n' --progress-bar $curl_options "$url" -o "$gzfile.incomplete"
			curlret="$?"
		fi

		if [ $curlret -ne 0 ] ; then
			case "$curlret" in
			18)
				echo "WARNING: Got only some of the" \
					"file. A re-run of $0"
				echo "WARNING: is required to complete" \
					"the download." ;;
			130)
				$ECHO_E '\rWARNING: CURL got a SIGINT' \
					"(someone pressed Ctrl-C). A re-run of"
				echo "WARNING: $0 is required to complete" \
					"the download." ; sleep 1 ;;
			*)
				echo "$curlret $gzfile $url" \
					>> tmp/Download-Errors
				$ECHO_E '\rERROR: CURL Returned Error' \
					"$curlret. Please read" \
					"the curl manpage." ;;
			esac
			return 1
		elif [ ! -s "$gzfile.incomplete" ] ; then
			echo "0 $gzfile $url" >> tmp/Download-Errors
			echo "ERROR: CURL returned success but" \
				"we have no data!"
			curlret=1
		else
case "$gzfile" in
|
|
*.gz|*.tgz)
|
|
typeexpr="gzip compressed data" ;;
|
|
*.bz2|*.tbz2|*.tbz)
|
|
typeexpr="bzip2 compressed data" ;;
|
|
*.xz)
|
|
typeexpr="xz compressed data" ;;
|
|
*.Z|*.tZ)
|
|
typeexpr="compress'd data" ;;
|
|
*.zip|*.jar)
|
|
typeexpr="Zip archive data" ;;
|
|
*.tar)
|
|
typeexpr="tar archive" ;;
|
|
*)
|
|
echo "WARNING: Unknown file extension: $gzfile"
|
|
typeexpr="." ;;
|
|
esac
|
|
if file "$gzfile.incomplete" | grep -v "$typeexpr"
|
|
then
|
|
echo "ERROR: File type does not match" \
|
|
"filename ($typeexpr)!"
|
|
mv "$gzfile.incomplete" "$gzfile.extck-err"
|
|
else
|
|
mv "$gzfile.incomplete" "$gzfile"
|
|
fi
|
|
fi
|
|
;;
|
|
	*)
		protocol="${url%%://*}"

		# we need to use $location - $url is already mangled above -ReneR
		# $protocol://$url $options
		url="`echo "$location" | sed "s,$protocol://\([^ ]*\).*,\1,"`"
		options="`echo "$location" | cut -d' ' -f2-`"

		case "$protocol" in
		cvs)
			# the first option is the module name
			module="${options%% *}"
			options="${options#* }"
			cmdline="cvs -z4 -Q -d $url co -P $options $module"

			# sometimes cvs wants to read ~/.cvspass just for fun ..
			touch $HOME/.cvspass
			;;
		svn|svn\+http)
			if [ "$protocol" = "svn+http" ]; then
				url="http://$url"
			else
				url="svn://$url"
			fi

			if [ "${options:0:1}" = "-" ]; then
				# the module is the last dir of $url
				module="${url##*/}"
			else
				# the first option is the module name
				module="${options%% *}"
				options="${options#* }"
			fi
			cmdline="svn co $options $url $module"
			;;
		*)
			echo "Protocol $protocol unrecognized!"
			return 1
			;;
		esac

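		# An scm location is "<protocol>://<url> <module/options>", so a line
		# like the following (purely illustrative)
		#   cvs://:pserver:anonymous@anoncvs.example.org:/cvsroot mymodule -D 2011-01-01
		# yields: cvs -z4 -Q -d :pserver:anonymous@anoncvs.example.org:/cvsroot co -P -D 2011-01-01 mymodule
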
		cvsdir="tmp/down.${protocol}dir.`echo $bzfile | tr / -`"
		saved_pwd=$PWD ; mkdir -p $cvsdir ; cd $cvsdir

		echo "$cmdline"
		{
			$cmdline || touch .cvs_error
		} &> .cvs_output &

		while fuser .cvs_output &> /dev/null ; do
			$ECHO_E -n `nice du -sh 2> /dev/null | \
				cut -f1` 'downloaded from archive so far...\r'
			sleep 3
		done

		if [ -f .cvs_error ] ; then
			cd $saved_pwd ; rm -rf $cvsdir
			$ECHO_E "\nError during checkout."
			return 1
		fi

		echo `du -sh 2> /dev/null | \
			cut -f1` 'downloaded from archive (download finished).'

		if [ `echo * | wc -w` -gt 1 ]; then
			# multi-module checkout
			echo "Multi-module package detected, relocating..."
			mkdir t2-module.$$
			for x in *; do
				[ "$x" != "t2-module.$$" ] && mv -f $x t2-module.$$/
			done
			mkdir -p "$module"
			mv -f t2-module.$$/* "$module"
			rm -rf t2-module.$$
		fi

		cd `dirname $module`
		tarname="`basename $bzfile`"
		echo "Preparing files for final tarball ..."
		find -type d \( -name CVS -o -name .svn \) | xargs rm -rf

		if [ `find -type f | wc -l` -gt 4 ]; then
			find `basename $module` | xargs touch -t 200001010000
			tar --owner root --group root \
				--use-compress-program=bzip2 \
				-cf $tarname `basename $module`
			mv $tarname $saved_pwd/$bzfile
		else
			echo "Too few files - assuming checkout failure."
			curlret=1
		fi

		cd $saved_pwd ; rm -rf $cvsdir
		;;
	esac
	return $curlret
}

list_dtags() {
	{
		grep -H '^\[D\] ' package/*/*/*.desc
		grep -H '^[X0-9]' target/*/download.txt 2> /dev/null | sed 's,:,:[D] ,'
	} | column_clean
}

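# list_dtags emits one line per [D] tag in the form
# "<origin>:[D] <cksum> <file> <url>", e.g. (illustrative):
#   package/base/bash/bash.desc:[D] 1234567890 bash-4.2.tar.gz http://ftp.gnu.org/gnu/bash/
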
list_cksums() {
	trap '' INT

	# we know we only have single spaces due to list_dtags' column_clean
	list_dtags | sed -n \
		-e 's,[^ ]* \([X0-9]*\) \(.\)\([^ ]*\) -.*,\1 download/local/\2/\2\3,p' \
		-e 's,[^ ]* \([X0-9]*\) \(.\)\([^ ]*\) [^-].*,\1 download/mirror/\2/\2\3,p'

	trap INT
}

list() {
	trap '' INT
	list_cksums | cut -f2- -d' '
	trap INT
}

list_missing() {
	trap '' INT
	list | bz2filename | \
	while read fn ; do
		[ -f "$fn" ] || echo "$fn"
	done
	trap INT
}

repository() {
	for repository ; do
		packages `echo package/$repository/*/*.desc`
	done
}

required() {
	# Chosen config must exist
	#
	if ! ./lib/sde-config/migrate.sh "$config"; then
		echo "ERROR: Config $config doesn't exist."
		echo "ERROR: try ./scripts/Config -cfg $config first."
		exit 1
	fi

	while read on a b repo pkg c ; do
		if [ "$on" = "X" ] ; then
			download_file_desc "$repo" "$pkg"
		fi
	done < config/$config/packages

	target=`grep '^export SDECFG_TARGET=' config/$config/config | \
		cut -f2 -d= | tr -d "'"`
	targetchain="$target"; x="$target"
	while [ -f "target/$x/extends" ]; do
		x="`cat target/$x/extends`"
		targetchain="$targetchain $x"
	done

	for target in $targetchain; do
		if [ -f target/$target/download.txt ] ; then
			download_file_pipe "$target" < target/$target/download.txt
		fi
	done
}

all() {
	local each repo pkg

	for repo in $( cd package; ls -1 ); do
		[ -d "package/$repo/" ] || continue
		for each in package/$repo/*/*.desc; do
			[ -r "$each" ] || continue
			pkg="`echo $each | cut -f3 -d/`"

			download_file_desc "$repo" "$pkg"
		done
	done

	for each in $( ls -1 target/*/download.txt 2> /dev/null ); do
		target="`echo $each | cut -f2 -d/`"

		download_file_pipe "$target" < "$each"
	done
}

package() {
	descfile="`echo package/*/$1/$1.desc`"

	if [ ! -f $descfile ]; then
		echo "Skipping \"$1\" (not found)!"
		return
	fi

	pkg="`echo $descfile | cut -f3 -d/`"
	repo="`echo $descfile | cut -f2 -d/`"

	download_file_desc "$repo" "$pkg"
}

packages() {
	local descfile
	for arg; do
		case "$arg" in
		target/*)
			if [ ! -f $arg ]; then
				echo "Skipping \"$arg\" (not found)!"
				continue
			fi

			target="`echo $arg | cut -f2 -d/`"

			download_file_pipe "$target" < "$arg"
			;;
		*)
			if [ "${arg%.desc}" != "$arg" ]; then
				arg="`echo $arg | cut -f3 -d/`"; fi

			# active extensions
			local extender=

			# pkg_*_{pre,post}.conf is only activated if the extender
			# is enabled in $config/packages, so we only download
			# the files of those extenders
			#
			for extender in `ls -1 package/*/*/pkg_${arg}_{pre,post}.conf 2> /dev/null |
				cut -d/ -f3 | sort -u`; do
				if grep -q "^X .* $extender " \
					config/$config/packages; then
					echo_info "Also downloading $extender ..."
					package $extender
				fi
			done
			package $arg
			;;
		esac
	done
}

set +e

# Things to do only for downloading
#
if [ "${1:0:5}" != "-list" -a $checkonly = 0 ]; then
	# we need curl
	if [ -z "`type -p curl`" ]; then
		echo_abort 2 "we need \`curl\` installed and available on \$PATH to proceed."
	fi

	# do mirror detection, only once
	[ $this_is_the_2nd_run = 1 ] || detect_mirror
fi

case "$1" in
|
|
-list) list ;;
|
|
-list-missing) list_missing ;;
|
|
-list-cksums) list_cksums ;;
|
|
|
|
-required) required ;;
|
|
-all) all ;;
|
|
|
|
-repository) shift ; repository "$@" ;;
|
|
|
|
-*|"") download_usage
|
|
exit 1;;
|
|
|
|
*) packages "$@" ;;
|
|
esac
|
|
|
|
exit $downloaderror