nix-super/scripts/nix-prefetch-url.in

138 lines
3.8 KiB
Text
Raw Normal View History

#! @shell@ -e

# nix-prefetch-url URL [EXPECTED-HASH]
#
# Download URL into the Nix store and print the cryptographic hash of
# its contents on stdout.  Honors NIX_HASH_ALGO, NIX_DOWNLOAD_CACHE,
# QUIET and PRINT_PATH from the environment.

url=$1
expHash=$2

# Needed to make it work on NixOS: ensure coreutils (basename, touch,
# cat, mktemp, ...) are always reachable.
export PATH=$PATH:@coreutils@

# Hash algorithm for the store path; defaults to SHA-256.
hashType=$NIX_HASH_ALGO
if test -z "$hashType"; then
    hashType=sha256
fi

# Print hashes in base-32 notation, except for MD5, which historically
# stays hexadecimal.
hashFormat=
if test "$hashType" != "md5"; then
    hashFormat=--base32
fi
* Removed the `id' attribute hack. * Formalise the notion of fixed-output derivations, i.e., derivations for which a cryptographic hash of the output is known in advance. Changes to such derivations should not propagate upwards through the dependency graph. Previously this was done by specifying the hash component of the output path through the `id' attribute, but this is insecure since you can lie about it (i.e., you can specify any hash and then produce a completely different output). Now the responsibility for checking the output is moved from the builder to Nix itself. A fixed-output derivation can be created by specifying the `outputHash' and `outputHashAlgo' attributes, the latter taking values `md5', `sha1', and `sha256', and the former specifying the actual hash in hexadecimal or in base-32 (auto-detected by looking at the length of the attribute value). MD5 is included for compatibility but should be considered deprecated. * Removed the `drvPath' pseudo-attribute in derivation results. It's no longer necessary. * Cleaned up the support for multiple output paths in derivation store expressions. Each output now has a unique identifier (e.g., `out', `devel', `docs'). Previously there was no way to tell output paths apart at the store expression level. * `nix-hash' now has a flag `--base32' to specify that the hash should be printed in base-32 notation. * `fetchurl' accepts parameters `sha256' and `sha1' in addition to `md5'. * `nix-prefetch-url' now prints out a SHA-1 hash in base-32. (TODO: a flag to specify the hash.)
2005-01-17 18:55:19 +02:00
# A URL is mandatory; all diagnostics go to stderr so that stdout
# stays reserved for the resulting hash (and optional store path).
if test -z "$url"; then
    echo "syntax: nix-prefetch-url URL [EXPECTED-HASH]" >&2
    exit 1
fi

# The store entry is named after the last component of the URL.
name=$(basename "$url")
if test -z "$name"; then echo "invalid url" >&2; exit 1; fi

# If the hash was given, a file with that hash may already be in the
# store, in which case no download is necessary at all.
if test -n "$expHash"; then
    finalPath=$(@bindir@/nix-store --print-fixed-path "$hashType" "$expHash" "$name")
    if ! @bindir@/nix-store --check-validity "$finalPath" 2> /dev/null; then
        # Not (or no longer) valid: fall through to the download path.
        finalPath=
    fi
    hash=$expHash
fi
mkTempDir() {
    # Create a unique scratch directory for the download and arrange
    # for it to be cleaned up on exit or interruption.  mktemp(1)
    # creates the directory atomically, which removes the
    # mkdir-then-test race the old hand-rolled loop suffered from.
    tmpPath=$(mktemp -d "${TMPDIR:-/tmp}/nix-prefetch-url-XXXXXX") || exit 1
    trap removeTempDir EXIT SIGINT SIGQUIT
}
removeTempDir() {
    # Best-effort cleanup of the scratch directory; a no-op when no
    # directory was ever created, and never lets a failed rm abort
    # the script.
    case "$tmpPath" in
        "") ;;
        *) rm -rf "$tmpPath" || true ;;
    esac
}
doDownload() {
    # Fetch $url into $tmpFile, following up to 20 redirects and
    # storing cookies beside the download.  $cacheFlags is
    # intentionally left unquoted: it holds zero or more extra curl
    # options (e.g. --remote-time --time-cond ...) that must
    # word-split into separate arguments.
    @curl@ $cacheFlags --fail -# --location --max-redirs 20 --disable-epsv \
        --cookie-jar "$tmpPath/cookies" "$url" -o "$tmpFile"
}
# If we don't know the hash or a file with that hash doesn't exist,
# download the file and add it to the store.
if test -z "$finalPath"; then

    mkTempDir
    tmpFile=$tmpPath/$name

    # Optionally do timestamp-based caching of the download.
    # Actually, the only thing that we cache in $NIX_DOWNLOAD_CACHE is
    # the hash and the timestamp of the file at $url.  The caching of
    # the file *contents* is done in the Nix store, where it can be
    # garbage-collected independently.
    if test -n "$NIX_DOWNLOAD_CACHE"; then
        # Cache entries are keyed on a hash of the URL.  printf is
        # used instead of `echo -n' for portability and to be safe
        # should the URL ever start with `-'.
        printf '%s' "$url" > "$tmpPath/url"
        urlHash=$(@bindir@/nix-hash --type sha256 --base32 --flat "$tmpPath/url")
        echo "$url" > "$NIX_DOWNLOAD_CACHE/$urlHash.url"
        cachedHashFN="$NIX_DOWNLOAD_CACHE/$urlHash.$hashType"
        cachedTimestampFN="$NIX_DOWNLOAD_CACHE/$urlHash.stamp"
        cacheFlags="--remote-time"
        if test -e "$cachedTimestampFN" && test -e "$cachedHashFN"; then
            # Only download the file if it is newer than the cached version.
            cacheFlags="$cacheFlags --time-cond $cachedTimestampFN"
        fi
    fi

    # Perform the download.
    doDownload

    if test -n "$NIX_DOWNLOAD_CACHE" && test ! -e "$tmpFile"; then
        # Curl didn't create $tmpFile, so apparently there's no newer
        # file on the server; reuse the cached hash.
        hash=$(cat "$cachedHashFN")
        finalPath=$(@bindir@/nix-store --print-fixed-path "$hashType" "$hash" "$name")
        if ! @bindir@/nix-store --check-validity "$finalPath" 2> /dev/null; then
            # The store path vanished (e.g. garbage-collected); force
            # a fresh, unconditional download.
            echo "cached contents of \`$url' disappeared, redownloading..." >&2
            finalPath=
            cacheFlags="--remote-time"
            doDownload
        fi
    fi

    if test -z "$finalPath"; then

        # Compute the hash.  $hashFormat is deliberately unquoted: it
        # is either empty (md5) or a single --base32 flag.
        hash=$(@bindir@/nix-hash --type "$hashType" $hashFormat --flat "$tmpFile")
        if test -z "$QUIET"; then echo "hash is $hash" >&2; fi

        # Refresh the download cache with the new hash and the
        # server-provided timestamp of the downloaded file.
        if test -n "$NIX_DOWNLOAD_CACHE"; then
            echo "$hash" > "$cachedHashFN"
            touch -r "$tmpFile" "$cachedTimestampFN"
        fi

        # Add the downloaded file to the Nix store.
        finalPath=$(@bindir@/nix-store --add-fixed "$hashType" "$tmpFile")

        # Fail hard if the caller's expected hash doesn't match what
        # we actually downloaded.
        if test -n "$expHash" && test "$expHash" != "$hash"; then
            echo "hash mismatch for URL \`$url'" >&2
            exit 1
        fi
    fi

fi
if ! test -n "$QUIET"; then echo "path is $finalPath" >&2; fi
echo $hash
if test -n "$PRINT_PATH"; then
echo $finalPath
fi