#! @perl@ -w -I@libexecdir@/nix

use strict;
use readmanifest;
use POSIX qw(strftime);
use File::Temp qw(tempdir);

STDOUT->autoflush(1);

my $manifestDir = "@localstatedir@/nix/manifests";
my $logFile = "@localstatedir@/log/nix/downloads";
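# Each log line is prefixed with the PID; the events written below are
# "get", "rejecting", "full-download-would-be", "present", "patch",
# "narfile" and "success".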


# Load all manifests.
my %narFiles;
my %localPaths;
my %patches;
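
# Each of these maps a store path to a list of alternatives taken from the
# manifests.  As used below, a %narFiles entry provides at least url, hash
# and size (plus deriver/references for "--query info"), a %localPaths entry
# provides copyFrom, and a %patches entry provides url, hash, size, basePath
# and baseHash.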

for my $manifest (glob "$manifestDir/*.nixmanifest") {
    if (readManifest($manifest, \%narFiles, \%localPaths, \%patches) < 3) {
        print STDERR "you have an old-style manifest `$manifest'; please delete it\n";
        exit 1;
    }
}


# Parse the arguments.

if ($ARGV[0] eq "--query") {
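    # --query mode speaks a simple line-based protocol on stdin/stdout: each
    # request is a command line ("have" or "info") followed by a store path
    # line.  "have" answers "1" or "0"; "info" answers "0" for an unknown
    # path, or "1" followed by the deriver, the number of references, the
    # references themselves and the NAR size.  For example (made-up store
    # path), the request lines
    #   have
    #   /nix/store/...-foo-1.0
    # are answered with a single line "1" or "0".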

    while (<STDIN>) {
        my $cmd = $_; chomp $cmd;

        if ($cmd eq "have") {
            my $storePath = <STDIN>; chomp $storePath;
            print STDOUT ((defined $narFiles{$storePath} or defined $localPaths{$storePath})
                ? "1\n" : "0\n");
        }

        elsif ($cmd eq "info") {
            my $storePath = <STDIN>; chomp $storePath;
            my $info;
            if (defined $narFiles{$storePath}) {
                $info = @{$narFiles{$storePath}}[0];
            }
            elsif (defined $localPaths{$storePath}) {
                $info = @{$localPaths{$storePath}}[0];
            }
            else {
                print "0\n";
                next; # not an error
            }
            print "1\n";
            print "$info->{deriver}\n";
            my @references = split " ", $info->{references};
            print scalar @references, "\n";
            print "$_\n" foreach @references;
            my $size = $info->{size} || 0;
            print "$size\n";
        }

        else { die "unknown command `$cmd'"; }
    }

    exit 0;
}

elsif ($ARGV[0] ne "--substitute") {
    die "syntax: $0 [--query | --substitute PATH]\n";
}


die unless scalar @ARGV == 2;
my $targetPath = $ARGV[1];


# Create a temporary directory.
my $tmpDir = tempdir("nix-download.XXXXXX", CLEANUP => 1, TMPDIR => 1)
    or die "cannot create a temporary directory";

chdir $tmpDir or die "cannot change to `$tmpDir': $!";

my $tmpNar = "$tmpDir/nar";
my $tmpNar2 = "$tmpDir/nar2";
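# $tmpNar holds the NAR archive that serves as the base for the next patch;
# $tmpNar2 receives the result of applying a patch to it.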


open LOGFILE, ">>$logFile" or die "cannot open log file $logFile";

my $date = strftime ("%F %H:%M:%S UTC", gmtime (time));
print LOGFILE "$$ get $targetPath $date\n";

print "\n*** Trying to download/patch `$targetPath'\n";


# If we can copy from a local path, do that.
my $localPathList = $localPaths{$targetPath};
foreach my $localPath (@{$localPathList}) {
    my $sourcePath = $localPath->{copyFrom};
    if (-e $sourcePath) {
        print "\n*** Step 1/1: copying from $sourcePath\n";
        system("@bindir@/nix-store --dump $sourcePath | @bindir@/nix-store --restore $targetPath") == 0
            or die "cannot copy `$sourcePath' to `$targetPath'";
        exit 0;
    }
}


# Build a graph of all store paths that might contribute to the
# construction of $targetPath, and the special node "start". The
# edges are either patch operations, or downloads of full NAR files.
# The latter edges only occur between "start" and a store path.
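# Edge weights are the corresponding download sizes, so the shortest path
# computed below minimizes the number of bytes that have to be fetched.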

my %graph;

$graph{"start"} = {d => 0, pred => undef, edges => []};

my @queue = ();
my $queueFront = 0;
my %done;

sub addToQueue {
    my $v = shift;
    return if defined $done{$v};
    $done{$v} = 1;
    push @queue, $v;
}

sub addNode {
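    # New nodes start out at an effectively infinite distance from "start".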
    my $u = shift;
    $graph{$u} = {d => 999999999999, pred => undef, edges => []}
        unless defined $graph{$u};
}

sub addEdge {
    my $u = shift;
    my $v = shift;
    my $w = shift;
    my $type = shift;
    my $info = shift;
    addNode $u;
    push @{$graph{$u}->{edges}},
        {weight => $w, start => $u, end => $v, type => $type, info => $info};
}

addToQueue $targetPath;

sub isValidPath {
    my $p = shift;
    return system("@bindir@/nix-store --check-validity '$p' 2> /dev/null") == 0;
}

sub parseHash {
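    # A hash in a manifest is either of the form "<algo>:<hash>" or a bare
    # hash; a bare hash is treated as MD5 (presumably from older manifests).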
    my $hash = shift;
    if ($hash =~ /^(.+):(.+)$/) {
        return ($1, $2);
    } else {
        return ("md5", $hash);
    }
}

while ($queueFront < scalar @queue) {
    my $u = $queue[$queueFront++];
#    print "$u\n";

    addNode $u;

    # If the path already exists, it has distance 0 from the "start"
    # node.
    if (isValidPath($u)) {
        addEdge "start", $u, 0, "present", undef;
    }

    else {

        # Add patch edges.
        my $patchList = $patches{$u};
        foreach my $patch (@{$patchList}) {
            if (isValidPath($patch->{basePath})) {
                # !!! this should be cached
                my ($baseHashAlgo, $baseHash) = parseHash $patch->{baseHash};
                my $format = "--base32";
                $format = "" if $baseHashAlgo eq "md5";
                my $hash = `@bindir@/nix-hash --type '$baseHashAlgo' $format "$patch->{basePath}"`;
                chomp $hash;
#                print "  MY HASH is $hash\n";
                if ($hash ne $baseHash) {
                    print LOGFILE "$$ rejecting $patch->{basePath}\n";
                    next;
                }
            }
#            print "  PATCH from $patch->{basePath}\n";
            addToQueue $patch->{basePath};
            addEdge $patch->{basePath}, $u, $patch->{size}, "patch", $patch;
        }

        # Add NAR file edges to the start node.
        my $narFileList = $narFiles{$u};
        foreach my $narFile (@{$narFileList}) {
#            print "  NAR from $narFile->{url}\n";
            addEdge "start", $u, $narFile->{size}, "narfile", $narFile;
            if ($u eq $targetPath) {
                print LOGFILE "$$ full-download-would-be $narFile->{size}\n";
            }
        }

    }
}


# Run Dijkstra's shortest path algorithm to determine the shortest
# sequence of download and/or patch actions that will produce
# $targetPath.
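# (The "priority queue" below is simply the @todo list re-sorted on every
# iteration; not asymptotically optimal, but presumably fine for the small
# graphs this script builds.)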

sub byDistance { # sort by distance, reversed
    return -($graph{$a}->{d} <=> $graph{$b}->{d});
}

my @todo = keys %graph;

while (scalar @todo > 0) {

    # Remove the closest element from the todo list.
    @todo = sort byDistance @todo;
    my $u = pop @todo;

    my $u_ = $graph{$u};

#    print "IN $u $u_->{d}\n";

    foreach my $edge (@{$u_->{edges}}) {
        my $v_ = $graph{$edge->{end}};
        if ($v_->{d} > $u_->{d} + $edge->{weight}) {
            $v_->{d} = $u_->{d} + $edge->{weight};
            # Store the edge; its start node is the predecessor on the
            # shortest path.
            $v_->{pred} = $edge;
#            print "  RELAX $edge->{end} $v_->{d}\n";
        }
    }
}


# Retrieve the shortest path from "start" to $targetPath.
my @path = ();
my $cur = $targetPath;
die "don't know how to produce $targetPath\n"
    unless defined $graph{$targetPath}->{pred};
while ($cur ne "start") {
    push @path, $graph{$cur}->{pred};
    $cur = $graph{$cur}->{pred}->{start};
}


# Traverse the shortest path, performing the actions described by the
# edges.
my $curStep = 1;
my $maxStep = scalar @path;

sub downloadFile {
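    # Download $url with nix-prefetch-url and check it against the expected
    # hash.  With PRINT_PATH set, nix-prefetch-url prints the hash and then
    # the path of the downloaded file, which are captured below.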
    my $url = shift;
    my ($hashAlgo, $hash) = parseHash(shift);
    $ENV{"PRINT_PATH"} = 1;
    $ENV{"QUIET"} = 1;
    $ENV{"NIX_HASH_ALGO"} = $hashAlgo;
    my ($hash2, $path) = `@bindir@/nix-prefetch-url '$url' '$hash'`;
    die "download of `$url' failed" unless $? == 0;
    chomp $hash2;
    chomp $path;
    die "hash mismatch, expected $hash, got $hash2" if $hash ne $hash2;
    return $path;
}
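

# @path was built backwards, from the target towards "start", so popping
# edges off the end walks them in the order in which they must be performed.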
while (scalar @path > 0) {
    my $edge = pop @path;
    my $u = $edge->{start};
    my $v = $edge->{end};

    print "\n*** Step $curStep/$maxStep: ";

    if ($edge->{type} eq "present") {
        print "using already present path `$v'\n";
        print LOGFILE "$$ present $v\n";

        if ($curStep < $maxStep) {
            # Since this is not the last step, the path will be used as
            # the base for one or more patches.  So turn the base path
            # into a NAR archive, to which we can apply the patch.
            print "  packing base path...\n";
            system("@bindir@/nix-store --dump $v > $tmpNar") == 0
                or die "cannot dump `$v'";
        }
    }

    elsif ($edge->{type} eq "patch") {
        my $patch = $edge->{info};
        print "applying patch `$patch->{url}' to `$u' to create `$v'\n";

        print LOGFILE "$$ patch $patch->{url} $patch->{size} $patch->{baseHash} $u $v\n";

        # Download the patch.
        print "  downloading patch...\n";
        my $patchPath = downloadFile "$patch->{url}", "$patch->{hash}";

        # Apply the patch to the NAR archive produced in step 1 (for
        # the already present path) or a later step (for patch sequences).
        print "  applying patch...\n";
        system("@libexecdir@/bspatch $tmpNar $tmpNar2 $patchPath") == 0
            or die "cannot apply patch `$patchPath' to $tmpNar";

        if ($curStep < $maxStep) {
            # The archive will be used as the base of the next patch.
            rename "$tmpNar2", "$tmpNar" or die "cannot rename NAR archive: $!";
        } else {
            # This was the last patch.  Unpack the final NAR archive
            # into the target path.
            print "  unpacking patched archive...\n";
            system("@bindir@/nix-store --restore $v < $tmpNar2") == 0
                or die "cannot unpack $tmpNar2 into `$v'";
        }
    }

    elsif ($edge->{type} eq "narfile") {
        my $narFile = $edge->{info};
        print "downloading `$narFile->{url}' into `$v'\n";

        print LOGFILE "$$ narfile $narFile->{url} $narFile->{size} $v\n";

        # Download the archive.
        print "  downloading archive...\n";
        my $narFilePath = downloadFile "$narFile->{url}", "$narFile->{hash}";

        if ($curStep < $maxStep) {
            # The archive will be used as the base for a patch.
            system("@bunzip2@ < '$narFilePath' > $tmpNar") == 0
                or die "cannot unpack `$narFilePath' into `$tmpNar'";
        } else {
            # Unpack the archive into the target path.
            print "  unpacking archive...\n";
            system("@bunzip2@ < '$narFilePath' | @bindir@/nix-store --restore '$v'") == 0
                or die "cannot unpack `$narFilePath' into `$v'";
        }
    }

    $curStep++;
}


print LOGFILE "$$ success\n";
close LOGFILE;