2019-06-04 21:01:21 +03:00
|
|
|
#include "lockfile.hh"
|
|
|
|
#include "store-api.hh"
|
2020-01-29 15:57:57 +02:00
|
|
|
#include "fetchers/regex.hh"
|
2019-06-04 21:01:21 +03:00
|
|
|
|
2019-10-21 23:11:21 +03:00
|
|
|
#include <nlohmann/json.hpp>
|
|
|
|
|
2019-06-04 21:01:21 +03:00
|
|
|
namespace nix::flake {
|
|
|
|
|
2020-01-31 20:16:40 +02:00
|
|
|
/* Construct a FlakeRef from its JSON attribute-set representation
   (the inverse of fetchers::attrsToJson on a ref's attrs). */
FlakeRef flakeRefFromJson(const nlohmann::json & json)
{
    auto attrs = jsonToAttrs(json);
    return FlakeRef::fromAttrs(attrs);
}
|
|
|
|
|
|
|
|
/* Extract a flake reference from a lock file entry. Version 4 lock
   files store it as a JSON attribute set under 'version4Attr';
   version 3 files store it as a URL string under one of two legacy
   attribute names. Throws if none of the attributes is present. */
FlakeRef getFlakeRef(
    const nlohmann::json & json,
    const char * version3Attr1,
    const char * version3Attr2,
    const char * version4Attr)
{
    // Preferred (version 4) representation: an attribute set.
    if (auto attrs = json.find(version4Attr); attrs != json.end())
        return flakeRefFromJson(*attrs);

    // FIXME: remove these.
    // Legacy (version 3) representation: a URL string.
    for (auto legacyAttr : {version3Attr1, version3Attr2})
        if (auto url = json.find(legacyAttr); url != json.end())
            return parseFlakeRef(*url);

    throw Error("attribute '%s' missing in lock file", version4Attr);
}
|
|
|
|
|
2020-02-02 00:33:44 +02:00
|
|
|
/* Parse the tree metadata of a lock file entry: either an 'info'
   object (with a mandatory 'narHash' and optional 'revCount' /
   'lastModified'), or a bare top-level 'narHash' attribute. */
static TreeInfo parseTreeInfo(const nlohmann::json & json)
{
    TreeInfo result;

    auto infoIt = json.find("info");
    if (infoIt != json.end()) {
        const nlohmann::json & infoJson(*infoIt);

        // 'narHash' is mandatory inside 'info'.
        auto narHashIt = infoJson.find("narHash");
        if (narHashIt == infoJson.end())
            throw Error("attribute 'narHash' missing in lock file");
        result.narHash = Hash(static_cast<std::string>(*narHashIt));

        // The remaining fields are optional.
        if (auto it = infoJson.find("revCount"); it != infoJson.end())
            result.revCount = *it;
        if (auto it = infoJson.find("lastModified"); it != infoJson.end())
            result.lastModified = *it;

        return result;
    }

    // Fallback: a bare 'narHash' at the top level of the entry.
    if (auto it = json.find("narHash"); it != json.end()) {
        result.narHash = Hash(static_cast<std::string>(*it));
        return result;
    }

    throw Error("attribute 'info' missing in lock file");
}
|
|
|
|
|
2019-08-30 17:27:51 +03:00
|
|
|
/* Deserialize a single locked input from its lock file JSON.
   Accepts both version 3 ("url"/"uri", "originalUrl"/"originalUri")
   and version 4 ("locked", "original") attribute names. The
   LockedInputs base class consumes the nested 'inputs' attribute. */
LockedInput::LockedInput(const nlohmann::json & json)
    : LockedInputs(json)
    , lockedRef(getFlakeRef(json, "url", "uri", "locked"))
    , originalRef(getFlakeRef(json, "originalUrl", "originalUri", "original"))
    , info(parseTreeInfo(json))
    // 'flake' is optional and defaults to true when absent.
    , isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
{
    // A lock file must pin inputs exactly; a mutable locked ref would
    // defeat reproducibility.
    if (!lockedRef.input->isImmutable())
        throw Error("lockfile contains mutable flakeref '%s'", lockedRef);
}
|
|
|
|
|
2020-02-02 00:33:44 +02:00
|
|
|
/* Serialize a TreeInfo to the JSON object stored under 'info' in the
   lock file. The NAR hash is required (asserted); 'revCount' and
   'lastModified' are emitted only when known. */
static nlohmann::json treeInfoToJson(const TreeInfo & info)
{
    assert(info.narHash);

    auto json = nlohmann::json::object();
    json["narHash"] = info.narHash.to_string(SRI);
    if (info.revCount)
        json["revCount"] = *info.revCount;
    if (info.lastModified)
        json["lastModified"] = *info.lastModified;
    return json;
}
|
|
|
|
|
2019-08-30 17:27:51 +03:00
|
|
|
/* Serialize this locked input, including its own nested inputs, to
   lock file JSON. */
nlohmann::json LockedInput::toJson() const
{
    auto j = LockedInputs::toJson();   // nested 'inputs' mapping
    j["locked"] = fetchers::attrsToJson(lockedRef.toAttrs());
    j["original"] = fetchers::attrsToJson(originalRef.toAttrs());
    j["info"] = treeInfoToJson(info);
    // 'flake' is omitted when true (the default).
    if (!isFlake)
        j["flake"] = false;
    return j;
}
|
|
|
|
|
2020-01-30 01:13:37 +02:00
|
|
|
/* Compute the store path this input's tree would occupy in 'store',
   delegating to the tree metadata (which holds the NAR hash). */
StorePath LockedInput::computeStorePath(Store & store) const
{
    return info.computeStorePath(store);
}
|
|
|
|
|
2019-08-30 17:27:51 +03:00
|
|
|
/* Deserialize the 'inputs' mapping of a lock file (or of a locked
   input, which can carry its own nested inputs). An absent 'inputs'
   attribute is treated as an empty mapping. */
LockedInputs::LockedInputs(const nlohmann::json & json)
{
    /* Use find() rather than operator[]: on a const nlohmann::json
       object, operator[] has undefined behaviour when the key is
       absent, and an entry without nested inputs is legitimate. */
    auto i = json.find("inputs");
    if (i == json.end()) return;
    for (auto & input : i->items())
        inputs.insert_or_assign(input.key(), LockedInput(input.value()));
}
|
|
|
|
|
2019-08-30 17:27:51 +03:00
|
|
|
/* Serialize the inputs mapping to JSON. Always emits an 'inputs'
   key, even when the mapping is empty. */
nlohmann::json LockedInputs::toJson() const
{
    auto inputsJson = nlohmann::json::object();
    for (auto & [name, input] : inputs)
        inputsJson[name] = input.toJson();

    nlohmann::json json;
    json["inputs"] = std::move(inputsJson);
    return json;
}
|
|
|
|
|
2020-03-09 16:28:41 +02:00
|
|
|
std::string LockedInputs::to_string() const
|
|
|
|
{
|
|
|
|
return toJson().dump(2);
|
|
|
|
}
|
|
|
|
|
2020-01-21 17:27:53 +02:00
|
|
|
bool LockedInputs::isImmutable() const
|
2019-07-12 14:29:54 +03:00
|
|
|
{
|
2019-08-30 17:27:51 +03:00
|
|
|
for (auto & i : inputs)
|
2020-02-02 12:31:58 +02:00
|
|
|
if (!i.second.lockedRef.input->isImmutable() || !i.second.isImmutable()) return false;
|
2019-07-12 14:29:54 +03:00
|
|
|
|
2020-01-21 17:27:53 +02:00
|
|
|
return true;
|
2019-07-12 14:29:54 +03:00
|
|
|
}
|
|
|
|
|
Respect lock files of inputs + fine-grained lock file control
When computing a lock file, we now respect the lock files of flake
inputs. This is important for usability / reproducibility. For
example, the 'nixops' flake depends on the 'nixops-aws' and
'nixops-hetzner' repositories. So when the 'nixops' flake is used in
another flake, we want the versions of 'nixops-aws' and
'nixops-hetzner' locked by the 'nixops' flake because those
presumably have been tested.
This can lead to a proliferation of versions of flakes like 'nixpkgs'
(since every flake's lock file could depend on a different version of
'nixpkgs'). This is not a major issue when using Nixpkgs overlays or
NixOS modules, since then the top-level flake composes those
overlays/modules into *its* version of Nixpkgs and all other versions
are ignored. Lock file computation has been made a bit more lazy so it
won't try to fetch all those versions of 'nixpkgs'.
However, in case it's necessary to minimize flake versions, there now
are two input attributes that allow this. First, you can copy an input
from another flake, as follows:
inputs.nixpkgs.follows = "dwarffs/nixpkgs";
This states that the calling flake's 'nixpkgs' input shall be the same
as the 'nixpkgs' input of the 'dwarffs' input.
Second, you can override inputs of inputs:
inputs.nixpkgs.url = github:edolstra/nixpkgs/<hash>;
inputs.nixops.inputs.nixpkgs.url = github:edolstra/nixpkgs/<hash>;
or equivalently, using 'follows':
inputs.nixpkgs.url = github:edolstra/nixpkgs/<hash>;
inputs.nixops.inputs.nixpkgs.follows = "nixpkgs";
This states that the 'nixpkgs' input of the 'nixops' input shall be
the same as the calling flake's 'nixpkgs' input.
Finally, at '-v' Nix now prints the changes to the lock file, e.g.
$ nix flake update ~/Misc/eelco-configurations/hagbard
inputs of flake 'git+file:///home/eelco/Misc/eelco-configurations?subdir=hagbard' changed:
updated 'nixpkgs': 'github:edolstra/nixpkgs/7845bf5f4b3013df1cf036e9c9c3a55a30331db9' -> 'github:edolstra/nixpkgs/03f3def66a104a221aac8b751eeb7075374848fd'
removed 'nixops'
removed 'nixops/nixops-aws'
removed 'nixops/nixops-hetzner'
removed 'nixops/nixpkgs'
2020-01-24 23:05:11 +02:00
|
|
|
std::optional<LockedInput *> LockedInputs::findInput(const InputPath & path)
|
|
|
|
{
|
|
|
|
assert(!path.empty());
|
|
|
|
|
|
|
|
LockedInputs * pos = this;
|
|
|
|
|
|
|
|
for (auto & elem : path) {
|
|
|
|
auto i = pos->inputs.find(elem);
|
|
|
|
if (i == pos->inputs.end())
|
|
|
|
return {};
|
|
|
|
pos = &i->second;
|
|
|
|
}
|
|
|
|
|
|
|
|
return (LockedInput *) pos;
|
|
|
|
}
|
|
|
|
|
2020-01-30 00:12:58 +02:00
|
|
|
void LockedInputs::removeInput(const InputPath & path)
|
|
|
|
{
|
|
|
|
assert(!path.empty());
|
|
|
|
|
|
|
|
LockedInputs * pos = this;
|
|
|
|
|
|
|
|
for (size_t n = 0; n < path.size(); n++) {
|
|
|
|
auto i = pos->inputs.find(path[n]);
|
|
|
|
if (i == pos->inputs.end()) return;
|
|
|
|
if (n + 1 == path.size())
|
|
|
|
pos->inputs.erase(i);
|
|
|
|
else
|
|
|
|
pos = &i->second;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-06-04 21:01:21 +03:00
|
|
|
/* Serialize the whole lock file, tagging it with the current format
   version. */
nlohmann::json LockFile::toJson() const
{
    // Inputs first, then the version tag.
    auto result = LockedInputs::toJson();
    result["version"] = 4;
    return result;
}
|
|
|
|
|
|
|
|
/* Read a lock file from disk. A missing file yields an empty lock
   file; an unsupported (or absent) format version is an error. */
LockFile LockFile::read(const Path & path)
{
    if (!pathExists(path))
        return LockFile();

    auto json = nlohmann::json::parse(readFile(path));

    // A file without a 'version' attribute is treated as version 0.
    auto version = json.value("version", 0);
    if (version != 3 && version != 4)
        throw Error("lock file '%s' has unsupported version %d", path, version);

    return LockFile(json);
}
|
|
|
|
|
|
|
|
std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
|
|
|
|
{
|
2020-01-31 20:16:40 +02:00
|
|
|
stream << lockFile.toJson().dump(2);
|
2019-06-04 21:01:21 +03:00
|
|
|
return stream;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Write the lock file to 'path' (with a trailing newline), creating
   parent directories as needed. */
void LockFile::write(const Path & path) const
{
    createDirs(dirOf(path));
    // Uses operator<< via fmt, i.e. 2-space-indented JSON.
    writeFile(path, fmt("%s\n", *this));
}
|
|
|
|
|
2020-01-29 15:57:57 +02:00
|
|
|
/* Parse a slash-separated input path such as "dwarffs/nixpkgs" into
   its elements, validating each one against the flake identifier
   syntax. Throws on an invalid element or an empty path. */
InputPath parseInputPath(std::string_view s)
{
    InputPath result;

    for (auto & elem : tokenizeString<std::vector<std::string>>(s, "/")) {
        if (!std::regex_match(elem, fetchers::flakeIdRegex))
            throw Error("invalid flake input path element '%s'", elem);
        result.push_back(elem);
    }

    if (result.empty())
        throw Error("flake input path is empty");

    return result;
}
|
|
|
|
|
2019-06-04 21:01:21 +03:00
|
|
|
}
|