Mirror of https://github.com/privatevoid-net/nix-super.git, synced 2024-11-30 01:26:15 +02:00
Turn flake inputs into an attrset
Instead of a list, inputs are now an attrset, e.g. inputs = { nixpkgs.uri = github:NixOS/nixpkgs; };

If 'uri' is omitted, then the input is looked up in the flake registry, e.g. inputs = { nixpkgs = {}; }; but in that case you can also just omit the input altogether and specify it as an argument to the 'outputs' function, as in outputs = { self, nixpkgs }: ...

This also gets rid of 'nonFlakeInputs', which are now just a special kind of input that has a 'flake = false' attribute, e.g. inputs = { someRepo = { uri = github:example/repo; flake = false; }; };
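For illustration, a minimal flake.nix combining the forms described above might look as follows; the edition number is taken from the test script further down, and 'someRepo' and the URIs are just the placeholder names used in the commit message:

{
  edition = 201909;

  inputs = {
    # input with an explicit URI
    nixpkgs.uri = github:NixOS/nixpkgs;
    # non-flake input
    someRepo = { uri = github:example/repo; flake = false; };
    # no 'uri': resolved via the flake registry
    flake1 = {};
  };

  outputs = { self, nixpkgs, someRepo, flake1 }: {
    # ...
  };
}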
parent 0588d72286
commit 30ccf4e52d

7 changed files with 136 additions and 220 deletions
@@ -248,22 +248,28 @@ Flake getFlake(EvalState & state, const FlakeRef & flakeRef)
        flake.description = state.forceStringNoCtx(*(**description).value, *(**description).pos);

    auto sInputs = state.symbols.create("inputs");
    auto sUri = state.symbols.create("uri");
    auto sFlake = state.symbols.create("flake");

    if (auto inputs = vInfo.attrs->get(sInputs)) {
        state.forceList(*(**inputs).value, *(**inputs).pos);
        for (unsigned int n = 0; n < (**inputs).value->listSize(); ++n)
            flake.inputs.push_back(FlakeRef(state.forceStringNoCtx(
                *(**inputs).value->listElems()[n], *(**inputs).pos)));
    }
    if (std::optional<Attr *> inputs = vInfo.attrs->get(sInputs)) {
        state.forceAttrs(*(**inputs).value, *(**inputs).pos);

    auto sNonFlakeInputs = state.symbols.create("nonFlakeInputs");
        for (Attr inputAttr : *(*(**inputs).value).attrs) {
            state.forceAttrs(*inputAttr.value, *inputAttr.pos);

    if (std::optional<Attr *> nonFlakeInputs = vInfo.attrs->get(sNonFlakeInputs)) {
        state.forceAttrs(*(**nonFlakeInputs).value, *(**nonFlakeInputs).pos);
        for (Attr attr : *(*(**nonFlakeInputs).value).attrs) {
            std::string myNonFlakeUri = state.forceStringNoCtx(*attr.value, *attr.pos);
            FlakeRef nonFlakeRef = FlakeRef(myNonFlakeUri);
            flake.nonFlakeInputs.insert_or_assign(attr.name, nonFlakeRef);
            FlakeInput input(FlakeRef(inputAttr.name));

            for (Attr attr : *(inputAttr.value->attrs)) {
                if (attr.name == sUri) {
                    input.ref = state.forceStringNoCtx(*attr.value, *attr.pos);
                } else if (attr.name == sFlake) {
                    input.isFlake = state.forceBool(*attr.value, *attr.pos);
                } else
                    throw Error("flake input '%s' has an unsupported attribute '%s', at %s",
                        inputAttr.name, attr.name, *attr.pos);
            }

            flake.inputs.emplace(inputAttr.name, input);
        }
    }
@@ -275,9 +281,8 @@ Flake getFlake(EvalState & state, const FlakeRef & flakeRef)

    if (flake.vOutputs->lambda.fun->matchAttrs) {
        for (auto & formal : flake.vOutputs->lambda.fun->formals->formals) {
            if (formal.name != state.sSelf) {
                flake.inputs.push_back(FlakeRef(formal.name));
            }
            if (formal.name != state.sSelf)
                flake.inputs.emplace(formal.name, FlakeInput(FlakeRef(formal.name)));
        }
    }
@@ -290,7 +295,6 @@ Flake getFlake(EvalState & state, const FlakeRef & flakeRef)
            attr.name != state.sName &&
            attr.name != state.sDescription &&
            attr.name != sInputs &&
            attr.name != sNonFlakeInputs &&
            attr.name != sOutputs)
            throw Error("flake '%s' has an unsupported attribute '%s', at %s",
                flakeRef, attr.name, *attr.pos);
@@ -299,21 +303,19 @@ Flake getFlake(EvalState & state, const FlakeRef & flakeRef)
    return flake;
}

NonFlake getNonFlake(EvalState & state, const FlakeRef & flakeRef)
static SourceInfo getNonFlake(EvalState & state, const FlakeRef & flakeRef)
{
    auto sourceInfo = fetchFlake(state, flakeRef);
    debug("got non-flake source '%s' with flakeref %s", sourceInfo.storePath, sourceInfo.resolvedRef.to_string());

    FlakeRef resolvedRef = sourceInfo.resolvedRef;

    NonFlake nonFlake(flakeRef, sourceInfo);

    state.store->assertStorePath(nonFlake.sourceInfo.storePath);
    state.store->assertStorePath(sourceInfo.storePath);

    if (state.allowedPaths)
        state.allowedPaths->insert(nonFlake.sourceInfo.storePath);
        state.allowedPaths->insert(sourceInfo.storePath);

    return nonFlake;
    return sourceInfo;
}

bool allowedToWrite(HandleLockFile handle)
@@ -346,46 +348,33 @@ bool allowedToUseRegistries(HandleLockFile handle, bool isTopRef)

   Note that this is lazy: we only recursively fetch inputs that are
   not in the lockfile yet. */
static std::pair<Flake, FlakeInput> updateLocks(
static std::pair<Flake, LockedInput> updateLocks(
    EvalState & state,
    const Flake & flake,
    HandleLockFile handleLockFile,
    const FlakeInputs & oldEntry,
    const LockedInputs & oldEntry,
    bool topRef)
{
    FlakeInput newEntry(
        flake.id,
    LockedInput newEntry(
        flake.sourceInfo.resolvedRef,
        flake.sourceInfo.narHash);

    for (auto & input : flake.nonFlakeInputs) {
        auto & id = input.first;
        auto & ref = input.second;
        auto i = oldEntry.nonFlakeInputs.find(id);
        if (i != oldEntry.nonFlakeInputs.end()) {
            newEntry.nonFlakeInputs.insert_or_assign(i->first, i->second);
    for (auto & [id, input] : flake.inputs) {
        auto i = oldEntry.inputs.find(id);
        if (i != oldEntry.inputs.end()) {
            newEntry.inputs.insert_or_assign(id, i->second);
        } else {
            if (handleLockFile == AllPure || handleLockFile == TopRefUsesRegistries)
                throw Error("cannot update non-flake dependency '%s' in pure mode", id);
            auto nonFlake = getNonFlake(state, maybeLookupFlake(state, ref, allowedToUseRegistries(handleLockFile, false)));
            newEntry.nonFlakeInputs.insert_or_assign(id,
                NonFlakeInput(
                    nonFlake.sourceInfo.resolvedRef,
                    nonFlake.sourceInfo.narHash));
        }
    }

    for (auto & inputRef : flake.inputs) {
        auto i = oldEntry.flakeInputs.find(inputRef);
        if (i != oldEntry.flakeInputs.end()) {
            newEntry.flakeInputs.insert_or_assign(inputRef, i->second);
        } else {
            if (handleLockFile == AllPure || handleLockFile == TopRefUsesRegistries)
                throw Error("cannot update flake dependency '%s' in pure mode", inputRef);
            newEntry.flakeInputs.insert_or_assign(inputRef,
                updateLocks(state,
                    getFlake(state, maybeLookupFlake(state, inputRef, allowedToUseRegistries(handleLockFile, false))),
                    handleLockFile, {}, false).second);
                throw Error("cannot update flake input '%s' in pure mode", id);
            if (input.isFlake)
                newEntry.inputs.insert_or_assign(id,
                    updateLocks(state,
                        getFlake(state, maybeLookupFlake(state, input.ref, allowedToUseRegistries(handleLockFile, false))),
                        handleLockFile, {}, false).second);
            else {
                auto sourceInfo = getNonFlake(state, maybeLookupFlake(state, input.ref, allowedToUseRegistries(handleLockFile, false)));
                newEntry.inputs.insert_or_assign(id, LockedInput(sourceInfo.resolvedRef, sourceInfo.narHash));
            }
        }
    }
@@ -462,81 +451,69 @@ static void emitSourceInfoAttrs(EvalState & state, const SourceInfo & sourceInfo
            std::put_time(std::gmtime(&*sourceInfo.lastModified), "%Y%m%d%H%M%S")));
}

struct LazyInput
{
    bool isFlake;
    LockedInput lockedInput;
};

/* Helper primop to make callFlake (below) fetch/call its inputs
   lazily. Note that this primop cannot be called by user code since
   it doesn't appear in 'builtins'. */
static void prim_callFlake(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
    auto lazyFlake = (FlakeInput *) args[0]->attrs;
    auto lazyInput = (LazyInput *) args[0]->attrs;

    assert(lazyFlake->ref.isImmutable());
    assert(lazyInput->lockedInput.ref.isImmutable());

    auto flake = getFlake(state, lazyFlake->ref);
    if (lazyInput->isFlake) {
        auto flake = getFlake(state, lazyInput->lockedInput.ref);

    if (flake.sourceInfo.narHash != lazyFlake->narHash)
        throw Error("the content hash of flake '%s' doesn't match the hash recorded in the referring lockfile", flake.sourceInfo.resolvedRef);
        if (flake.sourceInfo.narHash != lazyInput->lockedInput.narHash)
            throw Error("the content hash of flake '%s' doesn't match the hash recorded in the referring lockfile", flake.sourceInfo.resolvedRef);

    callFlake(state, flake, *lazyFlake, v);
}
        callFlake(state, flake, lazyInput->lockedInput, v);
    } else {
        auto sourceInfo = getNonFlake(state, lazyInput->lockedInput.ref);

static void prim_callNonFlake(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
    auto lazyNonFlake = (NonFlakeInput *) args[0]->attrs;
        if (sourceInfo.narHash != lazyInput->lockedInput.narHash)
            throw Error("the content hash of repository '%s' doesn't match the hash recorded in the referring lockfile", sourceInfo.resolvedRef);

    assert(lazyNonFlake->ref.isImmutable());
        state.mkAttrs(v, 8);

    auto nonFlake = getNonFlake(state, lazyNonFlake->ref);
        assert(state.store->isValidPath(sourceInfo.storePath));

    if (nonFlake.sourceInfo.narHash != lazyNonFlake->narHash)
        throw Error("the content hash of repository '%s' doesn't match the hash recorded in the referring lockfile", nonFlake.sourceInfo.resolvedRef);
        mkString(*state.allocAttr(v, state.sOutPath),
            sourceInfo.storePath, {sourceInfo.storePath});

    state.mkAttrs(v, 8);

    assert(state.store->isValidPath(nonFlake.sourceInfo.storePath));

    mkString(*state.allocAttr(v, state.sOutPath),
        nonFlake.sourceInfo.storePath, {nonFlake.sourceInfo.storePath});

    emitSourceInfoAttrs(state, nonFlake.sourceInfo, v);
        emitSourceInfoAttrs(state, sourceInfo, v);
    }
}

void callFlake(EvalState & state,
    const Flake & flake,
    const FlakeInputs & inputs,
    const LockedInputs & lockedInputs,
    Value & vRes)
{
    auto & vInputs = *state.allocValue();

    state.mkAttrs(vInputs,
        inputs.flakeInputs.size() +
        inputs.nonFlakeInputs.size() + 1);
    state.mkAttrs(vInputs, flake.inputs.size() + 1);

    for (auto & dep : inputs.flakeInputs) {
        auto vFlake = state.allocAttr(vInputs, dep.second.id);
    for (auto & [inputId, input] : flake.inputs) {
        auto vFlake = state.allocAttr(vInputs, inputId);
        auto vPrimOp = state.allocValue();
        static auto primOp = new PrimOp(prim_callFlake, 1, state.symbols.create("callFlake"));
        vPrimOp->type = tPrimOp;
        vPrimOp->primOp = primOp;
        auto vArg = state.allocValue();
        vArg->type = tNull;
        auto lockedInput = lockedInputs.inputs.find(inputId);
        assert(lockedInput != lockedInputs.inputs.end());
        // FIXME: leak
        vArg->attrs = (Bindings *) new FlakeInput(dep.second); // evil! also inefficient
        vArg->attrs = (Bindings *) new LazyInput{input.isFlake, lockedInput->second};
        mkApp(*vFlake, *vPrimOp, *vArg);
    }

    for (auto & dep : inputs.nonFlakeInputs) {
        auto vNonFlake = state.allocAttr(vInputs, dep.first);
        auto vPrimOp = state.allocValue();
        static auto primOp = new PrimOp(prim_callNonFlake, 1, state.symbols.create("callNonFlake"));
        vPrimOp->type = tPrimOp;
        vPrimOp->primOp = primOp;
        auto vArg = state.allocValue();
        vArg->type = tNull;
        // FIXME: leak
        vArg->attrs = (Bindings *) new NonFlakeInput(dep.second); // evil! also inefficient
        mkApp(*vNonFlake, *vPrimOp, *vArg);
    }

    auto & vSourceInfo = *state.allocValue();
    state.mkAttrs(vSourceInfo, 8);
    emitSourceInfoAttrs(state, flake.sourceInfo, vSourceInfo);
@@ -58,14 +58,20 @@ struct SourceInfo
    SourceInfo(const FlakeRef & resolvRef) : resolvedRef(resolvRef) {};
};

struct FlakeInput
{
    FlakeRef ref;
    bool isFlake = true;
    FlakeInput(const FlakeRef & ref) : ref(ref) {};
};

struct Flake
{
    FlakeId id;
    FlakeRef originalRef;
    std::string description;
    SourceInfo sourceInfo;
    std::vector<FlakeRef> inputs;
    std::map<FlakeAlias, FlakeRef> nonFlakeInputs;
    std::map<FlakeId, FlakeInput> inputs;
    Value * vOutputs; // FIXME: gc
    unsigned int edition;
@@ -73,14 +79,6 @@ struct Flake
        : originalRef(origRef), sourceInfo(sourceInfo) {};
};

struct NonFlake
{
    FlakeRef originalRef;
    SourceInfo sourceInfo;
    NonFlake(const FlakeRef & origRef, const SourceInfo & sourceInfo)
        : originalRef(origRef), sourceInfo(sourceInfo) {};
};

Flake getFlake(EvalState &, const FlakeRef &);

/* If 'allowLookup' is true, then resolve 'flakeRef' using the
@@ -108,7 +106,7 @@ ResolvedFlake resolveFlake(EvalState &, const FlakeRef &, HandleLockFile);

void callFlake(EvalState & state,
    const Flake & flake,
    const FlakeInputs & inputs,
    const LockedInputs & inputs,
    Value & v);

void callFlake(EvalState & state,
@@ -3,83 +3,57 @@

namespace nix::flake {

AbstractInput::AbstractInput(const nlohmann::json & json)
    : ref(json["uri"])
LockedInput::LockedInput(const nlohmann::json & json)
    : LockedInputs(json)
    , ref(json["uri"])
    , narHash(Hash((std::string) json["narHash"]))
{
    if (!ref.isImmutable())
        throw Error("lockfile contains mutable flakeref '%s'", ref);
}

nlohmann::json AbstractInput::toJson() const
nlohmann::json LockedInput::toJson() const
{
    nlohmann::json json;
    auto json = LockedInputs::toJson();
    json["uri"] = ref.to_string();
    json["narHash"] = narHash.to_string(SRI);
    return json;
}

Path AbstractInput::computeStorePath(Store & store) const
Path LockedInput::computeStorePath(Store & store) const
{
    return store.makeFixedOutputPath(true, narHash, "source");
}

FlakeInput::FlakeInput(const nlohmann::json & json)
    : FlakeInputs(json)
    , AbstractInput(json)
    , id(json["id"])
LockedInputs::LockedInputs(const nlohmann::json & json)
{
}

nlohmann::json FlakeInput::toJson() const
{
    auto json = FlakeInputs::toJson();
    json.update(AbstractInput::toJson());
    json["id"] = id;
    return json;
}

FlakeInputs::FlakeInputs(const nlohmann::json & json)
{
    for (auto & i : json["nonFlakeInputs"].items())
        nonFlakeInputs.insert_or_assign(i.key(), NonFlakeInput(i.value()));

    for (auto & i : json["inputs"].items())
        flakeInputs.insert_or_assign(i.key(), FlakeInput(i.value()));
        inputs.insert_or_assign(i.key(), LockedInput(i.value()));
}

nlohmann::json FlakeInputs::toJson() const
nlohmann::json LockedInputs::toJson() const
{
    nlohmann::json json;
    {
        auto j = nlohmann::json::object();
        for (auto & i : nonFlakeInputs)
        for (auto & i : inputs)
            j[i.first] = i.second.toJson();
        json["nonFlakeInputs"] = std::move(j);
    }
    {
        auto j = nlohmann::json::object();
        for (auto & i : flakeInputs)
            j[i.first.to_string()] = i.second.toJson();
        json["inputs"] = std::move(j);
    }
    return json;
}

bool FlakeInputs::isDirty() const
bool LockedInputs::isDirty() const
{
    for (auto & i : flakeInputs)
    for (auto & i : inputs)
        if (i.second.ref.isDirty() || i.second.isDirty()) return true;

    for (auto & i : nonFlakeInputs)
        if (i.second.ref.isDirty()) return true;

    return false;
}

nlohmann::json LockFile::toJson() const
{
    auto json = FlakeInputs::toJson();
    auto json = LockedInputs::toJson();
    json["version"] = 2;
    return json;
}
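Judging from the serialization code above, a version-2 lock file would be shaped roughly like this; the input name, URI and narHash are made-up placeholders, and the nested "inputs" object appears because each LockedInput carries its own LockedInputs:

{
  "version": 2,
  "inputs": {
    "nixpkgs": {
      "uri": "github:NixOS/nixpkgs/<some-rev>",
      "narHash": "sha256-<hash>",
      "inputs": {}
    }
  }
}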
@@ -10,47 +10,15 @@ class Store;

namespace nix::flake {

/* Common lock file information about a flake input, namely the
   immutable ref and the NAR hash. */
struct AbstractInput
{
    FlakeRef ref;
    Hash narHash;

    AbstractInput(const FlakeRef & flakeRef, const Hash & narHash)
        : ref(flakeRef), narHash(narHash)
    {
        assert(ref.isImmutable());
    };

    AbstractInput(const nlohmann::json & json);

    nlohmann::json toJson() const;

    Path computeStorePath(Store & store) const;
};

/* Lock file information about a non-flake input. */
struct NonFlakeInput : AbstractInput
{
    using AbstractInput::AbstractInput;

    bool operator ==(const NonFlakeInput & other) const
    {
        return ref == other.ref && narHash == other.narHash;
    }
};

struct FlakeInput;
struct LockedInput;

/* Lock file information about the dependencies of a flake. */
struct FlakeInputs
struct LockedInputs
{
    std::map<FlakeRef, FlakeInput> flakeInputs;
    std::map<FlakeAlias, NonFlakeInput> nonFlakeInputs;
    std::map<FlakeId, LockedInput> inputs;

    FlakeInputs() {};
    FlakeInputs(const nlohmann::json & json);
    LockedInputs() {};
    LockedInputs(const nlohmann::json & json);

    nlohmann::json toJson() const;
@@ -60,47 +28,48 @@ struct FlakeInputs
};

/* Lock file information about a flake input. */
struct FlakeInput : FlakeInputs, AbstractInput
struct LockedInput : LockedInputs
{
    FlakeId id;
    FlakeRef ref;
    Hash narHash;

    FlakeInput(const FlakeId & id, const FlakeRef & flakeRef, const Hash & narHash)
        : AbstractInput(flakeRef, narHash), id(id) {};
    LockedInput(const FlakeRef & ref, const Hash & narHash)
        : ref(ref), narHash(narHash)
    {
        assert(ref.isImmutable());
    };

    FlakeInput(const nlohmann::json & json);
    LockedInput(const nlohmann::json & json);

    bool operator ==(const FlakeInput & other) const
    bool operator ==(const LockedInput & other) const
    {
        return
            id == other.id
            && ref == other.ref
            ref == other.ref
            && narHash == other.narHash
            && flakeInputs == other.flakeInputs
            && nonFlakeInputs == other.nonFlakeInputs;
            && inputs == other.inputs;
    }

    nlohmann::json toJson() const;

    Path computeStorePath(Store & store) const;
};

/* An entire lock file. Note that this cannot be a FlakeInput for the
   top-level flake, because then the lock file would need to contain
   the hash of the top-level flake, but committing the lock file
   would invalidate that hash. */
struct LockFile : FlakeInputs
struct LockFile : LockedInputs
{
    bool operator ==(const LockFile & other) const
    {
        return
            flakeInputs == other.flakeInputs
            && nonFlakeInputs == other.nonFlakeInputs;
        return inputs == other.inputs;
    }

    LockFile() {}
    LockFile(const nlohmann::json & json) : FlakeInputs(json) {}
    LockFile(FlakeInput && dep)
    LockFile(const nlohmann::json & json) : LockedInputs(json) {}
    LockFile(LockedInput && dep)
    {
        flakeInputs = std::move(dep.flakeInputs);
        nonFlakeInputs = std::move(dep.nonFlakeInputs);
        inputs = std::move(dep.inputs);
    }

    nlohmann::json toJson() const;
@@ -31,7 +31,6 @@ GitInfo exportGit(ref<Store> store, std::string uri,
    // or revision is given, then allow the use of an unclean working
    // tree.
    if (!ref && !rev && isLocal) {

        bool clean = true;

        try {
@@ -217,25 +217,20 @@ void makeFlakeClosureGCRoot(Store & store,
    assert(store.isValidPath(resFlake.flake.sourceInfo.storePath));
    closure.insert(resFlake.flake.sourceInfo.storePath);

    std::queue<std::reference_wrapper<const flake::FlakeInputs>> queue;
    std::queue<std::reference_wrapper<const flake::LockedInputs>> queue;
    queue.push(resFlake.lockFile);

    while (!queue.empty()) {
        const flake::FlakeInputs & flake = queue.front();
        const flake::LockedInputs & flake = queue.front();
        queue.pop();
        /* Note: due to lazy fetching, these paths might not exist
           yet. */
        for (auto & dep : flake.flakeInputs) {
        for (auto & dep : flake.inputs) {
            auto path = dep.second.computeStorePath(store);
            if (store.isValidPath(path))
                closure.insert(path);
            queue.push(dep.second);
        }
        for (auto & dep : flake.nonFlakeInputs) {
            auto path = dep.second.computeStorePath(store);
            if (store.isValidPath(path))
                closure.insert(path);
        }
    }

    if (closure.empty()) return;
@@ -240,10 +240,13 @@ cat > $flake3Dir/flake.nix <<EOF

  edition = 201909;

  inputs = [ "flake1" "flake2" ];

  nonFlakeInputs = {
    nonFlake = "$nonFlakeDir";
  inputs = {
    flake1 = {};
    flake2 = {};
    nonFlake = {
      uri = "$nonFlakeDir";
      flake = false;
    };
  };

  description = "Fnord";
@@ -306,23 +309,24 @@ cat > $flake3Dir/flake.nix <<EOF

  edition = 201909;

  inputs = [ "flake1" "flake2" ];

  nonFlakeInputs = {
    nonFlake = "$nonFlakeDir";
  inputs = {
    nonFlake = {
      uri = "$nonFlakeDir";
      flake = false;
    };
  };

  description = "Fnord";

  outputs = inputs: rec {
    packages.sth = inputs.flake1.packages.foo;
  outputs = { self, flake1, flake2, nonFlake }: rec {
    packages.sth = flake1.packages.foo;
    packages.fnord =
      with import ./config.nix;
      mkDerivation {
        inherit system;
        name = "fnord";
        buildCommand = ''
          cat \${inputs.nonFlake}/README.md > \$out
          cat \${nonFlake}/README.md > \$out
        '';
      };
  };