Mirror of https://github.com/privatevoid-net/nix-super.git, synced 2024-11-10 08:16:15 +02:00

Merge branch 'master' into overlayfs-store

Commit 8225b7a011. 12 changed files with 219 additions and 7 deletions.
.version:

```diff
@@ -1 +1 @@
-2.17.0
+2.18.0
```
Documentation on merging multiple attribute set declarations:

````diff
@@ -11,8 +11,12 @@
 ```nix
 {
-  nested = { foo = 1; };
-  nested = { ${"ba" + "r"} = 2; };
+  nested = {
+    foo = 1;
+  };
+  nested = {
+    ${"ba" + "r"} = 2;
+  };
 }
 ```
@@ -22,8 +26,17 @@
 { nested = { bar = 2; foo = 1; }; }
 ```
 
-Note that the feature of merging multiple attribute set declarations is of questionable value.
+Note that the feature of merging multiple *full declarations* of attribute sets like `nested` in the example is of questionable value.
 It allows writing expressions that are very hard to read, for instance when there are many lines of code between two declarations of the same attribute.
 This has been around for a long time and is therefore supported for backwards compatibility, but should not be relied upon.
 
+Instead, consider using the *nested attribute path* syntax:
+
+```nix
+{
+  nested.foo = 1;
+  nested.${"ba" + "r"} = 2;
+}
+```
+
 * Tarball flakes can now redirect to an "immutable" URL that will be recorded in lock files. This allows the use of "mutable" tarball URLs like `https://example.org/hello/latest.tar.gz` in flakes. See the [tarball fetcher](../protocols/tarball-fetcher.md) for details.
````
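As a quick cross-check of the documentation above (a sketch, not part of the commit; it assumes evaluation with something like `nix-instantiate --eval`), the merged full declarations and the nested attribute paths denote the same value:

```nix
# Both spellings from the documentation above produce
# { nested = { bar = 2; foo = 1; }; }, so comparing them yields true.
let
  merged = {
    nested = { foo = 1; };
    nested = { ${"ba" + "r"} = 2; };
  };
  paths = {
    nested.foo = 1;
    nested.${"ba" + "r"} = 2;
  };
in
  merged == paths
```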
Release notes for the upcoming release:

```diff
@@ -1 +1,10 @@
 # Release X.Y (202?-??-??)
+
+- Two new builtin functions,
+  [`builtins.parseFlakeRef`](@docroot@/language/builtins.md#builtins-parseFlakeRef)
+  and
+  [`builtins.flakeRefToString`](@docroot@/language/builtins.md#builtins-flakeRefToString),
+  have been added.
+  These functions are useful for converting between flake references encoded as attribute sets and URLs.
+
+- [`builtins.toJSON`](@docroot@/language/builtins.md#builtins-toJSON) now prints [--show-trace](@docroot@/command-ref/conf-file.html#conf-show-trace) items for the path in which it finds an evaluation error.
```
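As a sketch of how the two new builtins fit together (based on the examples in the doc strings and tests added by this commit; both are gated behind the flakes experimental feature), parsing a flake reference and rendering it back should round-trip:

```nix
# Round-trip between URL form and attribute-set form, using the reference
# from the tests added in this commit.
let
  url   = "github:NixOS/nixpkgs/23.05?dir=lib";
  attrs = builtins.parseFlakeRef url;
  # attrs is { dir = "lib"; owner = "NixOS"; ref = "23.05"; repo = "nixpkgs"; type = "github"; }
in
  builtins.flakeRefToString attrs == url   # expected: true
```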
Two new primops for flake references, both registered behind the flakes experimental feature:

````diff
@@ -793,6 +793,101 @@ static RegisterPrimOp r2({
     .experimentalFeature = Xp::Flakes,
 });
 
+static void prim_parseFlakeRef(
+    EvalState & state,
+    const PosIdx pos,
+    Value * * args,
+    Value & v)
+{
+    std::string flakeRefS(state.forceStringNoCtx(*args[0], pos,
+        "while evaluating the argument passed to builtins.parseFlakeRef"));
+    auto attrs = parseFlakeRef(flakeRefS, {}, true).toAttrs();
+    auto binds = state.buildBindings(attrs.size());
+    for (const auto & [key, value] : attrs) {
+        auto s = state.symbols.create(key);
+        auto & vv = binds.alloc(s);
+        std::visit(overloaded {
+            [&vv](const std::string & value) { vv.mkString(value); },
+            [&vv](const uint64_t & value) { vv.mkInt(value); },
+            [&vv](const Explicit<bool> & value) { vv.mkBool(value.t); }
+        }, value);
+    }
+    v.mkAttrs(binds);
+}
+
+static RegisterPrimOp r3({
+    .name = "__parseFlakeRef",
+    .args = {"flake-ref"},
+    .doc = R"(
+      Parse a flake reference, and return its exploded form.
+
+      For example:
+      ```nix
+      builtins.parseFlakeRef "github:NixOS/nixpkgs/23.05?dir=lib"
+      ```
+      evaluates to:
+      ```nix
+      { dir = "lib"; owner = "NixOS"; ref = "23.05"; repo = "nixpkgs"; type = "github"; }
+      ```
+    )",
+    .fun = prim_parseFlakeRef,
+    .experimentalFeature = Xp::Flakes,
+});
+
+
+static void prim_flakeRefToString(
+    EvalState & state,
+    const PosIdx pos,
+    Value * * args,
+    Value & v)
+{
+    state.forceAttrs(*args[0], noPos,
+        "while evaluating the argument passed to builtins.flakeRefToString");
+    fetchers::Attrs attrs;
+    for (const auto & attr : *args[0]->attrs) {
+        auto t = attr.value->type();
+        if (t == nInt) {
+            attrs.emplace(state.symbols[attr.name],
+                (uint64_t) attr.value->integer);
+        } else if (t == nBool) {
+            attrs.emplace(state.symbols[attr.name],
+                Explicit<bool> { attr.value->boolean });
+        } else if (t == nString) {
+            attrs.emplace(state.symbols[attr.name],
+                std::string(attr.value->str()));
+        } else {
+            state.error(
+                "flake reference attribute sets may only contain integers, Booleans, "
+                "and strings, but attribute '%s' is %s",
+                state.symbols[attr.name],
+                showType(*attr.value)).debugThrow<EvalError>();
+        }
+    }
+    auto flakeRef = FlakeRef::fromAttrs(attrs);
+    v.mkString(flakeRef.to_string());
+}
+
+static RegisterPrimOp r4({
+    .name = "__flakeRefToString",
+    .args = {"attrs"},
+    .doc = R"(
+      Convert a flake reference from attribute set format to URL format.
+
+      For example:
+      ```nix
+      builtins.flakeRefToString {
+        dir = "lib"; owner = "NixOS"; ref = "23.05"; repo = "nixpkgs"; type = "github";
+      }
+      ```
+      evaluates to
+      ```nix
+      "github:NixOS/nixpkgs/23.05?dir=lib"
+      ```
+    )",
+    .fun = prim_flakeRefToString,
+    .experimentalFeature = Xp::Flakes,
+});
+
 }
 
 Fingerprint LockedFlake::getFingerprint() const
````
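The conversion in `prim_flakeRefToString` only accepts strings, integers and Booleans in the attribute set; any other value type hits the error branch above. A minimal sketch of that failure case (the input below is hypothetical and not taken from the commit's tests):

```nix
# The null attribute is rejected by the type check in prim_flakeRefToString,
# so evaluating this raises an evaluation error naming the attribute 'dir'.
builtins.flakeRefToString {
  type = "github";
  owner = "NixOS";
  repo = "nixpkgs";
  dir = null;
}
```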
The JSON converter now records the attribute name and list index in error traces:

```diff
@@ -43,6 +43,7 @@ json printValueAsJSON(EvalState & state, bool strict,
         break;
 
     case nNull:
+        // already initialized as null
         break;
 
     case nAttrs: {
@@ -59,7 +60,13 @@ json printValueAsJSON(EvalState & state, bool strict,
                 names.emplace(state.symbols[j.name]);
             for (auto & j : names) {
                 Attr & a(*v.attrs->find(state.symbols.create(j)));
-                out[j] = printValueAsJSON(state, strict, *a.value, a.pos, context, copyToStore);
+                try {
+                    out[j] = printValueAsJSON(state, strict, *a.value, a.pos, context, copyToStore);
+                } catch (Error & e) {
+                    e.addTrace(state.positions[a.pos],
+                        hintfmt("while evaluating attribute '%1%'", j));
+                    throw;
+                }
             }
         } else
             return printValueAsJSON(state, strict, *i->value, i->pos, context, copyToStore);
@@ -68,8 +75,17 @@ json printValueAsJSON(EvalState & state, bool strict,
 
     case nList: {
         out = json::array();
-        for (auto elem : v.listItems())
-            out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore));
+        int i = 0;
+        for (auto elem : v.listItems()) {
+            try {
+                out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore));
+            } catch (Error & e) {
+                e.addTrace({},
+                    hintfmt("while evaluating list element at index %1%", i));
+                throw;
+            }
+            i++;
+        }
         break;
     }
 
```
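The effect of these traces, sketched as a reduced variant of the `eval-fail-toJSON` test added below (the exact wording of the trace is shown in that test's expected output):

```nix
# Converting this value now reports "… while evaluating attribute 'a'" and
# "… while evaluating list element at index 0" before the final error.
builtins.toJSON { a = [ (throw "boom") ]; }
```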
Local store: `LocalStore::verifyPath` now computes the printed store path up front:

```diff
@@ -1634,6 +1634,8 @@ void LocalStore::verifyPath(const StorePath & path, std::function<bool(const Sto
                 canInvalidate = false;
         }
 
+        auto pathS = printStorePath(path);
+
         if (canInvalidate) {
             printInfo("path '%s' disappeared, removing from database...", pathS);
             auto state(_state.lock());
```
tests/lang/eval-fail-toJSON.err.exp (new file, 57 lines):

```
error:
       … while calling the 'toJSON' builtin

         at /pwd/lang/eval-fail-toJSON.nix:1:1:

            1| builtins.toJSON {
             | ^
            2|   a.b = [

       … while evaluating attribute 'a'

         at /pwd/lang/eval-fail-toJSON.nix:2:3:

            1| builtins.toJSON {
            2|   a.b = [
             |   ^
            3|     true

       … while evaluating attribute 'b'

         at /pwd/lang/eval-fail-toJSON.nix:2:3:

            1| builtins.toJSON {
            2|   a.b = [
             |   ^
            3|     true

       … while evaluating list element at index 3

       … while evaluating attribute 'c'

         at /pwd/lang/eval-fail-toJSON.nix:7:7:

            6|     {
            7|       c.d = throw "hah no";
             |       ^
            8|     }

       … while evaluating attribute 'd'

         at /pwd/lang/eval-fail-toJSON.nix:7:7:

            6|     {
            7|       c.d = throw "hah no";
             |       ^
            8|     }

       … while calling the 'throw' builtin

         at /pwd/lang/eval-fail-toJSON.nix:7:13:

            6|     {
            7|       c.d = throw "hah no";
             |             ^
            8|     }

       error: hah no
```
tests/lang/eval-fail-toJSON.nix (new file, 10 lines):

```nix
builtins.toJSON {
  a.b = [
    true
    false
    "it's a bird"
    {
      c.d = throw "hah no";
    }
  ];
}
```
tests/lang/eval-okay-flake-ref-to-string.exp (new file, 1 line):

```
"github:NixOS/nixpkgs/23.05?dir=lib"
```
tests/lang/eval-okay-flake-ref-to-string.nix (new file, 7 lines):

```nix
builtins.flakeRefToString {
  type = "github";
  owner = "NixOS";
  repo = "nixpkgs";
  ref = "23.05";
  dir = "lib";
}
```
tests/lang/eval-okay-parse-flake-ref.exp (new file, 1 line):

```
{ dir = "lib"; owner = "NixOS"; ref = "23.05"; repo = "nixpkgs"; type = "github"; }
```
tests/lang/eval-okay-parse-flake-ref.nix (new file, 1 line):

```nix
builtins.parseFlakeRef "github:NixOS/nixpkgs/23.05?dir=lib"
```