mirror of
https://github.com/privatevoid-net/nix-super.git
synced 2024-11-29 09:06:15 +02:00
Merge remote-tracking branch 'nixos/master'
This commit is contained in:
commit
1314205843
308 changed files with 8846 additions and 4452 deletions
|
@ -17,7 +17,7 @@ indent_style = space
|
||||||
indent_size = 2
|
indent_size = 2
|
||||||
|
|
||||||
# Match c++/shell/perl, set indent to spaces with width of four
|
# Match c++/shell/perl, set indent to spaces with width of four
|
||||||
[*.{hpp,cc,hh,sh,pl}]
|
[*.{hpp,cc,hh,sh,pl,xs}]
|
||||||
indent_style = space
|
indent_style = space
|
||||||
indent_size = 4
|
indent_size = 4
|
||||||
|
|
||||||
|
|
2
.github/workflows/backport.yml
vendored
2
.github/workflows/backport.yml
vendored
|
@ -21,7 +21,7 @@ jobs:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- name: Create backport PRs
|
- name: Create backport PRs
|
||||||
# should be kept in sync with `version`
|
# should be kept in sync with `version`
|
||||||
uses: zeebe-io/backport-action@v1.4.0
|
uses: zeebe-io/backport-action@v2.1.1
|
||||||
with:
|
with:
|
||||||
# Config README: https://github.com/zeebe-io/backport-action#backport-action
|
# Config README: https://github.com/zeebe-io/backport-action#backport-action
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
2
.version
2
.version
|
@ -1 +1 @@
|
||||||
2.19.0
|
2.20.0
|
||||||
|
|
|
@ -8,15 +8,19 @@ CXX = @CXX@
|
||||||
CXXFLAGS = @CXXFLAGS@
|
CXXFLAGS = @CXXFLAGS@
|
||||||
CXXLTO = @CXXLTO@
|
CXXLTO = @CXXLTO@
|
||||||
EDITLINE_LIBS = @EDITLINE_LIBS@
|
EDITLINE_LIBS = @EDITLINE_LIBS@
|
||||||
|
ENABLE_BUILD = @ENABLE_BUILD@
|
||||||
ENABLE_S3 = @ENABLE_S3@
|
ENABLE_S3 = @ENABLE_S3@
|
||||||
|
ENABLE_TESTS = @ENABLE_TESTS@
|
||||||
GTEST_LIBS = @GTEST_LIBS@
|
GTEST_LIBS = @GTEST_LIBS@
|
||||||
HAVE_LIBCPUID = @HAVE_LIBCPUID@
|
HAVE_LIBCPUID = @HAVE_LIBCPUID@
|
||||||
HAVE_SECCOMP = @HAVE_SECCOMP@
|
HAVE_SECCOMP = @HAVE_SECCOMP@
|
||||||
HOST_OS = @host_os@
|
HOST_OS = @host_os@
|
||||||
|
INSTALL_UNIT_TESTS = @INSTALL_UNIT_TESTS@
|
||||||
LDFLAGS = @LDFLAGS@
|
LDFLAGS = @LDFLAGS@
|
||||||
LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
|
LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
|
||||||
LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
|
LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
|
||||||
LIBCURL_LIBS = @LIBCURL_LIBS@
|
LIBCURL_LIBS = @LIBCURL_LIBS@
|
||||||
|
LIBGIT2_LIBS = @LIBGIT2_LIBS@
|
||||||
LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
|
LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
|
||||||
LOWDOWN_LIBS = @LOWDOWN_LIBS@
|
LOWDOWN_LIBS = @LOWDOWN_LIBS@
|
||||||
OPENSSL_LIBS = @OPENSSL_LIBS@
|
OPENSSL_LIBS = @OPENSSL_LIBS@
|
||||||
|
@ -28,6 +32,8 @@ SODIUM_LIBS = @SODIUM_LIBS@
|
||||||
SQLITE3_LIBS = @SQLITE3_LIBS@
|
SQLITE3_LIBS = @SQLITE3_LIBS@
|
||||||
bash = @bash@
|
bash = @bash@
|
||||||
bindir = @bindir@
|
bindir = @bindir@
|
||||||
|
checkbindir = @checkbindir@
|
||||||
|
checklibdir = @checklibdir@
|
||||||
datadir = @datadir@
|
datadir = @datadir@
|
||||||
datarootdir = @datarootdir@
|
datarootdir = @datarootdir@
|
||||||
doc_generate = @doc_generate@
|
doc_generate = @doc_generate@
|
||||||
|
@ -35,6 +41,7 @@ docdir = @docdir@
|
||||||
embedded_sandbox_shell = @embedded_sandbox_shell@
|
embedded_sandbox_shell = @embedded_sandbox_shell@
|
||||||
exec_prefix = @exec_prefix@
|
exec_prefix = @exec_prefix@
|
||||||
includedir = @includedir@
|
includedir = @includedir@
|
||||||
|
internal_api_docs = @internal_api_docs@
|
||||||
libdir = @libdir@
|
libdir = @libdir@
|
||||||
libexecdir = @libexecdir@
|
libexecdir = @libexecdir@
|
||||||
localstatedir = @localstatedir@
|
localstatedir = @localstatedir@
|
||||||
|
@ -46,6 +53,3 @@ sandbox_shell = @sandbox_shell@
|
||||||
storedir = @storedir@
|
storedir = @storedir@
|
||||||
sysconfdir = @sysconfdir@
|
sysconfdir = @sysconfdir@
|
||||||
system = @system@
|
system = @system@
|
||||||
ENABLE_BUILD = @ENABLE_BUILD@
|
|
||||||
ENABLE_TESTS = @ENABLE_TESTS@
|
|
||||||
internal_api_docs = @internal_api_docs@
|
|
||||||
|
|
21
configure.ac
21
configure.ac
|
@ -68,6 +68,9 @@ case "$host_os" in
|
||||||
esac
|
esac
|
||||||
|
|
||||||
|
|
||||||
|
ENSURE_NO_GCC_BUG_80431
|
||||||
|
|
||||||
|
|
||||||
# Check for pubsetbuf.
|
# Check for pubsetbuf.
|
||||||
AC_MSG_CHECKING([for pubsetbuf])
|
AC_MSG_CHECKING([for pubsetbuf])
|
||||||
AC_LANG_PUSH(C++)
|
AC_LANG_PUSH(C++)
|
||||||
|
@ -164,6 +167,18 @@ AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]),
|
||||||
ENABLE_TESTS=$enableval, ENABLE_TESTS=yes)
|
ENABLE_TESTS=$enableval, ENABLE_TESTS=yes)
|
||||||
AC_SUBST(ENABLE_TESTS)
|
AC_SUBST(ENABLE_TESTS)
|
||||||
|
|
||||||
|
AC_ARG_ENABLE(install-unit-tests, AS_HELP_STRING([--enable-install-unit-tests],[Install the unit tests for running later (default no)]),
|
||||||
|
INSTALL_UNIT_TESTS=$enableval, INSTALL_UNIT_TESTS=no)
|
||||||
|
AC_SUBST(INSTALL_UNIT_TESTS)
|
||||||
|
|
||||||
|
AC_ARG_WITH(check-bin-dir, AS_HELP_STRING([--with-check-bin-dir=PATH],[path to install unit tests for running later (defaults to $libexecdir/nix)]),
|
||||||
|
checkbindir=$withval, checkbindir=$libexecdir/nix)
|
||||||
|
AC_SUBST(checkbindir)
|
||||||
|
|
||||||
|
AC_ARG_WITH(check-lib-dir, AS_HELP_STRING([--with-check-lib-dir=PATH],[path to install unit tests for running later (defaults to $libdir)]),
|
||||||
|
checklibdir=$withval, checklibdir=$libdir)
|
||||||
|
AC_SUBST(checklibdir)
|
||||||
|
|
||||||
# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
|
# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
|
||||||
AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
|
AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
|
||||||
internal_api_docs=$enableval, internal_api_docs=no)
|
internal_api_docs=$enableval, internal_api_docs=no)
|
||||||
|
@ -332,9 +347,15 @@ AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation
|
||||||
doc_generate=$enableval, doc_generate=yes)
|
doc_generate=$enableval, doc_generate=yes)
|
||||||
AC_SUBST(doc_generate)
|
AC_SUBST(doc_generate)
|
||||||
|
|
||||||
|
|
||||||
# Look for lowdown library.
|
# Look for lowdown library.
|
||||||
PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])
|
PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])
|
||||||
|
|
||||||
|
|
||||||
|
# Look for libgit2.
|
||||||
|
PKG_CHECK_MODULES([LIBGIT2], [libgit2])
|
||||||
|
|
||||||
|
|
||||||
# Setuid installations.
|
# Setuid installations.
|
||||||
AC_CHECK_FUNCS([setresuid setreuid lchown])
|
AC_CHECK_FUNCS([setresuid setreuid lchown])
|
||||||
|
|
||||||
|
|
|
@ -103,7 +103,7 @@ $(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage
|
||||||
|
|
||||||
$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/generate-settings.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(bindir)/nix
|
$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/generate-settings.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(bindir)/nix
|
||||||
@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
|
@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
|
||||||
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-settings.nix { prefix = "opt-"; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp;
|
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-settings.nix { prefix = "conf"; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp;
|
||||||
@mv $@.tmp $@
|
@mv $@.tmp $@
|
||||||
|
|
||||||
$(d)/nix.json: $(bindir)/nix
|
$(d)/nix.json: $(bindir)/nix
|
||||||
|
|
|
@ -18,6 +18,8 @@
|
||||||
- [Uninstalling Nix](installation/uninstall.md)
|
- [Uninstalling Nix](installation/uninstall.md)
|
||||||
- [Nix Store](store/index.md)
|
- [Nix Store](store/index.md)
|
||||||
- [File System Object](store/file-system-object.md)
|
- [File System Object](store/file-system-object.md)
|
||||||
|
- [Store Object](store/store-object.md)
|
||||||
|
- [Store Path](store/store-path.md)
|
||||||
- [Nix Language](language/index.md)
|
- [Nix Language](language/index.md)
|
||||||
- [Data Types](language/values.md)
|
- [Data Types](language/values.md)
|
||||||
- [Language Constructs](language/constructs.md)
|
- [Language Constructs](language/constructs.md)
|
||||||
|
@ -113,6 +115,7 @@
|
||||||
- [C++ style guide](contributing/cxx.md)
|
- [C++ style guide](contributing/cxx.md)
|
||||||
- [Release Notes](release-notes/release-notes.md)
|
- [Release Notes](release-notes/release-notes.md)
|
||||||
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
|
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
|
||||||
|
- [Release 2.19 (2023-11-17)](release-notes/rl-2.19.md)
|
||||||
- [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md)
|
- [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md)
|
||||||
- [Release 2.17 (2023-07-24)](release-notes/rl-2.17.md)
|
- [Release 2.17 (2023-07-24)](release-notes/rl-2.17.md)
|
||||||
- [Release 2.16 (2023-05-31)](release-notes/rl-2.16.md)
|
- [Release 2.16 (2023-05-31)](release-notes/rl-2.16.md)
|
||||||
|
|
|
@ -63,7 +63,7 @@ The command line interface and Nix expressions are what users deal with most.
|
||||||
> The Nix language itself does not have a notion of *packages* or *configurations*.
|
> The Nix language itself does not have a notion of *packages* or *configurations*.
|
||||||
> As far as we are concerned here, the inputs and results of a build plan are just data.
|
> As far as we are concerned here, the inputs and results of a build plan are just data.
|
||||||
|
|
||||||
Underlying the command line interface and the Nix language evaluator is the [Nix store](../glossary.md#gloss-store), a mechanism to keep track of build plans, data, and references between them.
|
Underlying the command line interface and the Nix language evaluator is the [Nix store](../store/index.md), a mechanism to keep track of build plans, data, and references between them.
|
||||||
It can also execute build plans to produce new data, which are made available to the operating system as files.
|
It can also execute build plans to produce new data, which are made available to the operating system as files.
|
||||||
|
|
||||||
A build plan itself is a series of *build tasks*, together with their build inputs.
|
A build plan itself is a series of *build tasks*, together with their build inputs.
|
||||||
|
|
|
@ -162,6 +162,24 @@ Please observe these guidelines to ease reviews:
|
||||||
> This is a note.
|
> This is a note.
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Highlight examples as such:
|
||||||
|
|
||||||
|
````
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```console
|
||||||
|
> $ nix --version
|
||||||
|
> ```
|
||||||
|
````
|
||||||
|
|
||||||
|
Highlight syntax definiions as such, using [EBNF](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form) notation:
|
||||||
|
|
||||||
|
````
|
||||||
|
> **Syntax**
|
||||||
|
>
|
||||||
|
> *attribute-set* = `{` [ *attribute-name* `=` *expression* `;` ... ] `}`
|
||||||
|
````
|
||||||
|
|
||||||
### The `@docroot@` variable
|
### The `@docroot@` variable
|
||||||
|
|
||||||
`@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own.
|
`@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own.
|
||||||
|
|
|
@ -210,7 +210,7 @@ See [supported compilation environments](#compilation-environments) and instruct
|
||||||
To use the LSP with your editor, you first need to [set up `clangd`](https://clangd.llvm.org/installation#project-setup) by running:
|
To use the LSP with your editor, you first need to [set up `clangd`](https://clangd.llvm.org/installation#project-setup) by running:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
make clean && bear -- make -j$NIX_BUILD_CORES install
|
make clean && bear -- make -j$NIX_BUILD_CORES default check install
|
||||||
```
|
```
|
||||||
|
|
||||||
Configure your editor to use the `clangd` from the shell, either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration).
|
Configure your editor to use the `clangd` from the shell, either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration).
|
||||||
|
|
|
@ -133,17 +133,17 @@ ran test tests/functional/${testName}.sh... [PASS]
|
||||||
or without `make`:
|
or without `make`:
|
||||||
|
|
||||||
```shell-session
|
```shell-session
|
||||||
$ ./mk/run-test.sh tests/functional/${testName}.sh
|
$ ./mk/run-test.sh tests/functional/${testName}.sh tests/functional/init.sh
|
||||||
ran test tests/functional/${testName}.sh... [PASS]
|
ran test tests/functional/${testName}.sh... [PASS]
|
||||||
```
|
```
|
||||||
|
|
||||||
To see the complete output, one can also run:
|
To see the complete output, one can also run:
|
||||||
|
|
||||||
```shell-session
|
```shell-session
|
||||||
$ ./mk/debug-test.sh tests/functional/${testName}.sh
|
$ ./mk/debug-test.sh tests/functional/${testName}.sh tests/functional/init.sh
|
||||||
+ foo
|
+(${testName}.sh:1) foo
|
||||||
output from foo
|
output from foo
|
||||||
+ bar
|
+(${testName}.sh:2) bar
|
||||||
output from bar
|
output from bar
|
||||||
...
|
...
|
||||||
```
|
```
|
||||||
|
@ -175,7 +175,7 @@ edit it like so:
|
||||||
Then, running the test with `./mk/debug-test.sh` will drop you into GDB once the script reaches that point:
|
Then, running the test with `./mk/debug-test.sh` will drop you into GDB once the script reaches that point:
|
||||||
|
|
||||||
```shell-session
|
```shell-session
|
||||||
$ ./mk/debug-test.sh tests/functional/${testName}.sh
|
$ ./mk/debug-test.sh tests/functional/${testName}.sh tests/functional/init.sh
|
||||||
...
|
...
|
||||||
+ gdb blash blub
|
+ gdb blash blub
|
||||||
GNU gdb (GDB) 12.1
|
GNU gdb (GDB) 12.1
|
||||||
|
|
|
@ -59,7 +59,7 @@
|
||||||
- [store]{#gloss-store}
|
- [store]{#gloss-store}
|
||||||
|
|
||||||
A collection of store objects, with operations to manipulate that collection.
|
A collection of store objects, with operations to manipulate that collection.
|
||||||
See [Nix Store] for details.
|
See [Nix store](./store/index.md) for details.
|
||||||
|
|
||||||
There are many types of stores.
|
There are many types of stores.
|
||||||
See [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md) for a complete list.
|
See [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md) for a complete list.
|
||||||
|
@ -86,10 +86,13 @@
|
||||||
|
|
||||||
- [store path]{#gloss-store-path}
|
- [store path]{#gloss-store-path}
|
||||||
|
|
||||||
The location of a [store object] in the file system, i.e., an
|
The location of a [store object](@docroot@/store/index.md#store-object) in the file system, i.e., an immediate child of the Nix store directory.
|
||||||
immediate child of the Nix store directory.
|
|
||||||
|
|
||||||
Example: `/nix/store/a040m110amc4h71lds2jmr8qrkj2jhxd-git-2.38.1`
|
> **Example**
|
||||||
|
>
|
||||||
|
> `/nix/store/a040m110amc4h71lds2jmr8qrkj2jhxd-git-2.38.1`
|
||||||
|
|
||||||
|
See [Store Path](@docroot@/store/store-path.md) for details.
|
||||||
|
|
||||||
[store path]: #gloss-store-path
|
[store path]: #gloss-store-path
|
||||||
|
|
||||||
|
|
|
@ -132,6 +132,32 @@ a = src-set.a; b = src-set.b; c = src-set.c;
|
||||||
when used while defining local variables in a let-expression or while
|
when used while defining local variables in a let-expression or while
|
||||||
defining a set.
|
defining a set.
|
||||||
|
|
||||||
|
In a `let` expression, `inherit` can be used to selectively bring specific attributes of a set into scope. For example
|
||||||
|
|
||||||
|
|
||||||
|
```nix
|
||||||
|
let
|
||||||
|
x = { a = 1; b = 2; };
|
||||||
|
inherit (builtins) attrNames;
|
||||||
|
in
|
||||||
|
{
|
||||||
|
names = attrNames x;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
is equivalent to
|
||||||
|
|
||||||
|
```nix
|
||||||
|
let
|
||||||
|
x = { a = 1; b = 2; };
|
||||||
|
in
|
||||||
|
{
|
||||||
|
names = builtins.attrNames x;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
both evaluate to `{ names = [ "a" "b" ]; }`.
|
||||||
|
|
||||||
## Functions
|
## Functions
|
||||||
|
|
||||||
Functions have the following form:
|
Functions have the following form:
|
||||||
|
@ -146,65 +172,65 @@ three kinds of patterns:
|
||||||
|
|
||||||
- If a pattern is a single identifier, then the function matches any
|
- If a pattern is a single identifier, then the function matches any
|
||||||
argument. Example:
|
argument. Example:
|
||||||
|
|
||||||
```nix
|
```nix
|
||||||
let negate = x: !x;
|
let negate = x: !x;
|
||||||
concat = x: y: x + y;
|
concat = x: y: x + y;
|
||||||
in if negate true then concat "foo" "bar" else ""
|
in if negate true then concat "foo" "bar" else ""
|
||||||
```
|
```
|
||||||
|
|
||||||
Note that `concat` is a function that takes one argument and returns
|
Note that `concat` is a function that takes one argument and returns
|
||||||
a function that takes another argument. This allows partial
|
a function that takes another argument. This allows partial
|
||||||
parameterisation (i.e., only filling some of the arguments of a
|
parameterisation (i.e., only filling some of the arguments of a
|
||||||
function); e.g.,
|
function); e.g.,
|
||||||
|
|
||||||
```nix
|
```nix
|
||||||
map (concat "foo") [ "bar" "bla" "abc" ]
|
map (concat "foo") [ "bar" "bla" "abc" ]
|
||||||
```
|
```
|
||||||
|
|
||||||
evaluates to `[ "foobar" "foobla" "fooabc" ]`.
|
evaluates to `[ "foobar" "foobla" "fooabc" ]`.
|
||||||
|
|
||||||
- A *set pattern* of the form `{ name1, name2, …, nameN }` matches a
|
- A *set pattern* of the form `{ name1, name2, …, nameN }` matches a
|
||||||
set containing the listed attributes, and binds the values of those
|
set containing the listed attributes, and binds the values of those
|
||||||
attributes to variables in the function body. For example, the
|
attributes to variables in the function body. For example, the
|
||||||
function
|
function
|
||||||
|
|
||||||
```nix
|
```nix
|
||||||
{ x, y, z }: z + y + x
|
{ x, y, z }: z + y + x
|
||||||
```
|
```
|
||||||
|
|
||||||
can only be called with a set containing exactly the attributes `x`,
|
can only be called with a set containing exactly the attributes `x`,
|
||||||
`y` and `z`. No other attributes are allowed. If you want to allow
|
`y` and `z`. No other attributes are allowed. If you want to allow
|
||||||
additional arguments, you can use an ellipsis (`...`):
|
additional arguments, you can use an ellipsis (`...`):
|
||||||
|
|
||||||
```nix
|
```nix
|
||||||
{ x, y, z, ... }: z + y + x
|
{ x, y, z, ... }: z + y + x
|
||||||
```
|
```
|
||||||
|
|
||||||
This works on any set that contains at least the three named
|
This works on any set that contains at least the three named
|
||||||
attributes.
|
attributes.
|
||||||
|
|
||||||
It is possible to provide *default values* for attributes, in
|
It is possible to provide *default values* for attributes, in
|
||||||
which case they are allowed to be missing. A default value is
|
which case they are allowed to be missing. A default value is
|
||||||
specified by writing `name ? e`, where *e* is an arbitrary
|
specified by writing `name ? e`, where *e* is an arbitrary
|
||||||
expression. For example,
|
expression. For example,
|
||||||
|
|
||||||
```nix
|
```nix
|
||||||
{ x, y ? "foo", z ? "bar" }: z + y + x
|
{ x, y ? "foo", z ? "bar" }: z + y + x
|
||||||
```
|
```
|
||||||
|
|
||||||
specifies a function that only requires an attribute named `x`, but
|
specifies a function that only requires an attribute named `x`, but
|
||||||
optionally accepts `y` and `z`.
|
optionally accepts `y` and `z`.
|
||||||
|
|
||||||
- An `@`-pattern provides a means of referring to the whole value
|
- An `@`-pattern provides a means of referring to the whole value
|
||||||
being matched:
|
being matched:
|
||||||
|
|
||||||
```nix
|
```nix
|
||||||
args@{ x, y, z, ... }: z + y + x + args.a
|
args@{ x, y, z, ... }: z + y + x + args.a
|
||||||
```
|
```
|
||||||
|
|
||||||
but can also be written as:
|
but can also be written as:
|
||||||
|
|
||||||
```nix
|
```nix
|
||||||
{ x, y, z, ... } @ args: z + y + x + args.a
|
{ x, y, z, ... } @ args: z + y + x + args.a
|
||||||
```
|
```
|
||||||
|
|
|
@ -25,7 +25,7 @@
|
||||||
| Inequality | *expr* `!=` *expr* | none | 11 |
|
| Inequality | *expr* `!=` *expr* | none | 11 |
|
||||||
| Logical conjunction (`AND`) | *bool* `&&` *bool* | left | 12 |
|
| Logical conjunction (`AND`) | *bool* `&&` *bool* | left | 12 |
|
||||||
| Logical disjunction (`OR`) | *bool* <code>\|\|</code> *bool* | left | 13 |
|
| Logical disjunction (`OR`) | *bool* <code>\|\|</code> *bool* | left | 13 |
|
||||||
| [Logical implication] | *bool* `->` *bool* | none | 14 |
|
| [Logical implication] | *bool* `->` *bool* | right | 14 |
|
||||||
|
|
||||||
[string]: ./values.md#type-string
|
[string]: ./values.md#type-string
|
||||||
[path]: ./values.md#type-path
|
[path]: ./values.md#type-path
|
||||||
|
|
77
doc/manual/src/release-notes/rl-2.19.md
Normal file
77
doc/manual/src/release-notes/rl-2.19.md
Normal file
|
@ -0,0 +1,77 @@
|
||||||
|
# Release 2.19 (2023-11-17)
|
||||||
|
|
||||||
|
- The experimental `nix` command can now act as a [shebang interpreter](@docroot@/command-ref/new-cli/nix.md#shebang-interpreter)
|
||||||
|
by appending the contents of any `#! nix` lines and the script's location into a single call.
|
||||||
|
|
||||||
|
- [URL flake references](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references) now support [percent-encoded](https://datatracker.ietf.org/doc/html/rfc3986#section-2.1) characters.
|
||||||
|
|
||||||
|
- [Path-like flake references](@docroot@/command-ref/new-cli/nix3-flake.md#path-like-syntax) now accept arbitrary unicode characters (except `#` and `?`).
|
||||||
|
|
||||||
|
- The experimental feature `repl-flake` is no longer needed, as its functionality is now part of the `flakes` experimental feature. To get the previous behavior, use the `--file/--expr` flags accordingly.
|
||||||
|
|
||||||
|
- There is a new flake installable syntax `flakeref#.attrPath` where the "." prefix specifies that `attrPath` is interpreted from the root of the flake outputs, with no searching of default attribute prefixes like `packages.<SYSTEM>` or `legacyPackages.<SYSTEM>`.
|
||||||
|
|
||||||
|
- Nix adds `apple-virt` to the default system features on macOS systems that support virtualization. This is similar to what's done for the `kvm` system feature on Linux hosts.
|
||||||
|
|
||||||
|
- Add a new built-in function [`builtins.convertHash`](@docroot@/language/builtins.md#builtins-convertHash).
|
||||||
|
|
||||||
|
- `nix-shell` shebang lines now support single-quoted arguments.
|
||||||
|
|
||||||
|
- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/contributing/experimental-features.md#xp-fetch-tree).
|
||||||
|
As described in the documentation for that feature, this is because we anticipate polishing it and then stabilizing it before the rest of flakes.
|
||||||
|
|
||||||
|
- The interface for creating and updating lock files has been overhauled:
|
||||||
|
|
||||||
|
- [`nix flake lock`](@docroot@/command-ref/new-cli/nix3-flake-lock.md) only creates lock files and adds missing inputs now.
|
||||||
|
It will *never* update existing inputs.
|
||||||
|
|
||||||
|
- [`nix flake update`](@docroot@/command-ref/new-cli/nix3-flake-update.md) does the same, but *will* update inputs.
|
||||||
|
- Passing no arguments will update all inputs of the current flake, just like it already did.
|
||||||
|
- Passing input names as arguments will ensure only those are updated. This replaces the functionality of `nix flake lock --update-input`
|
||||||
|
- To operate on a flake outside the current directory, you must now pass `--flake path/to/flake`.
|
||||||
|
|
||||||
|
- The flake-specific flags `--recreate-lock-file` and `--update-input` have been removed from all commands operating on installables.
|
||||||
|
They are superceded by `nix flake update`.
|
||||||
|
|
||||||
|
- Commit signature verification for the [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit) is added as the new [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
|
||||||
|
|
||||||
|
- [`nix path-info --json`](@docroot@/command-ref/new-cli/nix3-path-info.md)
|
||||||
|
(experimental) now returns a JSON map rather than JSON list.
|
||||||
|
The `path` field of each object has instead become the key in the outer map, since it is unique.
|
||||||
|
The `valid` field also goes away because we just use `null` instead.
|
||||||
|
|
||||||
|
- Old way:
|
||||||
|
|
||||||
|
```json5
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"path": "/nix/store/8fv91097mbh5049i9rglc73dx6kjg3qk-bash-5.2-p15",
|
||||||
|
"valid": true,
|
||||||
|
// ...
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": "/nix/store/wffw7l0alvs3iw94cbgi1gmmbmw99sqb-home-manager-path",
|
||||||
|
"valid": false
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
- New way
|
||||||
|
|
||||||
|
```json5
|
||||||
|
{
|
||||||
|
"/nix/store/8fv91097mbh5049i9rglc73dx6kjg3qk-bash-5.2-p15": {
|
||||||
|
// ...
|
||||||
|
},
|
||||||
|
"/nix/store/wffw7l0alvs3iw94cbgi1gmmbmw99sqb-home-manager-path": null,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This makes it match `nix derivation show`, which also maps store paths to information.
|
||||||
|
|
||||||
|
- When Nix is installed using the [binary installer](@docroot@/installation/installing-binary.md), in supported shells (Bash, Zsh, Fish)
|
||||||
|
[`XDG_DATA_DIRS`](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html#variables) is now populated with the path to the `/share` subdirectory of the current profile.
|
||||||
|
This means that command completion scripts, `.desktop` files, and similar artifacts installed via [`nix-env`](@docroot@/command-ref/nix-env.md) or [`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md)
|
||||||
|
(experimental) can be found by any program that follows the [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html).
|
||||||
|
|
||||||
|
- A new command `nix store add` has been added. It replaces `nix store add-file` and `nix store add-path` which are now deprecated.
|
|
@ -1,17 +1,2 @@
|
||||||
# Release X.Y (202?-??-??)
|
# Release X.Y (202?-??-??)
|
||||||
|
|
||||||
- [URL flake references](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references) now support [percent-encoded](https://datatracker.ietf.org/doc/html/rfc3986#section-2.1) characters.
|
|
||||||
|
|
||||||
- [Path-like flake references](@docroot@/command-ref/new-cli/nix3-flake.md#path-like-syntax) now accept arbitrary unicode characters (except `#` and `?`).
|
|
||||||
|
|
||||||
- The experimental feature `repl-flake` is no longer needed, as its functionality is now part of the `flakes` experimental feature. To get the previous behavior, use the `--file/--expr` flags accordingly.
|
|
||||||
|
|
||||||
- Introduce new flake installable syntax `flakeref#.attrPath` where the "." prefix denotes no searching of default attribute prefixes like `packages.<SYSTEM>` or `legacyPackages.<SYSTEM>`.
|
|
||||||
|
|
||||||
- Nix adds `apple-virt` to the default system features on macOS systems that support virtualization. This is similar to what's done for the `kvm` system feature on Linux hosts.
|
|
||||||
|
|
||||||
- Introduce a new built-in function [`builtins.convertHash`](@docroot@/language/builtins.md#builtins-convertHash).
|
|
||||||
|
|
||||||
- `nix-shell` shebang lines now support single-quoted arguments.
|
|
||||||
|
|
||||||
- `builtins.fetchTree` is now marked as stable.
|
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
# Nix Store
|
# Nix Store
|
||||||
|
|
||||||
The *Nix store* is an abstraction used by Nix to store immutable filesystem artifacts (such as software packages) that can have dependencies (*references*) between them.
|
The *Nix store* is an abstraction to store immutable file system data (such as software packages) that can have dependencies on other such data.
|
||||||
There are multiple implementations of the Nix store, such as the actual filesystem (`/nix/store`) and binary caches.
|
|
||||||
|
There are multiple implementations of Nix stores with different capabilities, such as the actual filesystem (`/nix/store`) or binary caches.
|
||||||
|
|
10
doc/manual/src/store/store-object.md
Normal file
10
doc/manual/src/store/store-object.md
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
## Store Object
|
||||||
|
|
||||||
|
A Nix store is a collection of *store objects* with *references* between them.
|
||||||
|
A store object consists of
|
||||||
|
|
||||||
|
- A [file system object](./file-system-object.md) as data
|
||||||
|
- A set of [store paths](./store-path.md) as references to other store objects
|
||||||
|
|
||||||
|
Store objects are [immutable](https://en.wikipedia.org/wiki/Immutable_object):
|
||||||
|
Once created, they do not change until they are deleted.
|
69
doc/manual/src/store/store-path.md
Normal file
69
doc/manual/src/store/store-path.md
Normal file
|
@ -0,0 +1,69 @@
|
||||||
|
# Store Path
|
||||||
|
|
||||||
|
Nix implements references to [store objects](./index.md#store-object) as *store paths*.
|
||||||
|
|
||||||
|
Think of a store path as an [opaque], [unique identifier]:
|
||||||
|
The only way to obtain store path is by adding or building store objects.
|
||||||
|
A store path will always reference exactly one store object.
|
||||||
|
|
||||||
|
[opaque]: https://en.m.wikipedia.org/wiki/Opaque_data_type
|
||||||
|
[unique identifier]: https://en.m.wikipedia.org/wiki/Unique_identifier
|
||||||
|
|
||||||
|
Store paths are pairs of
|
||||||
|
|
||||||
|
- A 20-byte digest for identification
|
||||||
|
- A symbolic name for people to read
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> - Digest: `b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z`
|
||||||
|
> - Name: `firefox-33.1`
|
||||||
|
|
||||||
|
To make store objects accessible to operating system processes, stores have to expose store objects through the file system.
|
||||||
|
|
||||||
|
A store path is rendered to a file system path as the concatenation of
|
||||||
|
|
||||||
|
- [Store directory](#store-directory) (typically `/nix/store`)
|
||||||
|
- Path separator (`/`)
|
||||||
|
- Digest rendered in a custom variant of [Base32](https://en.wikipedia.org/wiki/Base32) (20 arbitrary bytes become 32 ASCII characters)
|
||||||
|
- Hyphen (`-`)
|
||||||
|
- Name
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```
|
||||||
|
> /nix/store/b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z-firefox-33.1
|
||||||
|
> |--------| |------------------------------| |----------|
|
||||||
|
> store directory digest name
|
||||||
|
> ```
|
||||||
|
|
||||||
|
## Store Directory
|
||||||
|
|
||||||
|
Every [Nix store](./index.md) has a store directory.
|
||||||
|
|
||||||
|
Not every store can be accessed through the file system.
|
||||||
|
But if the store has a file system representation, the store directory contains the store’s [file system objects], which can be addressed by [store paths](#store-path).
|
||||||
|
|
||||||
|
[file system objects]: ./file-system-object.md
|
||||||
|
|
||||||
|
This means a store path is not just derived from the referenced store object itself, but depends on the store the store object is in.
|
||||||
|
|
||||||
|
> **Note**
|
||||||
|
>
|
||||||
|
> The store directory defaults to `/nix/store`, but is in principle arbitrary.
|
||||||
|
|
||||||
|
It is important which store a given store object belongs to:
|
||||||
|
Files in the store object can contain store paths, and processes may read these paths.
|
||||||
|
Nix can only guarantee referential integrity if store paths do not cross store boundaries.
|
||||||
|
|
||||||
|
Therefore one can only copy store objects to a different store if
|
||||||
|
|
||||||
|
- The source and target stores' directories match
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
- The store object in question has no references, that is, contains no store paths
|
||||||
|
|
||||||
|
One cannot copy a store object to a store with a different store directory.
|
||||||
|
Instead, it has to be rebuilt, together with all its dependencies.
|
||||||
|
It is in general not enough to replace the store directory string in file contents, as this may render executables unusable by invalidating their internal offsets or checksums.
|
23
flake.lock
23
flake.lock
|
@ -16,6 +16,22 @@
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"libgit2": {
|
||||||
|
"flake": false,
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1697646580,
|
||||||
|
"narHash": "sha256-oX4Z3S9WtJlwvj0uH9HlYcWv+x1hqp8mhXl7HsLu2f0=",
|
||||||
|
"owner": "libgit2",
|
||||||
|
"repo": "libgit2",
|
||||||
|
"rev": "45fd9ed7ae1a9b74b957ef4f337bc3c8b3df01b5",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "libgit2",
|
||||||
|
"repo": "libgit2",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
"lowdown-src": {
|
"lowdown-src": {
|
||||||
"flake": false,
|
"flake": false,
|
||||||
"locked": {
|
"locked": {
|
||||||
|
@ -34,11 +50,11 @@
|
||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1695283060,
|
"lastModified": 1700342017,
|
||||||
"narHash": "sha256-CJz71xhCLlRkdFUSQEL0pIAAfcnWFXMzd9vXhPrnrEg=",
|
"narHash": "sha256-HaibwlWH5LuqsaibW3sIVjZQtEM/jWtOHX4Nk93abGE=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "31ed632c692e6a36cfc18083b88ece892f863ed4",
|
"rev": "decdf666c833a325cb4417041a90681499e06a41",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -67,6 +83,7 @@
|
||||||
"root": {
|
"root": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"flake-compat": "flake-compat",
|
"flake-compat": "flake-compat",
|
||||||
|
"libgit2": "libgit2",
|
||||||
"lowdown-src": "lowdown-src",
|
"lowdown-src": "lowdown-src",
|
||||||
"nixpkgs": "nixpkgs",
|
"nixpkgs": "nixpkgs",
|
||||||
"nixpkgs-regression": "nixpkgs-regression"
|
"nixpkgs-regression": "nixpkgs-regression"
|
||||||
|
|
19
flake.nix
19
flake.nix
|
@ -5,8 +5,9 @@
|
||||||
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
||||||
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
|
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
|
||||||
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
|
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
|
||||||
|
inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; };
|
||||||
|
|
||||||
outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src, flake-compat }:
|
outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src, flake-compat, libgit2 }:
|
||||||
|
|
||||||
let
|
let
|
||||||
inherit (nixpkgs) lib;
|
inherit (nixpkgs) lib;
|
||||||
|
@ -162,6 +163,10 @@
|
||||||
|
|
||||||
testConfigureFlags = [
|
testConfigureFlags = [
|
||||||
"RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include"
|
"RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include"
|
||||||
|
] ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
|
||||||
|
"--enable-install-unit-tests"
|
||||||
|
"--with-check-bin-dir=${builtins.placeholder "check"}/bin"
|
||||||
|
"--with-check-lib-dir=${builtins.placeholder "check"}/lib"
|
||||||
];
|
];
|
||||||
|
|
||||||
internalApiDocsConfigureFlags = [
|
internalApiDocsConfigureFlags = [
|
||||||
|
@ -183,6 +188,7 @@
|
||||||
buildPackages.git
|
buildPackages.git
|
||||||
buildPackages.mercurial # FIXME: remove? only needed for tests
|
buildPackages.mercurial # FIXME: remove? only needed for tests
|
||||||
buildPackages.jq # Also for custom mdBook preprocessor.
|
buildPackages.jq # Also for custom mdBook preprocessor.
|
||||||
|
buildPackages.openssh # only needed for tests (ssh-keygen)
|
||||||
]
|
]
|
||||||
++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)];
|
++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)];
|
||||||
|
|
||||||
|
@ -191,6 +197,11 @@
|
||||||
bzip2 xz brotli editline
|
bzip2 xz brotli editline
|
||||||
openssl sqlite
|
openssl sqlite
|
||||||
libarchive
|
libarchive
|
||||||
|
(pkgs.libgit2.overrideAttrs (attrs: {
|
||||||
|
src = libgit2;
|
||||||
|
version = libgit2.lastModifiedDate;
|
||||||
|
cmakeFlags = (attrs.cmakeFlags or []) ++ ["-DUSE_SSH=exec"];
|
||||||
|
}))
|
||||||
boost
|
boost
|
||||||
lowdown-nix
|
lowdown-nix
|
||||||
libsodium
|
libsodium
|
||||||
|
@ -401,7 +412,8 @@
|
||||||
src = nixSrc;
|
src = nixSrc;
|
||||||
VERSION_SUFFIX = versionSuffix;
|
VERSION_SUFFIX = versionSuffix;
|
||||||
|
|
||||||
outputs = [ "out" "dev" "doc" ];
|
outputs = [ "out" "dev" "doc" ]
|
||||||
|
++ lib.optional (currentStdenv.hostPlatform != currentStdenv.buildPlatform) "check";
|
||||||
|
|
||||||
nativeBuildInputs = nativeBuildDeps;
|
nativeBuildInputs = nativeBuildDeps;
|
||||||
buildInputs = buildDeps
|
buildInputs = buildDeps
|
||||||
|
@ -707,7 +719,8 @@
|
||||||
stdenv.mkDerivation {
|
stdenv.mkDerivation {
|
||||||
name = "nix-super";
|
name = "nix-super";
|
||||||
|
|
||||||
outputs = [ "out" "dev" "doc" ];
|
outputs = [ "out" "dev" "doc" ]
|
||||||
|
++ lib.optional (stdenv.hostPlatform != stdenv.buildPlatform) "check";
|
||||||
|
|
||||||
nativeBuildInputs = nativeBuildDeps
|
nativeBuildInputs = nativeBuildDeps
|
||||||
++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear
|
++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear
|
||||||
|
|
64
m4/gcc_bug_80431.m4
Normal file
64
m4/gcc_bug_80431.m4
Normal file
|
@ -0,0 +1,64 @@
|
||||||
|
# Ensure that this bug is not present in the C++ toolchain we are using.
|
||||||
|
#
|
||||||
|
# URL for bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80431
|
||||||
|
#
|
||||||
|
# The test program is from that issue, with only a slight modification
|
||||||
|
# to set an exit status instead of printing strings.
|
||||||
|
AC_DEFUN([ENSURE_NO_GCC_BUG_80431],
|
||||||
|
[
|
||||||
|
AC_MSG_CHECKING([that GCC bug 80431 is fixed])
|
||||||
|
AC_LANG_PUSH(C++)
|
||||||
|
AC_RUN_IFELSE(
|
||||||
|
[AC_LANG_PROGRAM(
|
||||||
|
[[
|
||||||
|
#include <cstdio>
|
||||||
|
|
||||||
|
static bool a = true;
|
||||||
|
static bool b = true;
|
||||||
|
|
||||||
|
struct Options { };
|
||||||
|
|
||||||
|
struct Option
|
||||||
|
{
|
||||||
|
Option(Options * options)
|
||||||
|
{
|
||||||
|
a = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
~Option()
|
||||||
|
{
|
||||||
|
b = false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
struct MyOptions : Options { };
|
||||||
|
|
||||||
|
struct MyOptions2 : virtual MyOptions
|
||||||
|
{
|
||||||
|
Option foo{this};
|
||||||
|
};
|
||||||
|
]],
|
||||||
|
[[
|
||||||
|
{
|
||||||
|
MyOptions2 opts;
|
||||||
|
}
|
||||||
|
return (a << 1) | b;
|
||||||
|
]])],
|
||||||
|
[status_80431=0],
|
||||||
|
[status_80431=$?],
|
||||||
|
[
|
||||||
|
# Assume we're bug-free when cross-compiling
|
||||||
|
])
|
||||||
|
AC_LANG_POP(C++)
|
||||||
|
AS_CASE([$status_80431],
|
||||||
|
[0],[
|
||||||
|
AC_MSG_RESULT(yes)
|
||||||
|
],
|
||||||
|
[2],[
|
||||||
|
AC_MSG_RESULT(no)
|
||||||
|
AC_MSG_ERROR(Cannot build Nix with C++ compiler with this bug)
|
||||||
|
],
|
||||||
|
[
|
||||||
|
AC_MSG_RESULT(unexpected result $status_80431: not expected failure with bug, ignoring)
|
||||||
|
])
|
||||||
|
])
|
|
@ -2,7 +2,7 @@
|
||||||
|
|
||||||
## Motivation
|
## Motivation
|
||||||
|
|
||||||
The team's main responsibility is to set a direction for the development of Nix and ensure that the code is in good shape.
|
The team's main responsibility is to guide and direct the development of Nix and ensure that the code is in good shape.
|
||||||
|
|
||||||
We aim to achieve this by improving the contributor experience and attracting more maintainers – that is, by helping other people contributing to Nix and eventually taking responsibility – in order to scale the development process to match users' needs.
|
We aim to achieve this by improving the contributor experience and attracting more maintainers – that is, by helping other people contributing to Nix and eventually taking responsibility – in order to scale the development process to match users' needs.
|
||||||
|
|
||||||
|
|
|
@ -1,15 +1,27 @@
|
||||||
test_dir=tests/functional
|
# Remove overall test dir (at most one of the two should match) and
|
||||||
|
# remove file extension.
|
||||||
|
test_name=$(echo -n "$test" | sed \
|
||||||
|
-e "s|^unit-test-data/||" \
|
||||||
|
-e "s|^tests/functional/||" \
|
||||||
|
-e "s|\.sh$||" \
|
||||||
|
)
|
||||||
|
|
||||||
test=$(echo -n "$test" | sed -e "s|^$test_dir/||")
|
TESTS_ENVIRONMENT=(
|
||||||
|
"TEST_NAME=$test_name"
|
||||||
TESTS_ENVIRONMENT=("TEST_NAME=${test%.*}" 'NIX_REMOTE=')
|
'NIX_REMOTE='
|
||||||
|
'PS4=+(${BASH_SOURCE[0]-$0}:$LINENO) '
|
||||||
|
)
|
||||||
|
|
||||||
: ${BASH:=/usr/bin/env bash}
|
: ${BASH:=/usr/bin/env bash}
|
||||||
|
|
||||||
|
run () {
|
||||||
|
cd "$(dirname $1)" && env "${TESTS_ENVIRONMENT[@]}" $BASH -x -e -u -o pipefail $(basename $1)
|
||||||
|
}
|
||||||
|
|
||||||
init_test () {
|
init_test () {
|
||||||
cd "$test_dir" && env "${TESTS_ENVIRONMENT[@]}" $BASH -e init.sh 2>/dev/null > /dev/null
|
run "$init" 2>/dev/null > /dev/null
|
||||||
}
|
}
|
||||||
|
|
||||||
run_test_proper () {
|
run_test_proper () {
|
||||||
cd "$test_dir/$(dirname $test)" && env "${TESTS_ENVIRONMENT[@]}" $BASH -e $(basename $test)
|
run "$test"
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,9 +3,12 @@
|
||||||
set -eu -o pipefail
|
set -eu -o pipefail
|
||||||
|
|
||||||
test=$1
|
test=$1
|
||||||
|
init=${2-}
|
||||||
|
|
||||||
dir="$(dirname "${BASH_SOURCE[0]}")"
|
dir="$(dirname "${BASH_SOURCE[0]}")"
|
||||||
source "$dir/common-test.sh"
|
source "$dir/common-test.sh"
|
||||||
|
|
||||||
(init_test)
|
if [ -n "$init" ]; then
|
||||||
|
(init_test)
|
||||||
|
fi
|
||||||
run_test_proper
|
run_test_proper
|
||||||
|
|
|
@ -122,14 +122,15 @@ $(foreach script, $(bin-scripts), $(eval $(call install-program-in,$(script),$(b
|
||||||
$(foreach script, $(bin-scripts), $(eval programs-list += $(script)))
|
$(foreach script, $(bin-scripts), $(eval programs-list += $(script)))
|
||||||
$(foreach script, $(noinst-scripts), $(eval programs-list += $(script)))
|
$(foreach script, $(noinst-scripts), $(eval programs-list += $(script)))
|
||||||
$(foreach template, $(template-files), $(eval $(call instantiate-template,$(template))))
|
$(foreach template, $(template-files), $(eval $(call instantiate-template,$(template))))
|
||||||
|
install_test_init=tests/functional/init.sh
|
||||||
$(foreach test, $(install-tests), \
|
$(foreach test, $(install-tests), \
|
||||||
$(eval $(call run-install-test,$(test))) \
|
$(eval $(call run-test,$(test),$(install_test_init))) \
|
||||||
$(eval installcheck: $(test).test))
|
$(eval installcheck: $(test).test))
|
||||||
$(foreach test-group, $(install-tests-groups), \
|
$(foreach test-group, $(install-tests-groups), \
|
||||||
$(eval $(call run-install-test-group,$(test-group))) \
|
$(eval $(call run-test-group,$(test-group),$(install_test_init))) \
|
||||||
$(eval installcheck: $(test-group).test-group) \
|
$(eval installcheck: $(test-group).test-group) \
|
||||||
$(foreach test, $($(test-group)-tests), \
|
$(foreach test, $($(test-group)-tests), \
|
||||||
$(eval $(call run-install-test,$(test))) \
|
$(eval $(call run-test,$(test),$(install_test_init))) \
|
||||||
$(eval $(test-group).test-group: $(test).test)))
|
$(eval $(test-group).test-group: $(test).test)))
|
||||||
|
|
||||||
$(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file))))))
|
$(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file))))))
|
||||||
|
|
|
@ -8,6 +8,7 @@ yellow=""
|
||||||
normal=""
|
normal=""
|
||||||
|
|
||||||
test=$1
|
test=$1
|
||||||
|
init=${2-}
|
||||||
|
|
||||||
dir="$(dirname "${BASH_SOURCE[0]}")"
|
dir="$(dirname "${BASH_SOURCE[0]}")"
|
||||||
source "$dir/common-test.sh"
|
source "$dir/common-test.sh"
|
||||||
|
@ -21,7 +22,9 @@ if [ -t 1 ]; then
|
||||||
fi
|
fi
|
||||||
|
|
||||||
run_test () {
|
run_test () {
|
||||||
(init_test 2>/dev/null > /dev/null)
|
if [ -n "$init" ]; then
|
||||||
|
(init_test 2>/dev/null > /dev/null)
|
||||||
|
fi
|
||||||
log="$(run_test_proper 2>&1)" && status=0 || status=$?
|
log="$(run_test_proper 2>&1)" && status=0 || status=$?
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
21
mk/tests.mk
21
mk/tests.mk
|
@ -2,19 +2,22 @@
|
||||||
|
|
||||||
test-deps =
|
test-deps =
|
||||||
|
|
||||||
define run-install-test
|
define run-bash
|
||||||
|
|
||||||
.PHONY: $1.test
|
.PHONY: $1
|
||||||
$1.test: $1 $(test-deps)
|
$1: $2
|
||||||
@env BASH=$(bash) $(bash) mk/run-test.sh $1 < /dev/null
|
@env BASH=$(bash) $(bash) $3 < /dev/null
|
||||||
|
|
||||||
.PHONY: $1.test-debug
|
|
||||||
$1.test-debug: $1 $(test-deps)
|
|
||||||
@env BASH=$(bash) $(bash) mk/debug-test.sh $1 < /dev/null
|
|
||||||
|
|
||||||
endef
|
endef
|
||||||
|
|
||||||
define run-install-test-group
|
define run-test
|
||||||
|
|
||||||
|
$(eval $(call run-bash,$1.test,$1 $(test-deps),mk/run-test.sh $1 $2))
|
||||||
|
$(eval $(call run-bash,$1.test-debug,$1 $(test-deps),mk/debug-test.sh $1 $2))
|
||||||
|
|
||||||
|
endef
|
||||||
|
|
||||||
|
define run-test-group
|
||||||
|
|
||||||
.PHONY: $1.test-group
|
.PHONY: $1.test-group
|
||||||
|
|
||||||
|
|
|
@ -11,7 +11,6 @@
|
||||||
#include "derivations.hh"
|
#include "derivations.hh"
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "util.hh"
|
|
||||||
#include "crypto.hh"
|
#include "crypto.hh"
|
||||||
|
|
||||||
#include <sodium.h>
|
#include <sodium.h>
|
||||||
|
|
|
@ -19,6 +19,14 @@ set __ETC_PROFILE_NIX_SOURCED 1
|
||||||
|
|
||||||
set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"
|
set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"
|
||||||
|
|
||||||
|
# Populate bash completions, .desktop files, etc
|
||||||
|
if test -z "$XDG_DATA_DIRS"
|
||||||
|
# According to XDG spec the default is /usr/local/share:/usr/share, don't set something that prevents that default
|
||||||
|
set --export XDG_DATA_DIRS "/usr/local/share:/usr/share:/nix/var/nix/profiles/default/share"
|
||||||
|
else
|
||||||
|
set --export XDG_DATA_DIRS "$XDG_DATA_DIRS:/nix/var/nix/profiles/default/share"
|
||||||
|
end
|
||||||
|
|
||||||
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
||||||
if test -n "$NIX_SSH_CERT_FILE"
|
if test -n "$NIX_SSH_CERT_FILE"
|
||||||
: # Allow users to override the NIX_SSL_CERT_FILE
|
: # Allow users to override the NIX_SSL_CERT_FILE
|
||||||
|
|
|
@ -30,6 +30,14 @@ fi
|
||||||
|
|
||||||
export NIX_PROFILES="@localstatedir@/nix/profiles/default $NIX_LINK"
|
export NIX_PROFILES="@localstatedir@/nix/profiles/default $NIX_LINK"
|
||||||
|
|
||||||
|
# Populate bash completions, .desktop files, etc
|
||||||
|
if [ -z "$XDG_DATA_DIRS" ]; then
|
||||||
|
# According to XDG spec the default is /usr/local/share:/usr/share, don't set something that prevents that default
|
||||||
|
export XDG_DATA_DIRS="/usr/local/share:/usr/share:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
|
||||||
|
else
|
||||||
|
export XDG_DATA_DIRS="$XDG_DATA_DIRS:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
|
||||||
|
fi
|
||||||
|
|
||||||
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
||||||
if [ -n "${NIX_SSL_CERT_FILE:-}" ]; then
|
if [ -n "${NIX_SSL_CERT_FILE:-}" ]; then
|
||||||
: # Allow users to override the NIX_SSL_CERT_FILE
|
: # Allow users to override the NIX_SSL_CERT_FILE
|
||||||
|
|
|
@ -20,6 +20,14 @@ if test -n "$HOME" && test -n "$USER"
|
||||||
# This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
|
# This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
|
||||||
set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"
|
set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"
|
||||||
|
|
||||||
|
# Populate bash completions, .desktop files, etc
|
||||||
|
if test -z "$XDG_DATA_DIRS"
|
||||||
|
# According to XDG spec the default is /usr/local/share:/usr/share, don't set something that prevents that default
|
||||||
|
set --export XDG_DATA_DIRS "/usr/local/share:/usr/share:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
|
||||||
|
else
|
||||||
|
set --export XDG_DATA_DIRS "$XDG_DATA_DIRS:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
|
||||||
|
end
|
||||||
|
|
||||||
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
||||||
if test -n "$NIX_SSH_CERT_FILE"
|
if test -n "$NIX_SSH_CERT_FILE"
|
||||||
: # Allow users to override the NIX_SSL_CERT_FILE
|
: # Allow users to override the NIX_SSL_CERT_FILE
|
||||||
|
|
|
@ -32,6 +32,14 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then
|
||||||
# This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
|
# This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
|
||||||
export NIX_PROFILES="@localstatedir@/nix/profiles/default $NIX_LINK"
|
export NIX_PROFILES="@localstatedir@/nix/profiles/default $NIX_LINK"
|
||||||
|
|
||||||
|
# Populate bash completions, .desktop files, etc
|
||||||
|
if [ -z "$XDG_DATA_DIRS" ]; then
|
||||||
|
# According to XDG spec the default is /usr/local/share:/usr/share, don't set something that prevents that default
|
||||||
|
export XDG_DATA_DIRS="/usr/local/share:/usr/share:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
|
||||||
|
else
|
||||||
|
export XDG_DATA_DIRS="$XDG_DATA_DIRS:$NIX_LINK/share:/nix/var/nix/profiles/default/share"
|
||||||
|
fi
|
||||||
|
|
||||||
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
||||||
if [ -e /etc/ssl/certs/ca-certificates.crt ]; then # NixOS, Ubuntu, Debian, Gentoo, Arch
|
if [ -e /etc/ssl/certs/ca-certificates.crt ]; then # NixOS, Ubuntu, Debian, Gentoo, Arch
|
||||||
export NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
|
export NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
|
||||||
|
|
|
@ -1,3 +1,6 @@
|
||||||
|
#pragma once
|
||||||
|
///@file
|
||||||
|
|
||||||
#include "derived-path.hh"
|
#include "derived-path.hh"
|
||||||
#include "realisation.hh"
|
#include "realisation.hh"
|
||||||
|
|
||||||
|
|
|
@ -175,7 +175,7 @@ void BuiltPathsCommand::run(ref<Store> store, Installables && installables)
|
||||||
throw UsageError("'--all' does not expect arguments");
|
throw UsageError("'--all' does not expect arguments");
|
||||||
// XXX: Only uses opaque paths, ignores all the realisations
|
// XXX: Only uses opaque paths, ignores all the realisations
|
||||||
for (auto & p : store->queryAllValidPaths())
|
for (auto & p : store->queryAllValidPaths())
|
||||||
paths.push_back(BuiltPath::Opaque{p});
|
paths.emplace_back(BuiltPath::Opaque{p});
|
||||||
} else {
|
} else {
|
||||||
paths = Installable::toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables);
|
paths = Installable::toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables);
|
||||||
if (recursive) {
|
if (recursive) {
|
||||||
|
@ -188,7 +188,7 @@ void BuiltPathsCommand::run(ref<Store> store, Installables && installables)
|
||||||
}
|
}
|
||||||
store->computeFSClosure(pathsRoots, pathsClosure);
|
store->computeFSClosure(pathsRoots, pathsClosure);
|
||||||
for (auto & path : pathsClosure)
|
for (auto & path : pathsClosure)
|
||||||
paths.push_back(BuiltPath::Opaque{path});
|
paths.emplace_back(BuiltPath::Opaque{path});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -375,6 +375,12 @@ public:
|
||||||
const std::string selfCommandName;
|
const std::string selfCommandName;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
void completeFlakeInputPath(
|
||||||
|
AddCompletions & completions,
|
||||||
|
ref<EvalState> evalState,
|
||||||
|
const std::vector<FlakeRef> & flakeRefs,
|
||||||
|
std::string_view prefix);
|
||||||
|
|
||||||
void completeFlakeRef(AddCompletions & completions, ref<Store> store, std::string_view prefix);
|
void completeFlakeRef(AddCompletions & completions, ref<Store> store, std::string_view prefix);
|
||||||
|
|
||||||
void completeFlakeRefWithFragment(
|
void completeFlakeRefWithFragment(
|
||||||
|
|
|
@ -2,7 +2,6 @@
|
||||||
#include "common-eval-args.hh"
|
#include "common-eval-args.hh"
|
||||||
#include "shared.hh"
|
#include "shared.hh"
|
||||||
#include "filetransfer.hh"
|
#include "filetransfer.hh"
|
||||||
#include "util.hh"
|
|
||||||
#include "eval.hh"
|
#include "eval.hh"
|
||||||
#include "fetchers.hh"
|
#include "fetchers.hh"
|
||||||
#include "registry.hh"
|
#include "registry.hh"
|
||||||
|
@ -165,7 +164,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
|
||||||
return res.finish();
|
return res.finish();
|
||||||
}
|
}
|
||||||
|
|
||||||
SourcePath lookupFileArg(EvalState & state, std::string_view s)
|
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir)
|
||||||
{
|
{
|
||||||
if (EvalSettings::isPseudoUrl(s)) {
|
if (EvalSettings::isPseudoUrl(s)) {
|
||||||
auto storePath = fetchers::downloadTarball(
|
auto storePath = fetchers::downloadTarball(
|
||||||
|
@ -186,7 +185,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s)
|
||||||
}
|
}
|
||||||
|
|
||||||
else
|
else
|
||||||
return state.rootPath(CanonPath::fromCwd(s));
|
return state.rootPath(CanonPath(s, baseDir));
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
///@file
|
///@file
|
||||||
|
|
||||||
#include "args.hh"
|
#include "args.hh"
|
||||||
|
#include "canon-path.hh"
|
||||||
#include "common-args.hh"
|
#include "common-args.hh"
|
||||||
#include "search-path.hh"
|
#include "search-path.hh"
|
||||||
|
|
||||||
|
@ -28,6 +29,6 @@ private:
|
||||||
std::map<std::string, std::string> autoArgs;
|
std::map<std::string, std::string> autoArgs;
|
||||||
};
|
};
|
||||||
|
|
||||||
SourcePath lookupFileArg(EvalState & state, std::string_view s);
|
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir = CanonPath::fromCwd());
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
#include "util.hh"
|
|
||||||
#include "editor-for.hh"
|
#include "editor-for.hh"
|
||||||
|
#include "environment-variables.hh"
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
|
|
@ -4,7 +4,6 @@
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
#include "installable-value.hh"
|
#include "installable-value.hh"
|
||||||
#include "outputs-spec.hh"
|
#include "outputs-spec.hh"
|
||||||
#include "util.hh"
|
|
||||||
#include "command.hh"
|
#include "command.hh"
|
||||||
#include "attr-path.hh"
|
#include "attr-path.hh"
|
||||||
#include "common-eval-args.hh"
|
#include "common-eval-args.hh"
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
#include "installable-attr-path.hh"
|
#include "installable-attr-path.hh"
|
||||||
#include "installable-flake.hh"
|
#include "installable-flake.hh"
|
||||||
#include "outputs-spec.hh"
|
#include "outputs-spec.hh"
|
||||||
|
#include "users.hh"
|
||||||
#include "util.hh"
|
#include "util.hh"
|
||||||
#include "command.hh"
|
#include "command.hh"
|
||||||
#include "attr-path.hh"
|
#include "attr-path.hh"
|
||||||
|
@ -32,7 +33,7 @@ const static std::regex attrPathRegex(
|
||||||
R"((?:[a-zA-Z0-9_"-][a-zA-Z0-9_".,^\*-]*))",
|
R"((?:[a-zA-Z0-9_"-][a-zA-Z0-9_".,^\*-]*))",
|
||||||
std::regex::ECMAScript);
|
std::regex::ECMAScript);
|
||||||
|
|
||||||
static void completeFlakeInputPath(
|
void completeFlakeInputPath(
|
||||||
AddCompletions & completions,
|
AddCompletions & completions,
|
||||||
ref<EvalState> evalState,
|
ref<EvalState> evalState,
|
||||||
const std::vector<FlakeRef> & flakeRefs,
|
const std::vector<FlakeRef> & flakeRefs,
|
||||||
|
@ -50,13 +51,6 @@ MixFlakeOptions::MixFlakeOptions()
|
||||||
{
|
{
|
||||||
auto category = "Common flake-related options";
|
auto category = "Common flake-related options";
|
||||||
|
|
||||||
addFlag({
|
|
||||||
.longName = "recreate-lock-file",
|
|
||||||
.description = "Recreate the flake's lock file from scratch.",
|
|
||||||
.category = category,
|
|
||||||
.handler = {&lockFlags.recreateLockFile, true}
|
|
||||||
});
|
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "no-update-lock-file",
|
.longName = "no-update-lock-file",
|
||||||
.description = "Do not allow any updates to the flake's lock file.",
|
.description = "Do not allow any updates to the flake's lock file.",
|
||||||
|
@ -89,19 +83,6 @@ MixFlakeOptions::MixFlakeOptions()
|
||||||
.handler = {&lockFlags.commitLockFile, true}
|
.handler = {&lockFlags.commitLockFile, true}
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
|
||||||
.longName = "update-input",
|
|
||||||
.description = "Update a specific flake input (ignoring its previous entry in the lock file).",
|
|
||||||
.category = category,
|
|
||||||
.labels = {"input-path"},
|
|
||||||
.handler = {[&](std::string s) {
|
|
||||||
lockFlags.inputUpdates.insert(flake::parseInputPath(s));
|
|
||||||
}},
|
|
||||||
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
|
|
||||||
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
|
|
||||||
}}
|
|
||||||
});
|
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "override-input",
|
.longName = "override-input",
|
||||||
.description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`). This implies `--no-write-lock-file`.",
|
.description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`). This implies `--no-write-lock-file`.",
|
||||||
|
@ -111,7 +92,7 @@ MixFlakeOptions::MixFlakeOptions()
|
||||||
lockFlags.writeLockFile = false;
|
lockFlags.writeLockFile = false;
|
||||||
lockFlags.inputOverrides.insert_or_assign(
|
lockFlags.inputOverrides.insert_or_assign(
|
||||||
flake::parseInputPath(inputPath),
|
flake::parseInputPath(inputPath),
|
||||||
parseFlakeRef(flakeRef, absPath("."), true));
|
parseFlakeRef(flakeRef, absPath(getCommandBaseDir()), true));
|
||||||
}},
|
}},
|
||||||
.completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) {
|
.completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) {
|
||||||
if (n == 0) {
|
if (n == 0) {
|
||||||
|
@ -153,7 +134,7 @@ MixFlakeOptions::MixFlakeOptions()
|
||||||
auto evalState = getEvalState();
|
auto evalState = getEvalState();
|
||||||
auto flake = flake::lockFlake(
|
auto flake = flake::lockFlake(
|
||||||
*evalState,
|
*evalState,
|
||||||
parseFlakeRef(flakeRef, absPath(".")),
|
parseFlakeRef(flakeRef, absPath(getCommandBaseDir())),
|
||||||
{ .writeLockFile = false });
|
{ .writeLockFile = false });
|
||||||
for (auto & [inputName, input] : flake.lockFile.root->inputs) {
|
for (auto & [inputName, input] : flake.lockFile.root->inputs) {
|
||||||
auto input2 = flake.lockFile.findInput({inputName}); // resolve 'follows' nodes
|
auto input2 = flake.lockFile.findInput({inputName}); // resolve 'follows' nodes
|
||||||
|
@ -387,6 +368,8 @@ void completeFlakeRefWithFragment(
|
||||||
isAttrPath
|
isAttrPath
|
||||||
? std::string("flake:default")
|
? std::string("flake:default")
|
||||||
: std::string(prefix.substr(0, hash));
|
: std::string(prefix.substr(0, hash));
|
||||||
|
|
||||||
|
// TODO: ideally this would use the command base directory instead of assuming ".".
|
||||||
auto flakeRef = parseFlakeRef(expandTilde(flakeRefS), absPath("."));
|
auto flakeRef = parseFlakeRef(expandTilde(flakeRefS), absPath("."));
|
||||||
|
|
||||||
auto evalCache = openEvalCache(*evalState,
|
auto evalCache = openEvalCache(*evalState,
|
||||||
|
@ -573,12 +556,13 @@ Installables SourceExprCommand::parseInstallables(
|
||||||
state->eval(e, *vFile);
|
state->eval(e, *vFile);
|
||||||
}
|
}
|
||||||
else if (file)
|
else if (file)
|
||||||
state->evalFile(lookupFileArg(*state, *file), *vFile);
|
state->evalFile(lookupFileArg(*state, *file, CanonPath::fromCwd(getCommandBaseDir())), *vFile);
|
||||||
else if (callPackageFile) {
|
else if (callPackageFile) {
|
||||||
auto e = state->parseExprFromString(fmt("(import <nixpkgs> {}).callPackage %s {}", CanonPath::fromCwd(*callPackageFile)), state->rootPath(CanonPath::fromCwd()));
|
auto e = state->parseExprFromString(fmt("(import <nixpkgs> {}).callPackage %s {}", CanonPath::fromCwd(*callPackageFile)), state->rootPath(CanonPath::fromCwd()));
|
||||||
state->eval(e, *vFile);
|
state->eval(e, *vFile);
|
||||||
} else {
|
} else {
|
||||||
auto e = state->parseExprFromString(*expr, state->rootPath(CanonPath::fromCwd()));
|
CanonPath dir(CanonPath::fromCwd(getCommandBaseDir()));
|
||||||
|
auto e = state->parseExprFromString(*expr, state->rootPath(dir));
|
||||||
state->eval(e, *vFile);
|
state->eval(e, *vFile);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -629,7 +613,7 @@ Installables SourceExprCommand::parseInstallables(
|
||||||
|
|
||||||
auto actualRef = isAttrPath ? "flake:default#" + prefixS : prefixS;
|
auto actualRef = isAttrPath ? "flake:default#" + prefixS : prefixS;
|
||||||
|
|
||||||
auto [flakeRef, fragment] = parseFlakeRefWithFragment(actualRef, absPath("."));
|
auto [flakeRef, fragment] = parseFlakeRefWithFragment(actualRef, absPath(getCommandBaseDir()));
|
||||||
|
|
||||||
auto state = getEvalState();
|
auto state = getEvalState();
|
||||||
|
|
||||||
|
@ -829,7 +813,7 @@ BuiltPaths Installable::toBuiltPaths(
|
||||||
|
|
||||||
BuiltPaths res;
|
BuiltPaths res;
|
||||||
for (auto & drvPath : Installable::toDerivations(store, installables, true))
|
for (auto & drvPath : Installable::toDerivations(store, installables, true))
|
||||||
res.push_back(BuiltPath::Opaque{drvPath});
|
res.emplace_back(BuiltPath::Opaque{drvPath});
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -919,7 +903,7 @@ std::vector<FlakeRef> RawInstallablesCommand::getFlakeRefsForCompletion()
|
||||||
for (auto i : rawInstallables)
|
for (auto i : rawInstallables)
|
||||||
res.push_back(parseFlakeRefWithFragment(
|
res.push_back(parseFlakeRefWithFragment(
|
||||||
expandTilde(i),
|
expandTilde(i),
|
||||||
absPath(".")).first);
|
absPath(getCommandBaseDir())).first);
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -941,7 +925,7 @@ std::vector<FlakeRef> InstallableCommand::getFlakeRefsForCompletion()
|
||||||
return {
|
return {
|
||||||
parseFlakeRefWithFragment(
|
parseFlakeRefWithFragment(
|
||||||
expandTilde(_installable),
|
expandTilde(_installable),
|
||||||
absPath(".")).first
|
absPath(getCommandBaseDir())).first
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
#pragma once
|
#pragma once
|
||||||
///@file
|
///@file
|
||||||
|
|
||||||
#include "util.hh"
|
|
||||||
#include "path.hh"
|
#include "path.hh"
|
||||||
#include "outputs-spec.hh"
|
#include "outputs-spec.hh"
|
||||||
#include "derived-path.hh"
|
#include "derived-path.hh"
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
#include "markdown.hh"
|
#include "markdown.hh"
|
||||||
#include "util.hh"
|
#include "util.hh"
|
||||||
#include "finally.hh"
|
#include "finally.hh"
|
||||||
|
#include "terminal.hh"
|
||||||
|
|
||||||
#include <sys/queue.h>
|
#include <sys/queue.h>
|
||||||
#include <lowdown.h>
|
#include <lowdown.h>
|
||||||
|
|
|
@ -22,6 +22,7 @@ extern "C" {
|
||||||
#include "repl.hh"
|
#include "repl.hh"
|
||||||
|
|
||||||
#include "ansicolor.hh"
|
#include "ansicolor.hh"
|
||||||
|
#include "signals.hh"
|
||||||
#include "shared.hh"
|
#include "shared.hh"
|
||||||
#include "eval.hh"
|
#include "eval.hh"
|
||||||
#include "eval-cache.hh"
|
#include "eval-cache.hh"
|
||||||
|
@ -36,6 +37,8 @@ extern "C" {
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
#include "flake/flake.hh"
|
#include "flake/flake.hh"
|
||||||
#include "flake/lockfile.hh"
|
#include "flake/lockfile.hh"
|
||||||
|
#include "users.hh"
|
||||||
|
#include "terminal.hh"
|
||||||
#include "editor-for.hh"
|
#include "editor-for.hh"
|
||||||
#include "finally.hh"
|
#include "finally.hh"
|
||||||
#include "markdown.hh"
|
#include "markdown.hh"
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
#include "attr-path.hh"
|
#include "attr-path.hh"
|
||||||
#include "eval-inline.hh"
|
#include "eval-inline.hh"
|
||||||
#include "util.hh"
|
|
||||||
|
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
#include "users.hh"
|
||||||
#include "eval-cache.hh"
|
#include "eval-cache.hh"
|
||||||
#include "sqlite.hh"
|
#include "sqlite.hh"
|
||||||
#include "eval.hh"
|
#include "eval.hh"
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
#include "users.hh"
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
#include "profiles.hh"
|
#include "profiles.hh"
|
||||||
#include "eval.hh"
|
#include "eval.hh"
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
#pragma once
|
#pragma once
|
||||||
|
///@file
|
||||||
|
|
||||||
#include "config.hh"
|
#include "config.hh"
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
#include "eval.hh"
|
#include "eval.hh"
|
||||||
#include "eval-settings.hh"
|
#include "eval-settings.hh"
|
||||||
#include "hash.hh"
|
#include "hash.hh"
|
||||||
|
#include "primops.hh"
|
||||||
#include "types.hh"
|
#include "types.hh"
|
||||||
#include "util.hh"
|
#include "util.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
|
@ -14,6 +15,7 @@
|
||||||
#include "print.hh"
|
#include "print.hh"
|
||||||
#include "fs-input-accessor.hh"
|
#include "fs-input-accessor.hh"
|
||||||
#include "memory-input-accessor.hh"
|
#include "memory-input-accessor.hh"
|
||||||
|
#include "signals.hh"
|
||||||
|
|
||||||
#include <algorithm>
|
#include <algorithm>
|
||||||
#include <chrono>
|
#include <chrono>
|
||||||
|
@ -721,6 +723,23 @@ void EvalState::addConstant(const std::string & name, Value * v, Constant info)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void PrimOp::check()
|
||||||
|
{
|
||||||
|
if (arity > maxPrimOpArity) {
|
||||||
|
throw Error("primop arity must not exceed %1%", maxPrimOpArity);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void Value::mkPrimOp(PrimOp * p)
|
||||||
|
{
|
||||||
|
p->check();
|
||||||
|
clearValue();
|
||||||
|
internalType = tPrimOp;
|
||||||
|
primOp = p;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
Value * EvalState::addPrimOp(PrimOp && primOp)
|
Value * EvalState::addPrimOp(PrimOp && primOp)
|
||||||
{
|
{
|
||||||
/* Hack to make constants lazy: turn them into a application of
|
/* Hack to make constants lazy: turn them into a application of
|
||||||
|
@ -1747,6 +1766,12 @@ void ExprCall::eval(EvalState & state, Env & env, Value & v)
|
||||||
Value vFun;
|
Value vFun;
|
||||||
fun->eval(state, env, vFun);
|
fun->eval(state, env, vFun);
|
||||||
|
|
||||||
|
// Empirical arity of Nixpkgs lambdas by regex e.g. ([a-zA-Z]+:(\s|(/\*.*\/)|(#.*\n))*){5}
|
||||||
|
// 2: over 4000
|
||||||
|
// 3: about 300
|
||||||
|
// 4: about 60
|
||||||
|
// 5: under 10
|
||||||
|
// This excluded attrset lambdas (`{...}:`). Contributions of mixed lambdas appears insignificant at ~150 total.
|
||||||
Value * vArgs[args.size()];
|
Value * vArgs[args.size()];
|
||||||
for (size_t i = 0; i < args.size(); ++i)
|
for (size_t i = 0; i < args.size(); ++i)
|
||||||
vArgs[i] = args[i]->maybeThunk(state, env);
|
vArgs[i] = args[i]->maybeThunk(state, env);
|
||||||
|
|
|
@ -18,6 +18,12 @@
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* We put a limit on primop arity because it lets us use a fixed size array on
|
||||||
|
* the stack. 8 is already an impractical number of arguments. Use an attrset
|
||||||
|
* argument for such overly complicated functions.
|
||||||
|
*/
|
||||||
|
constexpr size_t maxPrimOpArity = 8;
|
||||||
|
|
||||||
class Store;
|
class Store;
|
||||||
class EvalState;
|
class EvalState;
|
||||||
|
@ -71,6 +77,12 @@ struct PrimOp
|
||||||
* Optional experimental for this to be gated on.
|
* Optional experimental for this to be gated on.
|
||||||
*/
|
*/
|
||||||
std::optional<ExperimentalFeature> experimentalFeature;
|
std::optional<ExperimentalFeature> experimentalFeature;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validity check to be performed by functions that introduce primops,
|
||||||
|
* such as RegisterPrimOp() and Value::mkPrimOp().
|
||||||
|
*/
|
||||||
|
void check();
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -827,7 +839,7 @@ std::string showType(const Value & v);
|
||||||
/**
|
/**
|
||||||
* If `path` refers to a directory, then append "/default.nix".
|
* If `path` refers to a directory, then append "/default.nix".
|
||||||
*/
|
*/
|
||||||
SourcePath resolveExprPath(const SourcePath & path);
|
SourcePath resolveExprPath(SourcePath path);
|
||||||
|
|
||||||
struct InvalidPathError : EvalError
|
struct InvalidPathError : EvalError
|
||||||
{
|
{
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
#include "flake.hh"
|
#include "users.hh"
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
#include "fetch-settings.hh"
|
#include "fetch-settings.hh"
|
||||||
|
#include "flake.hh"
|
||||||
|
|
||||||
#include <nlohmann/json.hpp>
|
#include <nlohmann/json.hpp>
|
||||||
|
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
#include "terminal.hh"
|
||||||
#include "flake.hh"
|
#include "flake.hh"
|
||||||
#include "eval.hh"
|
#include "eval.hh"
|
||||||
#include "eval-settings.hh"
|
#include "eval-settings.hh"
|
||||||
|
@ -8,6 +9,7 @@
|
||||||
#include "fetchers.hh"
|
#include "fetchers.hh"
|
||||||
#include "finally.hh"
|
#include "finally.hh"
|
||||||
#include "fetch-settings.hh"
|
#include "fetch-settings.hh"
|
||||||
|
#include "value-to-json.hh"
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
@ -140,8 +142,13 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
||||||
attrs.emplace(state.symbols[attr.name], (long unsigned int)attr.value->integer);
|
attrs.emplace(state.symbols[attr.name], (long unsigned int)attr.value->integer);
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
|
if (attr.name == state.symbols.create("publicKeys")) {
|
||||||
state.symbols[attr.name], showType(*attr.value));
|
experimentalFeatureSettings.require(Xp::VerifiedFetches);
|
||||||
|
NixStringContext emptyContext = {};
|
||||||
|
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, emptyContext).dump());
|
||||||
|
} else
|
||||||
|
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
|
||||||
|
state.symbols[attr.name], showType(*attr.value));
|
||||||
}
|
}
|
||||||
#pragma GCC diagnostic pop
|
#pragma GCC diagnostic pop
|
||||||
}
|
}
|
||||||
|
@ -447,8 +454,8 @@ LockedFlake lockFlake(
|
||||||
|
|
||||||
assert(input.ref);
|
assert(input.ref);
|
||||||
|
|
||||||
/* Do we have an entry in the existing lock file? And we
|
/* Do we have an entry in the existing lock file?
|
||||||
don't have a --update-input flag for this input? */
|
And the input is not in updateInputs? */
|
||||||
std::shared_ptr<LockedNode> oldLock;
|
std::shared_ptr<LockedNode> oldLock;
|
||||||
|
|
||||||
updatesUsed.insert(inputPath);
|
updatesUsed.insert(inputPath);
|
||||||
|
@ -472,9 +479,8 @@ LockedFlake lockFlake(
|
||||||
|
|
||||||
node->inputs.insert_or_assign(id, childNode);
|
node->inputs.insert_or_assign(id, childNode);
|
||||||
|
|
||||||
/* If we have an --update-input flag for an input
|
/* If we have this input in updateInputs, then we
|
||||||
of this input, then we must fetch the flake to
|
must fetch the flake to update it. */
|
||||||
update it. */
|
|
||||||
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
|
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
|
||||||
|
|
||||||
auto mustRefetch =
|
auto mustRefetch =
|
||||||
|
@ -616,19 +622,14 @@ LockedFlake lockFlake(
|
||||||
|
|
||||||
for (auto & i : lockFlags.inputUpdates)
|
for (auto & i : lockFlags.inputUpdates)
|
||||||
if (!updatesUsed.count(i))
|
if (!updatesUsed.count(i))
|
||||||
warn("the flag '--update-input %s' does not match any input", printInputPath(i));
|
warn("'%s' does not match any input of this flake", printInputPath(i));
|
||||||
|
|
||||||
/* Check 'follows' inputs. */
|
/* Check 'follows' inputs. */
|
||||||
newLockFile.check();
|
newLockFile.check();
|
||||||
|
|
||||||
debug("new lock file: %s", newLockFile);
|
debug("new lock file: %s", newLockFile);
|
||||||
|
|
||||||
auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
|
|
||||||
auto sourcePath = topRef.input.getSourcePath();
|
auto sourcePath = topRef.input.getSourcePath();
|
||||||
auto outputLockFilePath = sourcePath ? std::optional{*sourcePath + "/" + relPath} : std::nullopt;
|
|
||||||
if (lockFlags.outputLockFilePath) {
|
|
||||||
outputLockFilePath = lockFlags.outputLockFilePath;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Check whether we need to / can write the new lock file. */
|
/* Check whether we need to / can write the new lock file. */
|
||||||
if (newLockFile != oldLockFile || lockFlags.outputLockFilePath) {
|
if (newLockFile != oldLockFile || lockFlags.outputLockFilePath) {
|
||||||
|
@ -636,7 +637,7 @@ LockedFlake lockFlake(
|
||||||
auto diff = LockFile::diff(oldLockFile, newLockFile);
|
auto diff = LockFile::diff(oldLockFile, newLockFile);
|
||||||
|
|
||||||
if (lockFlags.writeLockFile) {
|
if (lockFlags.writeLockFile) {
|
||||||
if (outputLockFilePath) {
|
if (sourcePath || lockFlags.outputLockFilePath) {
|
||||||
if (auto unlockedInput = newLockFile.isUnlocked()) {
|
if (auto unlockedInput = newLockFile.isUnlocked()) {
|
||||||
if (fetchSettings.warnDirty)
|
if (fetchSettings.warnDirty)
|
||||||
warn("will not write lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
|
warn("will not write lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
|
||||||
|
@ -644,41 +645,48 @@ LockedFlake lockFlake(
|
||||||
if (!lockFlags.updateLockFile)
|
if (!lockFlags.updateLockFile)
|
||||||
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
|
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
|
||||||
|
|
||||||
bool lockFileExists = pathExists(*outputLockFilePath);
|
auto newLockFileS = fmt("%s\n", newLockFile);
|
||||||
|
|
||||||
|
if (lockFlags.outputLockFilePath) {
|
||||||
|
if (lockFlags.commitLockFile)
|
||||||
|
throw Error("'--commit-lock-file' and '--output-lock-file' are incompatible");
|
||||||
|
writeFile(*lockFlags.outputLockFilePath, newLockFileS);
|
||||||
|
} else {
|
||||||
|
auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
|
||||||
|
auto outputLockFilePath = *sourcePath + "/" + relPath;
|
||||||
|
|
||||||
|
bool lockFileExists = pathExists(outputLockFilePath);
|
||||||
|
|
||||||
if (lockFileExists) {
|
|
||||||
auto s = chomp(diff);
|
auto s = chomp(diff);
|
||||||
if (s.empty())
|
if (lockFileExists) {
|
||||||
warn("updating lock file '%s'", *outputLockFilePath);
|
if (s.empty())
|
||||||
else
|
warn("updating lock file '%s'", outputLockFilePath);
|
||||||
warn("updating lock file '%s':\n%s", *outputLockFilePath, s);
|
else
|
||||||
} else
|
warn("updating lock file '%s':\n%s", outputLockFilePath, s);
|
||||||
warn("creating lock file '%s'", *outputLockFilePath);
|
} else
|
||||||
|
warn("creating lock file '%s': \n%s", outputLockFilePath, s);
|
||||||
|
|
||||||
newLockFile.write(*outputLockFilePath);
|
std::optional<std::string> commitMessage = std::nullopt;
|
||||||
|
|
||||||
std::optional<std::string> commitMessage = std::nullopt;
|
if (lockFlags.commitLockFile) {
|
||||||
if (lockFlags.commitLockFile) {
|
std::string cm;
|
||||||
if (lockFlags.outputLockFilePath) {
|
|
||||||
throw Error("--commit-lock-file and --output-lock-file are currently incompatible");
|
|
||||||
}
|
|
||||||
std::string cm;
|
|
||||||
|
|
||||||
cm = fetchSettings.commitLockFileSummary.get();
|
cm = fetchSettings.commitLockFileSummary.get();
|
||||||
|
|
||||||
if (cm == "") {
|
if (cm == "") {
|
||||||
cm = fmt("%s: %s", relPath, lockFileExists ? "Update" : "Add");
|
cm = fmt("%s: %s", relPath, lockFileExists ? "Update" : "Add");
|
||||||
|
}
|
||||||
|
|
||||||
|
cm += "\n\nFlake lock file updates:\n\n";
|
||||||
|
cm += filterANSIEscapes(diff, true);
|
||||||
|
commitMessage = cm;
|
||||||
}
|
}
|
||||||
|
|
||||||
cm += "\n\nFlake lock file updates:\n\n";
|
topRef.input.putFile(
|
||||||
cm += filterANSIEscapes(diff, true);
|
CanonPath((topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock"),
|
||||||
commitMessage = cm;
|
newLockFileS, commitMessage);
|
||||||
}
|
}
|
||||||
|
|
||||||
topRef.input.markChangedFile(
|
|
||||||
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
|
|
||||||
commitMessage);
|
|
||||||
|
|
||||||
/* Rewriting the lockfile changed the top-level
|
/* Rewriting the lockfile changed the top-level
|
||||||
repo, so we should re-read it. FIXME: we could
|
repo, so we should re-read it. FIXME: we could
|
||||||
also just clear the 'rev' field... */
|
also just clear the 'rev' field... */
|
||||||
|
|
|
@ -214,12 +214,6 @@ std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
|
||||||
return stream;
|
return stream;
|
||||||
}
|
}
|
||||||
|
|
||||||
void LockFile::write(const Path & path) const
|
|
||||||
{
|
|
||||||
createDirs(dirOf(path));
|
|
||||||
writeFile(path, fmt("%s\n", *this));
|
|
||||||
}
|
|
||||||
|
|
||||||
std::optional<FlakeRef> LockFile::isUnlocked() const
|
std::optional<FlakeRef> LockFile::isUnlocked() const
|
||||||
{
|
{
|
||||||
std::set<ref<const Node>> nodes;
|
std::set<ref<const Node>> nodes;
|
||||||
|
|
|
@ -65,8 +65,6 @@ struct LockFile
|
||||||
|
|
||||||
static LockFile read(const Path & path);
|
static LockFile read(const Path & path);
|
||||||
|
|
||||||
void write(const Path & path) const;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Check whether this lock file has any unlocked inputs.
|
* Check whether this lock file has any unlocked inputs.
|
||||||
*/
|
*/
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
#include "get-drvs.hh"
|
#include "get-drvs.hh"
|
||||||
#include "util.hh"
|
|
||||||
#include "eval-inline.hh"
|
#include "eval-inline.hh"
|
||||||
#include "derivations.hh"
|
#include "derivations.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
|
|
|
@ -43,7 +43,9 @@ $(foreach i, $(wildcard src/libexpr/value/*.hh), \
|
||||||
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
|
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
|
||||||
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
|
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
|
||||||
|
|
||||||
$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh
|
$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh
|
||||||
|
|
||||||
|
$(d)/eval.cc: $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh
|
||||||
|
|
||||||
$(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh
|
$(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh
|
||||||
|
|
||||||
|
|
|
@ -19,6 +19,7 @@
|
||||||
#include <variant>
|
#include <variant>
|
||||||
|
|
||||||
#include "util.hh"
|
#include "util.hh"
|
||||||
|
#include "users.hh"
|
||||||
|
|
||||||
#include "nixexpr.hh"
|
#include "nixexpr.hh"
|
||||||
#include "eval.hh"
|
#include "eval.hh"
|
||||||
|
@ -687,17 +688,25 @@ Expr * EvalState::parse(
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
SourcePath resolveExprPath(const SourcePath & path)
|
SourcePath resolveExprPath(SourcePath path)
|
||||||
{
|
{
|
||||||
|
unsigned int followCount = 0, maxFollow = 1024;
|
||||||
|
|
||||||
/* If `path' is a symlink, follow it. This is so that relative
|
/* If `path' is a symlink, follow it. This is so that relative
|
||||||
path references work. */
|
path references work. */
|
||||||
auto path2 = path.resolveSymlinks();
|
while (true) {
|
||||||
|
// Basic cycle/depth limit to avoid infinite loops.
|
||||||
|
if (++followCount >= maxFollow)
|
||||||
|
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
|
||||||
|
if (path.lstat().type != InputAccessor::tSymlink) break;
|
||||||
|
path = {path.accessor, CanonPath(path.readLink(), path.path.parent().value_or(CanonPath::root))};
|
||||||
|
}
|
||||||
|
|
||||||
/* If `path' refers to a directory, append `/default.nix'. */
|
/* If `path' refers to a directory, append `/default.nix'. */
|
||||||
if (path2.lstat().type == InputAccessor::tDirectory)
|
if (path.lstat().type == InputAccessor::tDirectory)
|
||||||
return path2 + "default.nix";
|
return path + "default.nix";
|
||||||
|
|
||||||
return path2;
|
return path;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -10,6 +10,7 @@
|
||||||
#include "path-references.hh"
|
#include "path-references.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "util.hh"
|
#include "util.hh"
|
||||||
|
#include "processes.hh"
|
||||||
#include "value-to-json.hh"
|
#include "value-to-json.hh"
|
||||||
#include "value-to-xml.hh"
|
#include "value-to-xml.hh"
|
||||||
#include "primops.hh"
|
#include "primops.hh"
|
||||||
|
@ -28,7 +29,6 @@
|
||||||
|
|
||||||
#include <cmath>
|
#include <cmath>
|
||||||
|
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
|
||||||
|
@ -824,7 +824,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
|
||||||
auto message = state.coerceToString(pos, *args[0], context,
|
auto message = state.coerceToString(pos, *args[0], context,
|
||||||
"while evaluating the error message passed to builtins.addErrorContext",
|
"while evaluating the error message passed to builtins.addErrorContext",
|
||||||
false, false).toOwned();
|
false, false).toOwned();
|
||||||
e.addTrace(nullptr, message, true);
|
e.addTrace(nullptr, hintfmt(message), true);
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1548,10 +1548,8 @@ static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args,
|
||||||
|
|
||||||
try {
|
try {
|
||||||
auto checked = state.checkSourcePath(path);
|
auto checked = state.checkSourcePath(path);
|
||||||
auto exists = checked.pathExists();
|
auto st = checked.maybeLstat();
|
||||||
if (exists && mustBeDir) {
|
auto exists = st && (!mustBeDir || st->type == SourceAccessor::tDirectory);
|
||||||
exists = checked.lstat().type == InputAccessor::tDirectory;
|
|
||||||
}
|
|
||||||
v.mkBool(exists);
|
v.mkBool(exists);
|
||||||
} catch (SysError & e) {
|
} catch (SysError & e) {
|
||||||
/* Don't give away info from errors while canonicalising
|
/* Don't give away info from errors while canonicalising
|
||||||
|
@ -2376,7 +2374,7 @@ static RegisterPrimOp primop_path({
|
||||||
like `@`.
|
like `@`.
|
||||||
|
|
||||||
- filter\
|
- filter\
|
||||||
A function of the type expected by `builtins.filterSource`,
|
A function of the type expected by [`builtins.filterSource`](#builtins-filterSource),
|
||||||
with the same semantics.
|
with the same semantics.
|
||||||
|
|
||||||
- recursive\
|
- recursive\
|
||||||
|
@ -2551,6 +2549,7 @@ static void prim_removeAttrs(EvalState & state, const PosIdx pos, Value * * args
|
||||||
/* Get the attribute names to be removed.
|
/* Get the attribute names to be removed.
|
||||||
We keep them as Attrs instead of Symbols so std::set_difference
|
We keep them as Attrs instead of Symbols so std::set_difference
|
||||||
can be used to remove them from attrs[0]. */
|
can be used to remove them from attrs[0]. */
|
||||||
|
// 64: large enough to fit the attributes of a derivation
|
||||||
boost::container::small_vector<Attr, 64> names;
|
boost::container::small_vector<Attr, 64> names;
|
||||||
names.reserve(args[1]->listSize());
|
names.reserve(args[1]->listSize());
|
||||||
for (auto elem : args[1]->listItems()) {
|
for (auto elem : args[1]->listItems()) {
|
||||||
|
@ -2731,7 +2730,7 @@ static void prim_catAttrs(EvalState & state, const PosIdx pos, Value * * args, V
|
||||||
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.catAttrs");
|
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.catAttrs");
|
||||||
|
|
||||||
Value * res[args[1]->listSize()];
|
Value * res[args[1]->listSize()];
|
||||||
unsigned int found = 0;
|
size_t found = 0;
|
||||||
|
|
||||||
for (auto v2 : args[1]->listItems()) {
|
for (auto v2 : args[1]->listItems()) {
|
||||||
state.forceAttrs(*v2, pos, "while evaluating an element in the list passed as second argument to builtins.catAttrs");
|
state.forceAttrs(*v2, pos, "while evaluating an element in the list passed as second argument to builtins.catAttrs");
|
||||||
|
@ -3067,7 +3066,7 @@ static void prim_filter(EvalState & state, const PosIdx pos, Value * * args, Val
|
||||||
|
|
||||||
// FIXME: putting this on the stack is risky.
|
// FIXME: putting this on the stack is risky.
|
||||||
Value * vs[args[1]->listSize()];
|
Value * vs[args[1]->listSize()];
|
||||||
unsigned int k = 0;
|
size_t k = 0;
|
||||||
|
|
||||||
bool same = true;
|
bool same = true;
|
||||||
for (unsigned int n = 0; n < args[1]->listSize(); ++n) {
|
for (unsigned int n = 0; n < args[1]->listSize(); ++n) {
|
||||||
|
@ -3192,10 +3191,14 @@ static void anyOrAll(bool any, EvalState & state, const PosIdx pos, Value * * ar
|
||||||
state.forceFunction(*args[0], pos, std::string("while evaluating the first argument passed to builtins.") + (any ? "any" : "all"));
|
state.forceFunction(*args[0], pos, std::string("while evaluating the first argument passed to builtins.") + (any ? "any" : "all"));
|
||||||
state.forceList(*args[1], pos, std::string("while evaluating the second argument passed to builtins.") + (any ? "any" : "all"));
|
state.forceList(*args[1], pos, std::string("while evaluating the second argument passed to builtins.") + (any ? "any" : "all"));
|
||||||
|
|
||||||
|
std::string_view errorCtx = any
|
||||||
|
? "while evaluating the return value of the function passed to builtins.any"
|
||||||
|
: "while evaluating the return value of the function passed to builtins.all";
|
||||||
|
|
||||||
Value vTmp;
|
Value vTmp;
|
||||||
for (auto elem : args[1]->listItems()) {
|
for (auto elem : args[1]->listItems()) {
|
||||||
state.callFunction(*args[0], *elem, vTmp, pos);
|
state.callFunction(*args[0], *elem, vTmp, pos);
|
||||||
bool res = state.forceBool(vTmp, pos, std::string("while evaluating the return value of the function passed to builtins.") + (any ? "any" : "all"));
|
bool res = state.forceBool(vTmp, pos, errorCtx);
|
||||||
if (res == any) {
|
if (res == any) {
|
||||||
v.mkBool(any);
|
v.mkBool(any);
|
||||||
return;
|
return;
|
||||||
|
@ -3457,7 +3460,7 @@ static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args,
|
||||||
for (unsigned int n = 0; n < nrLists; ++n) {
|
for (unsigned int n = 0; n < nrLists; ++n) {
|
||||||
Value * vElem = args[1]->listElems()[n];
|
Value * vElem = args[1]->listElems()[n];
|
||||||
state.callFunction(*args[0], *vElem, lists[n], pos);
|
state.callFunction(*args[0], *vElem, lists[n], pos);
|
||||||
state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to buitlins.concatMap");
|
state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to builtins.concatMap");
|
||||||
len += lists[n].listSize();
|
len += lists[n].listSize();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -8,6 +8,22 @@
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* For functions where we do not expect deep recursion, we can use a sizable
|
||||||
|
* part of the stack a free allocation space.
|
||||||
|
*
|
||||||
|
* Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes.
|
||||||
|
*/
|
||||||
|
constexpr size_t nonRecursiveStackReservation = 128;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Functions that maybe applied to self-similar inputs, such as concatMap on a
|
||||||
|
* tree, should reserve a smaller part of the stack for allocation.
|
||||||
|
*
|
||||||
|
* Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes.
|
||||||
|
*/
|
||||||
|
constexpr size_t conservativeStackReservation = 16;
|
||||||
|
|
||||||
struct RegisterPrimOp
|
struct RegisterPrimOp
|
||||||
{
|
{
|
||||||
typedef std::vector<PrimOp> PrimOps;
|
typedef std::vector<PrimOp> PrimOps;
|
||||||
|
|
|
@ -7,6 +7,7 @@
|
||||||
#include "registry.hh"
|
#include "registry.hh"
|
||||||
#include "tarball.hh"
|
#include "tarball.hh"
|
||||||
#include "url.hh"
|
#include "url.hh"
|
||||||
|
#include "value-to-json.hh"
|
||||||
|
|
||||||
#include <ctime>
|
#include <ctime>
|
||||||
#include <iomanip>
|
#include <iomanip>
|
||||||
|
@ -125,6 +126,10 @@ static void fetchTree(
|
||||||
attrs.emplace(state.symbols[attr.name], Explicit<bool>{attr.value->boolean});
|
attrs.emplace(state.symbols[attr.name], Explicit<bool>{attr.value->boolean});
|
||||||
else if (attr.value->type() == nInt)
|
else if (attr.value->type() == nInt)
|
||||||
attrs.emplace(state.symbols[attr.name], uint64_t(attr.value->integer));
|
attrs.emplace(state.symbols[attr.name], uint64_t(attr.value->integer));
|
||||||
|
else if (state.symbols[attr.name] == "publicKeys") {
|
||||||
|
experimentalFeatureSettings.require(Xp::VerifiedFetches);
|
||||||
|
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump());
|
||||||
|
}
|
||||||
else
|
else
|
||||||
state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
|
state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
|
||||||
state.symbols[attr.name], showType(*attr.value)));
|
state.symbols[attr.name], showType(*attr.value)));
|
||||||
|
@ -223,6 +228,7 @@ static RegisterPrimOp primop_fetchTree({
|
||||||
```
|
```
|
||||||
)",
|
)",
|
||||||
.fun = prim_fetchTree,
|
.fun = prim_fetchTree,
|
||||||
|
.experimentalFeature = Xp::FetchTree,
|
||||||
});
|
});
|
||||||
|
|
||||||
static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v,
|
static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v,
|
||||||
|
@ -419,7 +425,8 @@ static RegisterPrimOp primop_fetchGit({
|
||||||
|
|
||||||
- `shallow` (default: `false`)
|
- `shallow` (default: `false`)
|
||||||
|
|
||||||
A Boolean parameter that specifies whether fetching a shallow clone is allowed.
|
A Boolean parameter that specifies whether fetching from a shallow remote repository is allowed.
|
||||||
|
This still performs a full clone of what is available on the remote.
|
||||||
|
|
||||||
- `allRefs`
|
- `allRefs`
|
||||||
|
|
||||||
|
@ -427,6 +434,42 @@ static RegisterPrimOp primop_fetchGit({
|
||||||
With this argument being true, it's possible to load a `rev` from *any* `ref`
|
With this argument being true, it's possible to load a `rev` from *any* `ref`
|
||||||
(by default only `rev`s from the specified `ref` are supported).
|
(by default only `rev`s from the specified `ref` are supported).
|
||||||
|
|
||||||
|
- `verifyCommit` (default: `true` if `publicKey` or `publicKeys` are provided, otherwise `false`)
|
||||||
|
|
||||||
|
Whether to check `rev` for a signature matching `publicKey` or `publicKeys`.
|
||||||
|
If `verifyCommit` is enabled, then `fetchGit` cannot use a local repository with uncommitted changes.
|
||||||
|
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
|
||||||
|
|
||||||
|
- `publicKey`
|
||||||
|
|
||||||
|
The public key against which `rev` is verified if `verifyCommit` is enabled.
|
||||||
|
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
|
||||||
|
|
||||||
|
- `keytype` (default: `"ssh-ed25519"`)
|
||||||
|
|
||||||
|
The key type of `publicKey`.
|
||||||
|
Possible values:
|
||||||
|
- `"ssh-dsa"`
|
||||||
|
- `"ssh-ecdsa"`
|
||||||
|
- `"ssh-ecdsa-sk"`
|
||||||
|
- `"ssh-ed25519"`
|
||||||
|
- `"ssh-ed25519-sk"`
|
||||||
|
- `"ssh-rsa"`
|
||||||
|
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
|
||||||
|
|
||||||
|
- `publicKeys`
|
||||||
|
|
||||||
|
The public keys against which `rev` is verified if `verifyCommit` is enabled.
|
||||||
|
Must be given as a list of attribute sets with the following form:
|
||||||
|
```nix
|
||||||
|
{
|
||||||
|
key = "<public key>";
|
||||||
|
type = "<key type>"; # optional, default: "ssh-ed25519"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
|
||||||
|
|
||||||
|
|
||||||
Here are some examples of how to use `fetchGit`.
|
Here are some examples of how to use `fetchGit`.
|
||||||
|
|
||||||
- To fetch a private repository over SSH:
|
- To fetch a private repository over SSH:
|
||||||
|
@ -501,6 +544,21 @@ static RegisterPrimOp primop_fetchGit({
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
- To verify the commit signature:
|
||||||
|
|
||||||
|
```nix
|
||||||
|
builtins.fetchGit {
|
||||||
|
url = "ssh://git@github.com/nixos/nix.git";
|
||||||
|
verifyCommit = true;
|
||||||
|
publicKeys = [
|
||||||
|
{
|
||||||
|
type = "ssh-ed25519";
|
||||||
|
key = "AAAAC3NzaC1lZDI1NTE5AAAAIArPKULJOid8eS6XETwUjO48/HKBWl7FTCK0Z//fplDi";
|
||||||
|
}
|
||||||
|
];
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
Nix will refetch the branch according to the [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl) setting.
|
Nix will refetch the branch according to the [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl) setting.
|
||||||
|
|
||||||
This behavior is disabled in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval).
|
This behavior is disabled in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval).
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
#include "search-path.hh"
|
#include "search-path.hh"
|
||||||
#include "util.hh"
|
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
|
|
@ -906,12 +906,12 @@ namespace nix {
|
||||||
ASSERT_TRACE2("concatMap (x: 1) [ \"foo\" ] # TODO",
|
ASSERT_TRACE2("concatMap (x: 1) [ \"foo\" ] # TODO",
|
||||||
TypeError,
|
TypeError,
|
||||||
hintfmt("value is %s while a list was expected", "an integer"),
|
hintfmt("value is %s while a list was expected", "an integer"),
|
||||||
hintfmt("while evaluating the return value of the function passed to buitlins.concatMap"));
|
hintfmt("while evaluating the return value of the function passed to builtins.concatMap"));
|
||||||
|
|
||||||
ASSERT_TRACE2("concatMap (x: \"foo\") [ 1 2 ] # TODO",
|
ASSERT_TRACE2("concatMap (x: \"foo\") [ 1 2 ] # TODO",
|
||||||
TypeError,
|
TypeError,
|
||||||
hintfmt("value is %s while a list was expected", "a string"),
|
hintfmt("value is %s while a list was expected", "a string"),
|
||||||
hintfmt("while evaluating the return value of the function passed to buitlins.concatMap"));
|
hintfmt("while evaluating the return value of the function passed to builtins.concatMap"));
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -6,7 +6,11 @@ libexpr-tests_NAME := libnixexpr-tests
|
||||||
|
|
||||||
libexpr-tests_DIR := $(d)
|
libexpr-tests_DIR := $(d)
|
||||||
|
|
||||||
libexpr-tests_INSTALL_DIR :=
|
ifeq ($(INSTALL_UNIT_TESTS), yes)
|
||||||
|
libexpr-tests_INSTALL_DIR := $(checkbindir)
|
||||||
|
else
|
||||||
|
libexpr-tests_INSTALL_DIR :=
|
||||||
|
endif
|
||||||
|
|
||||||
libexpr-tests_SOURCES := \
|
libexpr-tests_SOURCES := \
|
||||||
$(wildcard $(d)/*.cc) \
|
$(wildcard $(d)/*.cc) \
|
||||||
|
|
|
@ -114,7 +114,8 @@ TEST_F(ValuePrintingTests, vLambda)
|
||||||
TEST_F(ValuePrintingTests, vPrimOp)
|
TEST_F(ValuePrintingTests, vPrimOp)
|
||||||
{
|
{
|
||||||
Value vPrimOp;
|
Value vPrimOp;
|
||||||
vPrimOp.mkPrimOp(nullptr);
|
PrimOp primOp{};
|
||||||
|
vPrimOp.mkPrimOp(&primOp);
|
||||||
|
|
||||||
test(vPrimOp, "<PRIMOP>");
|
test(vPrimOp, "<PRIMOP>");
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#include "value-to-json.hh"
|
#include "value-to-json.hh"
|
||||||
#include "eval-inline.hh"
|
#include "eval-inline.hh"
|
||||||
#include "util.hh"
|
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
|
#include "signals.hh"
|
||||||
|
|
||||||
#include <cstdlib>
|
#include <cstdlib>
|
||||||
#include <iomanip>
|
#include <iomanip>
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#include "value-to-xml.hh"
|
#include "value-to-xml.hh"
|
||||||
#include "xml-writer.hh"
|
#include "xml-writer.hh"
|
||||||
#include "eval-inline.hh"
|
#include "eval-inline.hh"
|
||||||
#include "util.hh"
|
#include "signals.hh"
|
||||||
|
|
||||||
#include <cstdlib>
|
#include <cstdlib>
|
||||||
|
|
||||||
|
|
|
@ -3,6 +3,7 @@
|
||||||
|
|
||||||
#include <cassert>
|
#include <cassert>
|
||||||
#include <climits>
|
#include <climits>
|
||||||
|
#include <span>
|
||||||
|
|
||||||
#include "symbol-table.hh"
|
#include "symbol-table.hh"
|
||||||
#include "value/context.hh"
|
#include "value/context.hh"
|
||||||
|
@ -158,42 +159,60 @@ public:
|
||||||
inline bool isPrimOp() const { return internalType == tPrimOp; };
|
inline bool isPrimOp() const { return internalType == tPrimOp; };
|
||||||
inline bool isPrimOpApp() const { return internalType == tPrimOpApp; };
|
inline bool isPrimOpApp() const { return internalType == tPrimOpApp; };
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Strings in the evaluator carry a so-called `context` which
|
||||||
|
* is a list of strings representing store paths. This is to
|
||||||
|
* allow users to write things like
|
||||||
|
*
|
||||||
|
* "--with-freetype2-library=" + freetype + "/lib"
|
||||||
|
*
|
||||||
|
* where `freetype` is a derivation (or a source to be copied
|
||||||
|
* to the store). If we just concatenated the strings without
|
||||||
|
* keeping track of the referenced store paths, then if the
|
||||||
|
* string is used as a derivation attribute, the derivation
|
||||||
|
* will not have the correct dependencies in its inputDrvs and
|
||||||
|
* inputSrcs.
|
||||||
|
|
||||||
|
* The semantics of the context is as follows: when a string
|
||||||
|
* with context C is used as a derivation attribute, then the
|
||||||
|
* derivations in C will be added to the inputDrvs of the
|
||||||
|
* derivation, and the other store paths in C will be added to
|
||||||
|
* the inputSrcs of the derivations.
|
||||||
|
|
||||||
|
* For canonicity, the store paths should be in sorted order.
|
||||||
|
*/
|
||||||
|
struct StringWithContext {
|
||||||
|
const char * c_str;
|
||||||
|
const char * * context; // must be in sorted order
|
||||||
|
};
|
||||||
|
|
||||||
|
struct Path {
|
||||||
|
InputAccessor * accessor;
|
||||||
|
const char * path;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct ClosureThunk {
|
||||||
|
Env * env;
|
||||||
|
Expr * expr;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct FunctionApplicationThunk {
|
||||||
|
Value * left, * right;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct Lambda {
|
||||||
|
Env * env;
|
||||||
|
ExprLambda * fun;
|
||||||
|
};
|
||||||
|
|
||||||
union
|
union
|
||||||
{
|
{
|
||||||
NixInt integer;
|
NixInt integer;
|
||||||
bool boolean;
|
bool boolean;
|
||||||
|
|
||||||
/**
|
StringWithContext string;
|
||||||
* Strings in the evaluator carry a so-called `context` which
|
|
||||||
* is a list of strings representing store paths. This is to
|
|
||||||
* allow users to write things like
|
|
||||||
|
|
||||||
* "--with-freetype2-library=" + freetype + "/lib"
|
Path _path;
|
||||||
|
|
||||||
* where `freetype` is a derivation (or a source to be copied
|
|
||||||
* to the store). If we just concatenated the strings without
|
|
||||||
* keeping track of the referenced store paths, then if the
|
|
||||||
* string is used as a derivation attribute, the derivation
|
|
||||||
* will not have the correct dependencies in its inputDrvs and
|
|
||||||
* inputSrcs.
|
|
||||||
|
|
||||||
* The semantics of the context is as follows: when a string
|
|
||||||
* with context C is used as a derivation attribute, then the
|
|
||||||
* derivations in C will be added to the inputDrvs of the
|
|
||||||
* derivation, and the other store paths in C will be added to
|
|
||||||
* the inputSrcs of the derivations.
|
|
||||||
|
|
||||||
* For canonicity, the store paths should be in sorted order.
|
|
||||||
*/
|
|
||||||
struct {
|
|
||||||
const char * c_str;
|
|
||||||
const char * * context; // must be in sorted order
|
|
||||||
} string;
|
|
||||||
|
|
||||||
struct {
|
|
||||||
InputAccessor * accessor;
|
|
||||||
const char * path;
|
|
||||||
} _path;
|
|
||||||
|
|
||||||
Bindings * attrs;
|
Bindings * attrs;
|
||||||
struct {
|
struct {
|
||||||
|
@ -201,21 +220,11 @@ public:
|
||||||
Value * * elems;
|
Value * * elems;
|
||||||
} bigList;
|
} bigList;
|
||||||
Value * smallList[2];
|
Value * smallList[2];
|
||||||
struct {
|
ClosureThunk thunk;
|
||||||
Env * env;
|
FunctionApplicationThunk app;
|
||||||
Expr * expr;
|
Lambda lambda;
|
||||||
} thunk;
|
|
||||||
struct {
|
|
||||||
Value * left, * right;
|
|
||||||
} app;
|
|
||||||
struct {
|
|
||||||
Env * env;
|
|
||||||
ExprLambda * fun;
|
|
||||||
} lambda;
|
|
||||||
PrimOp * primOp;
|
PrimOp * primOp;
|
||||||
struct {
|
FunctionApplicationThunk primOpApp;
|
||||||
Value * left, * right;
|
|
||||||
} primOpApp;
|
|
||||||
ExternalValueBase * external;
|
ExternalValueBase * external;
|
||||||
NixFloat fpoint;
|
NixFloat fpoint;
|
||||||
};
|
};
|
||||||
|
@ -354,13 +363,7 @@ public:
|
||||||
// Value will be overridden anyways
|
// Value will be overridden anyways
|
||||||
}
|
}
|
||||||
|
|
||||||
inline void mkPrimOp(PrimOp * p)
|
void mkPrimOp(PrimOp * p);
|
||||||
{
|
|
||||||
clearValue();
|
|
||||||
internalType = tPrimOp;
|
|
||||||
primOp = p;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
inline void mkPrimOpApp(Value * l, Value * r)
|
inline void mkPrimOpApp(Value * l, Value * r)
|
||||||
{
|
{
|
||||||
|
@ -393,7 +396,13 @@ public:
|
||||||
return internalType == tList1 || internalType == tList2 ? smallList : bigList.elems;
|
return internalType == tList1 || internalType == tList2 ? smallList : bigList.elems;
|
||||||
}
|
}
|
||||||
|
|
||||||
const Value * const * listElems() const
|
std::span<Value * const> listItems() const
|
||||||
|
{
|
||||||
|
assert(isList());
|
||||||
|
return std::span<Value * const>(listElems(), listSize());
|
||||||
|
}
|
||||||
|
|
||||||
|
Value * const * listElems() const
|
||||||
{
|
{
|
||||||
return internalType == tList1 || internalType == tList2 ? smallList : bigList.elems;
|
return internalType == tList1 || internalType == tList2 ? smallList : bigList.elems;
|
||||||
}
|
}
|
||||||
|
@ -412,34 +421,6 @@ public:
|
||||||
*/
|
*/
|
||||||
bool isTrivial() const;
|
bool isTrivial() const;
|
||||||
|
|
||||||
auto listItems()
|
|
||||||
{
|
|
||||||
struct ListIterable
|
|
||||||
{
|
|
||||||
typedef Value * const * iterator;
|
|
||||||
iterator _begin, _end;
|
|
||||||
iterator begin() const { return _begin; }
|
|
||||||
iterator end() const { return _end; }
|
|
||||||
};
|
|
||||||
assert(isList());
|
|
||||||
auto begin = listElems();
|
|
||||||
return ListIterable { begin, begin + listSize() };
|
|
||||||
}
|
|
||||||
|
|
||||||
auto listItems() const
|
|
||||||
{
|
|
||||||
struct ConstListIterable
|
|
||||||
{
|
|
||||||
typedef const Value * const * iterator;
|
|
||||||
iterator _begin, _end;
|
|
||||||
iterator begin() const { return _begin; }
|
|
||||||
iterator end() const { return _end; }
|
|
||||||
};
|
|
||||||
assert(isList());
|
|
||||||
auto begin = listElems();
|
|
||||||
return ConstListIterable { begin, begin + listSize() };
|
|
||||||
}
|
|
||||||
|
|
||||||
SourcePath path() const
|
SourcePath path() const
|
||||||
{
|
{
|
||||||
assert(internalType == tPath);
|
assert(internalType == tPath);
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
#include "util.hh"
|
||||||
#include "value/context.hh"
|
#include "value/context.hh"
|
||||||
|
|
||||||
#include <optional>
|
#include <optional>
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
#pragma once
|
#pragma once
|
||||||
///@file
|
///@file
|
||||||
|
|
||||||
#include "util.hh"
|
|
||||||
#include "comparator.hh"
|
#include "comparator.hh"
|
||||||
#include "derived-path.hh"
|
#include "derived-path.hh"
|
||||||
#include "variant-wrapper.hh"
|
#include "variant-wrapper.hh"
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
#include "cache.hh"
|
#include "cache.hh"
|
||||||
|
#include "users.hh"
|
||||||
#include "sqlite.hh"
|
#include "sqlite.hh"
|
||||||
#include "sync.hh"
|
#include "sync.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
|
@ -19,6 +20,9 @@ create table if not exists Cache (
|
||||||
);
|
);
|
||||||
)sql";
|
)sql";
|
||||||
|
|
||||||
|
// FIXME: we should periodically purge/nuke this cache to prevent it
|
||||||
|
// from growing too big.
|
||||||
|
|
||||||
struct CacheImpl : Cache
|
struct CacheImpl : Cache
|
||||||
{
|
{
|
||||||
struct State
|
struct State
|
||||||
|
@ -47,6 +51,60 @@ struct CacheImpl : Cache
|
||||||
"select info, path, immutable, timestamp from Cache where input = ?");
|
"select info, path, immutable, timestamp from Cache where input = ?");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void upsert(
|
||||||
|
const Attrs & inAttrs,
|
||||||
|
const Attrs & infoAttrs) override
|
||||||
|
{
|
||||||
|
_state.lock()->add.use()
|
||||||
|
(attrsToJSON(inAttrs).dump())
|
||||||
|
(attrsToJSON(infoAttrs).dump())
|
||||||
|
("") // no path
|
||||||
|
(false)
|
||||||
|
(time(0)).exec();
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<Attrs> lookup(const Attrs & inAttrs) override
|
||||||
|
{
|
||||||
|
if (auto res = lookupExpired(inAttrs))
|
||||||
|
return std::move(res->infoAttrs);
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<Attrs> lookupWithTTL(const Attrs & inAttrs) override
|
||||||
|
{
|
||||||
|
if (auto res = lookupExpired(inAttrs)) {
|
||||||
|
if (!res->expired)
|
||||||
|
return std::move(res->infoAttrs);
|
||||||
|
debug("ignoring expired cache entry '%s'",
|
||||||
|
attrsToJSON(inAttrs).dump());
|
||||||
|
}
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<Result2> lookupExpired(const Attrs & inAttrs) override
|
||||||
|
{
|
||||||
|
auto state(_state.lock());
|
||||||
|
|
||||||
|
auto inAttrsJSON = attrsToJSON(inAttrs).dump();
|
||||||
|
|
||||||
|
auto stmt(state->lookup.use()(inAttrsJSON));
|
||||||
|
if (!stmt.next()) {
|
||||||
|
debug("did not find cache entry for '%s'", inAttrsJSON);
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
auto infoJSON = stmt.getStr(0);
|
||||||
|
auto locked = stmt.getInt(2) != 0;
|
||||||
|
auto timestamp = stmt.getInt(3);
|
||||||
|
|
||||||
|
debug("using cache entry '%s' -> '%s'", inAttrsJSON, infoJSON);
|
||||||
|
|
||||||
|
return Result2 {
|
||||||
|
.expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
|
||||||
|
.infoAttrs = jsonToAttrs(nlohmann::json::parse(infoJSON)),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
void add(
|
void add(
|
||||||
ref<Store> store,
|
ref<Store> store,
|
||||||
const Attrs & inAttrs,
|
const Attrs & inAttrs,
|
||||||
|
|
|
@ -2,13 +2,53 @@
|
||||||
///@file
|
///@file
|
||||||
|
|
||||||
#include "fetchers.hh"
|
#include "fetchers.hh"
|
||||||
|
#include "path.hh"
|
||||||
|
|
||||||
namespace nix::fetchers {
|
namespace nix::fetchers {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A cache for arbitrary `Attrs` -> `Attrs` mappings with a timestamp
|
||||||
|
* for expiration.
|
||||||
|
*/
|
||||||
struct Cache
|
struct Cache
|
||||||
{
|
{
|
||||||
virtual ~Cache() { }
|
virtual ~Cache() { }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a value to the cache. The cache is an arbitrary mapping of
|
||||||
|
* Attrs to Attrs.
|
||||||
|
*/
|
||||||
|
virtual void upsert(
|
||||||
|
const Attrs & inAttrs,
|
||||||
|
const Attrs & infoAttrs) = 0;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Look up a key with infinite TTL.
|
||||||
|
*/
|
||||||
|
virtual std::optional<Attrs> lookup(
|
||||||
|
const Attrs & inAttrs) = 0;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Look up a key. Return nothing if its TTL has exceeded
|
||||||
|
* `settings.tarballTTL`.
|
||||||
|
*/
|
||||||
|
virtual std::optional<Attrs> lookupWithTTL(
|
||||||
|
const Attrs & inAttrs) = 0;
|
||||||
|
|
||||||
|
struct Result2
|
||||||
|
{
|
||||||
|
bool expired = false;
|
||||||
|
Attrs infoAttrs;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Look up a key. Return a bool denoting whether its TTL has
|
||||||
|
* exceeded `settings.tarballTTL`.
|
||||||
|
*/
|
||||||
|
virtual std::optional<Result2> lookupExpired(
|
||||||
|
const Attrs & inAttrs) = 0;
|
||||||
|
|
||||||
|
/* Old cache for things that have a store path. */
|
||||||
virtual void add(
|
virtual void add(
|
||||||
ref<Store> store,
|
ref<Store> store,
|
||||||
const Attrs & inAttrs,
|
const Attrs & inAttrs,
|
||||||
|
|
|
@ -3,7 +3,6 @@
|
||||||
|
|
||||||
#include "types.hh"
|
#include "types.hh"
|
||||||
#include "config.hh"
|
#include "config.hh"
|
||||||
#include "util.hh"
|
|
||||||
|
|
||||||
#include <map>
|
#include <map>
|
||||||
#include <limits>
|
#include <limits>
|
||||||
|
|
|
@ -1,16 +1,36 @@
|
||||||
#include "fetchers.hh"
|
#include "fetchers.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
|
#include "input-accessor.hh"
|
||||||
|
|
||||||
#include <nlohmann/json.hpp>
|
#include <nlohmann/json.hpp>
|
||||||
|
|
||||||
namespace nix::fetchers {
|
namespace nix::fetchers {
|
||||||
|
|
||||||
std::unique_ptr<std::vector<std::shared_ptr<InputScheme>>> inputSchemes = nullptr;
|
using InputSchemeMap = std::map<std::string_view, std::shared_ptr<InputScheme>>;
|
||||||
|
|
||||||
|
std::unique_ptr<InputSchemeMap> inputSchemes = nullptr;
|
||||||
|
|
||||||
void registerInputScheme(std::shared_ptr<InputScheme> && inputScheme)
|
void registerInputScheme(std::shared_ptr<InputScheme> && inputScheme)
|
||||||
{
|
{
|
||||||
if (!inputSchemes) inputSchemes = std::make_unique<std::vector<std::shared_ptr<InputScheme>>>();
|
if (!inputSchemes)
|
||||||
inputSchemes->push_back(std::move(inputScheme));
|
inputSchemes = std::make_unique<InputSchemeMap>();
|
||||||
|
auto schemeName = inputScheme->schemeName();
|
||||||
|
if (inputSchemes->count(schemeName) > 0)
|
||||||
|
throw Error("Input scheme with name %s already registered", schemeName);
|
||||||
|
inputSchemes->insert_or_assign(schemeName, std::move(inputScheme));
|
||||||
|
}
|
||||||
|
|
||||||
|
nlohmann::json dumpRegisterInputSchemeInfo() {
|
||||||
|
using nlohmann::json;
|
||||||
|
|
||||||
|
auto res = json::object();
|
||||||
|
|
||||||
|
for (auto & [name, scheme] : *inputSchemes) {
|
||||||
|
auto & r = res[name] = json::object();
|
||||||
|
r["allowedAttrs"] = scheme->allowedAttrs();
|
||||||
|
}
|
||||||
|
|
||||||
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
Input Input::fromURL(const std::string & url, bool requireTree)
|
Input Input::fromURL(const std::string & url, bool requireTree)
|
||||||
|
@ -33,7 +53,7 @@ static void fixupInput(Input & input)
|
||||||
|
|
||||||
Input Input::fromURL(const ParsedURL & url, bool requireTree)
|
Input Input::fromURL(const ParsedURL & url, bool requireTree)
|
||||||
{
|
{
|
||||||
for (auto & inputScheme : *inputSchemes) {
|
for (auto & [_, inputScheme] : *inputSchemes) {
|
||||||
auto res = inputScheme->inputFromURL(url, requireTree);
|
auto res = inputScheme->inputFromURL(url, requireTree);
|
||||||
if (res) {
|
if (res) {
|
||||||
experimentalFeatureSettings.require(inputScheme->experimentalFeature());
|
experimentalFeatureSettings.require(inputScheme->experimentalFeature());
|
||||||
|
@ -48,20 +68,44 @@ Input Input::fromURL(const ParsedURL & url, bool requireTree)
|
||||||
|
|
||||||
Input Input::fromAttrs(Attrs && attrs)
|
Input Input::fromAttrs(Attrs && attrs)
|
||||||
{
|
{
|
||||||
for (auto & inputScheme : *inputSchemes) {
|
auto schemeName = ({
|
||||||
auto res = inputScheme->inputFromAttrs(attrs);
|
auto schemeNameOpt = maybeGetStrAttr(attrs, "type");
|
||||||
if (res) {
|
if (!schemeNameOpt)
|
||||||
experimentalFeatureSettings.require(inputScheme->experimentalFeature());
|
throw Error("'type' attribute to specify input scheme is required but not provided");
|
||||||
res->scheme = inputScheme;
|
*std::move(schemeNameOpt);
|
||||||
fixupInput(*res);
|
});
|
||||||
return std::move(*res);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Input input;
|
auto raw = [&]() {
|
||||||
input.attrs = attrs;
|
// Return an input without a scheme; most operations will fail,
|
||||||
fixupInput(input);
|
// but not all of them. Doing this is to support those other
|
||||||
return input;
|
// operations which are supposed to be robust on
|
||||||
|
// unknown/uninterpretable inputs.
|
||||||
|
Input input;
|
||||||
|
input.attrs = attrs;
|
||||||
|
fixupInput(input);
|
||||||
|
return input;
|
||||||
|
};
|
||||||
|
|
||||||
|
std::shared_ptr<InputScheme> inputScheme = ({
|
||||||
|
auto i = inputSchemes->find(schemeName);
|
||||||
|
i == inputSchemes->end() ? nullptr : i->second;
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!inputScheme) return raw();
|
||||||
|
|
||||||
|
experimentalFeatureSettings.require(inputScheme->experimentalFeature());
|
||||||
|
|
||||||
|
auto allowedAttrs = inputScheme->allowedAttrs();
|
||||||
|
|
||||||
|
for (auto & [name, _] : attrs)
|
||||||
|
if (name != "type" && allowedAttrs.count(name) == 0)
|
||||||
|
throw Error("input attribute '%s' not supported by scheme '%s'", name, schemeName);
|
||||||
|
|
||||||
|
auto res = inputScheme->inputFromAttrs(attrs);
|
||||||
|
if (!res) return raw();
|
||||||
|
res->scheme = inputScheme;
|
||||||
|
fixupInput(*res);
|
||||||
|
return std::move(*res);
|
||||||
}
|
}
|
||||||
|
|
||||||
ParsedURL Input::toURL() const
|
ParsedURL Input::toURL() const
|
||||||
|
@ -176,6 +220,16 @@ std::pair<StorePath, Input> Input::fetch(ref<Store> store) const
|
||||||
return {std::move(storePath), input};
|
return {std::move(storePath), input};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
std::pair<ref<InputAccessor>, Input> Input::getAccessor(ref<Store> store) const
|
||||||
|
{
|
||||||
|
try {
|
||||||
|
return scheme->getAccessor(store, *this);
|
||||||
|
} catch (Error & e) {
|
||||||
|
e.addTrace({}, "while fetching the input '%s'", to_string());
|
||||||
|
throw;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Input Input::applyOverrides(
|
Input Input::applyOverrides(
|
||||||
std::optional<std::string> ref,
|
std::optional<std::string> ref,
|
||||||
std::optional<Hash> rev) const
|
std::optional<Hash> rev) const
|
||||||
|
@ -196,12 +250,13 @@ std::optional<Path> Input::getSourcePath() const
|
||||||
return scheme->getSourcePath(*this);
|
return scheme->getSourcePath(*this);
|
||||||
}
|
}
|
||||||
|
|
||||||
void Input::markChangedFile(
|
void Input::putFile(
|
||||||
std::string_view file,
|
const CanonPath & path,
|
||||||
|
std::string_view contents,
|
||||||
std::optional<std::string> commitMsg) const
|
std::optional<std::string> commitMsg) const
|
||||||
{
|
{
|
||||||
assert(scheme);
|
assert(scheme);
|
||||||
return scheme->markChangedFile(*this, file, commitMsg);
|
return scheme->putFile(*this, path, contents, commitMsg);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string Input::getName() const
|
std::string Input::getName() const
|
||||||
|
@ -292,14 +347,18 @@ Input InputScheme::applyOverrides(
|
||||||
return input;
|
return input;
|
||||||
}
|
}
|
||||||
|
|
||||||
std::optional<Path> InputScheme::getSourcePath(const Input & input)
|
std::optional<Path> InputScheme::getSourcePath(const Input & input) const
|
||||||
{
|
{
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
|
|
||||||
void InputScheme::markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg)
|
void InputScheme::putFile(
|
||||||
|
const Input & input,
|
||||||
|
const CanonPath & path,
|
||||||
|
std::string_view contents,
|
||||||
|
std::optional<std::string> commitMsg) const
|
||||||
{
|
{
|
||||||
assert(false);
|
throw Error("input '%s' does not support modifying file '%s'", input.to_string(), path);
|
||||||
}
|
}
|
||||||
|
|
||||||
void InputScheme::clone(const Input & input, const Path & destDir) const
|
void InputScheme::clone(const Input & input, const Path & destDir) const
|
||||||
|
@ -307,9 +366,26 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
|
||||||
throw Error("do not know how to clone input '%s'", input.to_string());
|
throw Error("do not know how to clone input '%s'", input.to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
std::optional<ExperimentalFeature> InputScheme::experimentalFeature()
|
std::pair<StorePath, Input> InputScheme::fetch(ref<Store> store, const Input & input)
|
||||||
|
{
|
||||||
|
auto [accessor, input2] = getAccessor(store, input);
|
||||||
|
auto storePath = accessor->root().fetchToStore(store, input2.getName());
|
||||||
|
return {storePath, input2};
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<ref<InputAccessor>, Input> InputScheme::getAccessor(ref<Store> store, const Input & input) const
|
||||||
|
{
|
||||||
|
throw UnimplementedError("InputScheme must implement fetch() or getAccessor()");
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<ExperimentalFeature> InputScheme::experimentalFeature() const
|
||||||
{
|
{
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
std::string publicKeys_to_string(const std::vector<PublicKey>& publicKeys)
|
||||||
|
{
|
||||||
|
return ((nlohmann::json) publicKeys).dump();
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,13 +3,14 @@
|
||||||
|
|
||||||
#include "types.hh"
|
#include "types.hh"
|
||||||
#include "hash.hh"
|
#include "hash.hh"
|
||||||
#include "path.hh"
|
#include "canon-path.hh"
|
||||||
#include "attrs.hh"
|
#include "attrs.hh"
|
||||||
#include "url.hh"
|
#include "url.hh"
|
||||||
|
|
||||||
#include <memory>
|
#include <memory>
|
||||||
|
#include <nlohmann/json_fwd.hpp>
|
||||||
|
|
||||||
namespace nix { class Store; }
|
namespace nix { class Store; class StorePath; struct InputAccessor; }
|
||||||
|
|
||||||
namespace nix::fetchers {
|
namespace nix::fetchers {
|
||||||
|
|
||||||
|
@ -82,6 +83,8 @@ public:
|
||||||
*/
|
*/
|
||||||
std::pair<StorePath, Input> fetch(ref<Store> store) const;
|
std::pair<StorePath, Input> fetch(ref<Store> store) const;
|
||||||
|
|
||||||
|
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store) const;
|
||||||
|
|
||||||
Input applyOverrides(
|
Input applyOverrides(
|
||||||
std::optional<std::string> ref,
|
std::optional<std::string> ref,
|
||||||
std::optional<Hash> rev) const;
|
std::optional<Hash> rev) const;
|
||||||
|
@ -90,8 +93,13 @@ public:
|
||||||
|
|
||||||
std::optional<Path> getSourcePath() const;
|
std::optional<Path> getSourcePath() const;
|
||||||
|
|
||||||
void markChangedFile(
|
/**
|
||||||
std::string_view file,
|
* Write a file to this input, for input types that support
|
||||||
|
* writing. Optionally commit the change (for e.g. Git inputs).
|
||||||
|
*/
|
||||||
|
void putFile(
|
||||||
|
const CanonPath & path,
|
||||||
|
std::string_view contents,
|
||||||
std::optional<std::string> commitMsg) const;
|
std::optional<std::string> commitMsg) const;
|
||||||
|
|
||||||
std::string getName() const;
|
std::string getName() const;
|
||||||
|
@ -126,6 +134,24 @@ struct InputScheme
|
||||||
|
|
||||||
virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) const = 0;
|
virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) const = 0;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* What is the name of the scheme?
|
||||||
|
*
|
||||||
|
* The `type` attribute is used to select which input scheme is
|
||||||
|
* used, and then the other fields are forwarded to that input
|
||||||
|
* scheme.
|
||||||
|
*/
|
||||||
|
virtual std::string_view schemeName() const = 0;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Allowed attributes in an attribute set that is converted to an
|
||||||
|
* input.
|
||||||
|
*
|
||||||
|
* `type` is not included from this set, because the `type` field is
|
||||||
|
parsed first to choose which scheme; `type` is always required.
|
||||||
|
*/
|
||||||
|
virtual StringSet allowedAttrs() const = 0;
|
||||||
|
|
||||||
virtual ParsedURL toURL(const Input & input) const;
|
virtual ParsedURL toURL(const Input & input) const;
|
||||||
|
|
||||||
virtual Input applyOverrides(
|
virtual Input applyOverrides(
|
||||||
|
@ -135,16 +161,22 @@ struct InputScheme
|
||||||
|
|
||||||
virtual void clone(const Input & input, const Path & destDir) const;
|
virtual void clone(const Input & input, const Path & destDir) const;
|
||||||
|
|
||||||
virtual std::optional<Path> getSourcePath(const Input & input);
|
virtual std::optional<Path> getSourcePath(const Input & input) const;
|
||||||
|
|
||||||
virtual void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg);
|
virtual void putFile(
|
||||||
|
const Input & input,
|
||||||
|
const CanonPath & path,
|
||||||
|
std::string_view contents,
|
||||||
|
std::optional<std::string> commitMsg) const;
|
||||||
|
|
||||||
virtual std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) = 0;
|
virtual std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input);
|
||||||
|
|
||||||
|
virtual std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Is this `InputScheme` part of an experimental feature?
|
* Is this `InputScheme` part of an experimental feature?
|
||||||
*/
|
*/
|
||||||
virtual std::optional<ExperimentalFeature> experimentalFeature();
|
virtual std::optional<ExperimentalFeature> experimentalFeature() const;
|
||||||
|
|
||||||
virtual bool isDirect(const Input & input) const
|
virtual bool isDirect(const Input & input) const
|
||||||
{ return true; }
|
{ return true; }
|
||||||
|
@ -152,4 +184,15 @@ struct InputScheme
|
||||||
|
|
||||||
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
|
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
|
||||||
|
|
||||||
|
nlohmann::json dumpRegisterInputSchemeInfo();
|
||||||
|
|
||||||
|
struct PublicKey
|
||||||
|
{
|
||||||
|
std::string type = "ssh-ed25519";
|
||||||
|
std::string key;
|
||||||
|
};
|
||||||
|
NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE_WITH_DEFAULT(PublicKey, type, key)
|
||||||
|
|
||||||
|
std::string publicKeys_to_string(const std::vector<PublicKey>&);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -36,11 +36,11 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
|
||||||
return isAllowed(absPath) && PosixSourceAccessor::pathExists(absPath);
|
return isAllowed(absPath) && PosixSourceAccessor::pathExists(absPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
Stat lstat(const CanonPath & path) override
|
std::optional<Stat> maybeLstat(const CanonPath & path) override
|
||||||
{
|
{
|
||||||
auto absPath = makeAbsPath(path);
|
auto absPath = makeAbsPath(path);
|
||||||
checkAllowed(absPath);
|
checkAllowed(absPath);
|
||||||
return PosixSourceAccessor::lstat(absPath);
|
return PosixSourceAccessor::maybeLstat(absPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
DirEntries readDirectory(const CanonPath & path) override
|
DirEntries readDirectory(const CanonPath & path) override
|
||||||
|
|
675
src/libfetchers/git-utils.cc
Normal file
675
src/libfetchers/git-utils.cc
Normal file
|
@ -0,0 +1,675 @@
|
||||||
|
#include "git-utils.hh"
|
||||||
|
#include "input-accessor.hh"
|
||||||
|
#include "cache.hh"
|
||||||
|
#include "finally.hh"
|
||||||
|
#include "processes.hh"
|
||||||
|
#include "signals.hh"
|
||||||
|
|
||||||
|
#include <boost/core/span.hpp>
|
||||||
|
|
||||||
|
#include <git2/blob.h>
|
||||||
|
#include <git2/commit.h>
|
||||||
|
#include <git2/config.h>
|
||||||
|
#include <git2/describe.h>
|
||||||
|
#include <git2/errors.h>
|
||||||
|
#include <git2/global.h>
|
||||||
|
#include <git2/object.h>
|
||||||
|
#include <git2/refs.h>
|
||||||
|
#include <git2/remote.h>
|
||||||
|
#include <git2/repository.h>
|
||||||
|
#include <git2/status.h>
|
||||||
|
#include <git2/submodule.h>
|
||||||
|
#include <git2/tree.h>
|
||||||
|
|
||||||
|
#include <unordered_set>
|
||||||
|
#include <queue>
|
||||||
|
#include <regex>
|
||||||
|
|
||||||
|
namespace std {
|
||||||
|
|
||||||
|
template<> struct hash<git_oid>
|
||||||
|
{
|
||||||
|
size_t operator()(const git_oid & oid) const
|
||||||
|
{
|
||||||
|
return * (size_t *) oid.id;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
std::ostream & operator << (std::ostream & str, const git_oid & oid)
|
||||||
|
{
|
||||||
|
str << git_oid_tostr_s(&oid);
|
||||||
|
return str;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool operator == (const git_oid & oid1, const git_oid & oid2)
|
||||||
|
{
|
||||||
|
return git_oid_equal(&oid1, &oid2);
|
||||||
|
}
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
// Some wrapper types that ensure that the git_*_free functions get called.
|
||||||
|
template<auto del>
|
||||||
|
struct Deleter
|
||||||
|
{
|
||||||
|
template <typename T>
|
||||||
|
void operator()(T * p) const { del(p); };
|
||||||
|
};
|
||||||
|
|
||||||
|
typedef std::unique_ptr<git_repository, Deleter<git_repository_free>> Repository;
|
||||||
|
typedef std::unique_ptr<git_tree_entry, Deleter<git_tree_entry_free>> TreeEntry;
|
||||||
|
typedef std::unique_ptr<git_tree, Deleter<git_tree_free>> Tree;
|
||||||
|
typedef std::unique_ptr<git_treebuilder, Deleter<git_treebuilder_free>> TreeBuilder;
|
||||||
|
typedef std::unique_ptr<git_blob, Deleter<git_blob_free>> Blob;
|
||||||
|
typedef std::unique_ptr<git_object, Deleter<git_object_free>> Object;
|
||||||
|
typedef std::unique_ptr<git_commit, Deleter<git_commit_free>> Commit;
|
||||||
|
typedef std::unique_ptr<git_reference, Deleter<git_reference_free>> Reference;
|
||||||
|
typedef std::unique_ptr<git_describe_result, Deleter<git_describe_result_free>> DescribeResult;
|
||||||
|
typedef std::unique_ptr<git_status_list, Deleter<git_status_list_free>> StatusList;
|
||||||
|
typedef std::unique_ptr<git_remote, Deleter<git_remote_free>> Remote;
|
||||||
|
typedef std::unique_ptr<git_config, Deleter<git_config_free>> GitConfig;
|
||||||
|
typedef std::unique_ptr<git_config_iterator, Deleter<git_config_iterator_free>> ConfigIterator;
|
||||||
|
|
||||||
|
// A helper to ensure that we don't leak objects returned by libgit2.
|
||||||
|
template<typename T>
|
||||||
|
struct Setter
|
||||||
|
{
|
||||||
|
T & t;
|
||||||
|
typename T::pointer p = nullptr;
|
||||||
|
|
||||||
|
Setter(T & t) : t(t) { }
|
||||||
|
|
||||||
|
~Setter() { if (p) t = T(p); }
|
||||||
|
|
||||||
|
operator typename T::pointer * () { return &p; }
|
||||||
|
};
|
||||||
|
|
||||||
|
Hash toHash(const git_oid & oid)
|
||||||
|
{
|
||||||
|
#ifdef GIT_EXPERIMENTAL_SHA256
|
||||||
|
assert(oid.type == GIT_OID_SHA1);
|
||||||
|
#endif
|
||||||
|
Hash hash(htSHA1);
|
||||||
|
memcpy(hash.hash, oid.id, hash.hashSize);
|
||||||
|
return hash;
|
||||||
|
}
|
||||||
|
|
||||||
|
static void initLibGit2()
|
||||||
|
{
|
||||||
|
if (git_libgit2_init() < 0)
|
||||||
|
throw Error("initialising libgit2: %s", git_error_last()->message);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Convert a Nix hash to a libgit2 OID, throwing if its textual form is
   not a valid Git revision. */
git_oid hashToOID(const Hash & hash)
{
    git_oid result;
    auto rev = hash.gitRev();
    if (git_oid_fromstr(&result, rev.c_str()))
        throw Error("cannot convert '%s' to a Git OID", rev);
    return result;
}
|
||||||
|
|
||||||
|
/* Look up a Git object of any type by OID, throwing if the lookup
   fails. */
Object lookupObject(git_repository * repo, const git_oid & oid)
{
    Object result;
    if (git_object_lookup(Setter(result), repo, &oid, GIT_OBJECT_ANY))
        throw Error("getting Git object '%s': %s", oid, git_error_last()->message);
    return result;
}
|
||||||
|
|
||||||
|
// Peel a Git object (e.g. a tag or a commit) until an object of the
// requested `type` is reached, returning it as the owning wrapper T
// (e.g. Commit or Tree).
//
// NOTE(review): the `repo` parameter is currently unused here — kept
// for call-site uniformity; confirm before removing.
template<typename T>
T peelObject(git_repository * repo, git_object * obj, git_object_t type)
{
    T obj2;
    // The double cast turns Setter's `T::pointer *` (e.g. git_commit **)
    // into the `git_object **` out-parameter libgit2 expects.
    if (git_object_peel((git_object * *) (typename T::pointer *) Setter(obj2), obj, type)) {
        auto err = git_error_last();
        throw Error("peeling Git object '%s': %s", git_object_id(obj), err->message);
    }
    return obj2;
}
|
||||||
|
|
||||||
|
struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||||
|
{
|
||||||
|
CanonPath path;
|
||||||
|
Repository repo;
|
||||||
|
|
||||||
|
GitRepoImpl(CanonPath _path, bool create, bool bare)
|
||||||
|
: path(std::move(_path))
|
||||||
|
{
|
||||||
|
initLibGit2();
|
||||||
|
|
||||||
|
if (pathExists(path.abs())) {
|
||||||
|
if (git_repository_open(Setter(repo), path.c_str()))
|
||||||
|
throw Error("opening Git repository '%s': %s", path, git_error_last()->message);
|
||||||
|
} else {
|
||||||
|
if (git_repository_init(Setter(repo), path.c_str(), bare))
|
||||||
|
throw Error("creating Git repository '%s': %s", path, git_error_last()->message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
operator git_repository * ()
|
||||||
|
{
|
||||||
|
return repo.get();
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Count the number of commits reachable from `rev` (the equivalent of
   `git rev-list --count <rev>`), using a breadth-first traversal of
   the commit graph with a visited set so merge ancestors are counted
   once. */
uint64_t getRevCount(const Hash & rev) override
{
    std::unordered_set<git_oid> done;   // OIDs already counted
    std::queue<Commit> todo;            // commits still to expand

    todo.push(peelObject<Commit>(*this, lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT));

    while (auto commit = pop(todo)) {
        // Skip commits already reached via another path through a merge.
        if (!done.insert(*git_commit_id(commit->get())).second) continue;

        for (size_t n = 0; n < git_commit_parentcount(commit->get()); ++n) {
            git_commit * parent;
            if (git_commit_parent(&parent, commit->get(), n))
                throw Error("getting parent of Git commit '%s': %s", *git_commit_id(commit->get()), git_error_last()->message);
            todo.push(Commit(parent));
        }
    }

    return done.size();
}
|
||||||
|
|
||||||
|
uint64_t getLastModified(const Hash & rev) override
|
||||||
|
{
|
||||||
|
auto commit = peelObject<Commit>(*this, lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT);
|
||||||
|
|
||||||
|
return git_commit_time(commit.get());
|
||||||
|
}
|
||||||
|
|
||||||
|
bool isShallow() override
|
||||||
|
{
|
||||||
|
return git_repository_is_shallow(*this);
|
||||||
|
}
|
||||||
|
|
||||||
|
Hash resolveRef(std::string ref) override
|
||||||
|
{
|
||||||
|
// Handle revisions used as refs.
|
||||||
|
{
|
||||||
|
git_oid oid;
|
||||||
|
if (git_oid_fromstr(&oid, ref.c_str()) == 0)
|
||||||
|
return toHash(oid);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Resolve short names like 'master'.
|
||||||
|
Reference ref2;
|
||||||
|
if (!git_reference_dwim(Setter(ref2), *this, ref.c_str()))
|
||||||
|
ref = git_reference_name(ref2.get());
|
||||||
|
|
||||||
|
// Resolve full references like 'refs/heads/master'.
|
||||||
|
Reference ref3;
|
||||||
|
if (git_reference_lookup(Setter(ref3), *this, ref.c_str()))
|
||||||
|
throw Error("resolving Git reference '%s': %s", ref, git_error_last()->message);
|
||||||
|
|
||||||
|
auto oid = git_reference_target(ref3.get());
|
||||||
|
if (!oid)
|
||||||
|
throw Error("cannot get OID for Git reference '%s'", git_reference_name(ref3.get()));
|
||||||
|
|
||||||
|
return toHash(*oid);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<Submodule> parseSubmodules(const CanonPath & configFile)
|
||||||
|
{
|
||||||
|
GitConfig config;
|
||||||
|
if (git_config_open_ondisk(Setter(config), configFile.abs().c_str()))
|
||||||
|
throw Error("parsing .gitmodules file: %s", git_error_last()->message);
|
||||||
|
|
||||||
|
ConfigIterator it;
|
||||||
|
if (git_config_iterator_glob_new(Setter(it), config.get(), "^submodule\\..*\\.(path|url|branch)$"))
|
||||||
|
throw Error("iterating over .gitmodules: %s", git_error_last()->message);
|
||||||
|
|
||||||
|
std::map<std::string, std::string> entries;
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
git_config_entry * entry = nullptr;
|
||||||
|
if (auto err = git_config_next(&entry, it.get())) {
|
||||||
|
if (err == GIT_ITEROVER) break;
|
||||||
|
throw Error("iterating over .gitmodules: %s", git_error_last()->message);
|
||||||
|
}
|
||||||
|
entries.emplace(entry->name + 10, entry->value);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<Submodule> result;
|
||||||
|
|
||||||
|
for (auto & [key, value] : entries) {
|
||||||
|
if (!hasSuffix(key, ".path")) continue;
|
||||||
|
std::string key2(key, 0, key.size() - 5);
|
||||||
|
auto path = CanonPath(value);
|
||||||
|
result.push_back(Submodule {
|
||||||
|
.path = path,
|
||||||
|
.url = entries[key2 + ".url"],
|
||||||
|
.branch = entries[key2 + ".branch"],
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper for statusCallback below: adapts libgit2's C-style status
// callback to the std::function stashed in `payload`.
static int statusCallbackTrampoline(const char * path, unsigned int statusFlags, void * payload)
{
    auto & callback = *((std::function<int(const char * path, unsigned int statusFlags)> *) payload);
    return callback(path, statusFlags);
}
|
||||||
|
|
||||||
|
WorkdirInfo getWorkdirInfo() override
|
||||||
|
{
|
||||||
|
WorkdirInfo info;
|
||||||
|
|
||||||
|
/* Get the head revision, if any. */
|
||||||
|
git_oid headRev;
|
||||||
|
if (auto err = git_reference_name_to_id(&headRev, *this, "HEAD")) {
|
||||||
|
if (err != GIT_ENOTFOUND)
|
||||||
|
throw Error("resolving HEAD: %s", git_error_last()->message);
|
||||||
|
} else
|
||||||
|
info.headRev = toHash(headRev);
|
||||||
|
|
||||||
|
/* Get all tracked files and determine whether the working
|
||||||
|
directory is dirty. */
|
||||||
|
std::function<int(const char * path, unsigned int statusFlags)> statusCallback = [&](const char * path, unsigned int statusFlags)
|
||||||
|
{
|
||||||
|
if (!(statusFlags & GIT_STATUS_INDEX_DELETED) &&
|
||||||
|
!(statusFlags & GIT_STATUS_WT_DELETED))
|
||||||
|
info.files.insert(CanonPath(path));
|
||||||
|
if (statusFlags != GIT_STATUS_CURRENT)
|
||||||
|
info.isDirty = true;
|
||||||
|
return 0;
|
||||||
|
};
|
||||||
|
|
||||||
|
git_status_options options = GIT_STATUS_OPTIONS_INIT;
|
||||||
|
options.flags |= GIT_STATUS_OPT_INCLUDE_UNMODIFIED;
|
||||||
|
options.flags |= GIT_STATUS_OPT_EXCLUDE_SUBMODULES;
|
||||||
|
if (git_status_foreach_ext(*this, &options, &statusCallbackTrampoline, &statusCallback))
|
||||||
|
throw Error("getting working directory status: %s", git_error_last()->message);
|
||||||
|
|
||||||
|
/* Get submodule info. */
|
||||||
|
auto modulesFile = path + ".gitmodules";
|
||||||
|
if (pathExists(modulesFile.abs()))
|
||||||
|
info.submodules = parseSubmodules(modulesFile);
|
||||||
|
|
||||||
|
return info;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<std::string> getWorkdirRef() override
|
||||||
|
{
|
||||||
|
Reference ref;
|
||||||
|
if (git_reference_lookup(Setter(ref), *this, "HEAD"))
|
||||||
|
throw Error("looking up HEAD: %s", git_error_last()->message);
|
||||||
|
|
||||||
|
if (auto target = git_reference_symbolic_target(ref.get()))
|
||||||
|
return target;
|
||||||
|
|
||||||
|
return std::nullopt;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<std::tuple<Submodule, Hash>> getSubmodules(const Hash & rev) override;
|
||||||
|
|
||||||
|
std::string resolveSubmoduleUrl(
|
||||||
|
const std::string & url,
|
||||||
|
const std::string & base) override
|
||||||
|
{
|
||||||
|
git_buf buf = GIT_BUF_INIT;
|
||||||
|
if (git_submodule_resolve_url(&buf, *this, url.c_str()))
|
||||||
|
throw Error("resolving Git submodule URL '%s'", url);
|
||||||
|
Finally cleanup = [&]() { git_buf_dispose(&buf); };
|
||||||
|
|
||||||
|
std::string res(buf.ptr);
|
||||||
|
|
||||||
|
if (!hasPrefix(res, "/") && res.find("://") == res.npos)
|
||||||
|
res = parseURL(base + "/" + res).canonicalise().to_string();
|
||||||
|
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool hasObject(const Hash & oid_) override
|
||||||
|
{
|
||||||
|
auto oid = hashToOID(oid_);
|
||||||
|
|
||||||
|
Object obj;
|
||||||
|
if (auto errCode = git_object_lookup(Setter(obj), *this, &oid, GIT_OBJECT_ANY)) {
|
||||||
|
if (errCode == GIT_ENOTFOUND) return false;
|
||||||
|
auto err = git_error_last();
|
||||||
|
throw Error("getting Git object '%s': %s", oid, err->message);
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
ref<InputAccessor> getAccessor(const Hash & rev) override;
|
||||||
|
|
||||||
|
static int sidebandProgressCallback(const char * str, int len, void * payload)
|
||||||
|
{
|
||||||
|
auto act = (Activity *) payload;
|
||||||
|
act->result(resFetchStatus, trim(std::string_view(str, len)));
|
||||||
|
return _isInterrupted ? -1 : 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
static int transferProgressCallback(const git_indexer_progress * stats, void * payload)
|
||||||
|
{
|
||||||
|
auto act = (Activity *) payload;
|
||||||
|
act->result(resFetchStatus,
|
||||||
|
fmt("%d/%d objects received, %d/%d deltas indexed, %.1f MiB",
|
||||||
|
stats->received_objects,
|
||||||
|
stats->total_objects,
|
||||||
|
stats->indexed_deltas,
|
||||||
|
stats->total_deltas,
|
||||||
|
stats->received_bytes / (1024.0 * 1024.0)));
|
||||||
|
return _isInterrupted ? -1 : 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
void fetch(
|
||||||
|
const std::string & url,
|
||||||
|
const std::string & refspec,
|
||||||
|
bool shallow) override
|
||||||
|
{
|
||||||
|
Activity act(*logger, lvlTalkative, actFetchTree, fmt("fetching Git repository '%s'", url));
|
||||||
|
|
||||||
|
Remote remote;
|
||||||
|
|
||||||
|
if (git_remote_create_anonymous(Setter(remote), *this, url.c_str()))
|
||||||
|
throw Error("cannot create Git remote '%s': %s", url, git_error_last()->message);
|
||||||
|
|
||||||
|
char * refspecs[] = {(char *) refspec.c_str()};
|
||||||
|
git_strarray refspecs2 {
|
||||||
|
.strings = refspecs,
|
||||||
|
.count = 1
|
||||||
|
};
|
||||||
|
|
||||||
|
git_fetch_options opts = GIT_FETCH_OPTIONS_INIT;
|
||||||
|
opts.depth = shallow ? 1 : GIT_FETCH_DEPTH_FULL;
|
||||||
|
opts.callbacks.payload = &act;
|
||||||
|
opts.callbacks.sideband_progress = sidebandProgressCallback;
|
||||||
|
opts.callbacks.transfer_progress = transferProgressCallback;
|
||||||
|
|
||||||
|
if (git_remote_fetch(remote.get(), &refspecs2, &opts, nullptr))
|
||||||
|
throw Error("fetching '%s' from '%s': %s", refspec, url, git_error_last()->message);
|
||||||
|
}
|
||||||
|
|
||||||
|
void verifyCommit(
|
||||||
|
const Hash & rev,
|
||||||
|
const std::vector<fetchers::PublicKey> & publicKeys) override
|
||||||
|
{
|
||||||
|
// Create ad-hoc allowedSignersFile and populate it with publicKeys
|
||||||
|
auto allowedSignersFile = createTempFile().second;
|
||||||
|
std::string allowedSigners;
|
||||||
|
for (const fetchers::PublicKey & k : publicKeys) {
|
||||||
|
if (k.type != "ssh-dsa"
|
||||||
|
&& k.type != "ssh-ecdsa"
|
||||||
|
&& k.type != "ssh-ecdsa-sk"
|
||||||
|
&& k.type != "ssh-ed25519"
|
||||||
|
&& k.type != "ssh-ed25519-sk"
|
||||||
|
&& k.type != "ssh-rsa")
|
||||||
|
throw Error("Unknown key type '%s'.\n"
|
||||||
|
"Please use one of\n"
|
||||||
|
"- ssh-dsa\n"
|
||||||
|
" ssh-ecdsa\n"
|
||||||
|
" ssh-ecdsa-sk\n"
|
||||||
|
" ssh-ed25519\n"
|
||||||
|
" ssh-ed25519-sk\n"
|
||||||
|
" ssh-rsa", k.type);
|
||||||
|
allowedSigners += "* " + k.type + " " + k.key + "\n";
|
||||||
|
}
|
||||||
|
writeFile(allowedSignersFile, allowedSigners);
|
||||||
|
|
||||||
|
// Run verification command
|
||||||
|
auto [status, output] = runProgram(RunOptions {
|
||||||
|
.program = "git",
|
||||||
|
.args = {
|
||||||
|
"-c",
|
||||||
|
"gpg.ssh.allowedSignersFile=" + allowedSignersFile,
|
||||||
|
"-C", path.abs(),
|
||||||
|
"verify-commit",
|
||||||
|
rev.gitRev()
|
||||||
|
},
|
||||||
|
.mergeStderrToStdout = true,
|
||||||
|
});
|
||||||
|
|
||||||
|
/* Evaluate result through status code and checking if public
|
||||||
|
key fingerprints appear on stderr. This is neccessary
|
||||||
|
because the git command might also succeed due to the
|
||||||
|
commit being signed by gpg keys that are present in the
|
||||||
|
users key agent. */
|
||||||
|
std::string re = R"(Good "git" signature for \* with .* key SHA256:[)";
|
||||||
|
for (const fetchers::PublicKey & k : publicKeys){
|
||||||
|
// Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally
|
||||||
|
auto fingerprint = trim(hashString(htSHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
|
||||||
|
auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" );
|
||||||
|
re += "(" + escaped_fingerprint + ")";
|
||||||
|
}
|
||||||
|
re += "]";
|
||||||
|
if (status == 0 && std::regex_search(output, std::regex(re)))
|
||||||
|
printTalkative("Signature verification on commit %s succeeded.", rev.gitRev());
|
||||||
|
else
|
||||||
|
throw Error("Commit signature verification on commit %s failed: %s", rev.gitRev(), output);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
ref<GitRepo> GitRepo::openRepo(const CanonPath & path, bool create, bool bare)
|
||||||
|
{
|
||||||
|
return make_ref<GitRepoImpl>(path, create, bare);
|
||||||
|
}
|
||||||
|
|
||||||
|
struct GitInputAccessor : InputAccessor
|
||||||
|
{
|
||||||
|
ref<GitRepoImpl> repo;
|
||||||
|
Tree root;
|
||||||
|
|
||||||
|
GitInputAccessor(ref<GitRepoImpl> repo_, const Hash & rev)
|
||||||
|
: repo(repo_)
|
||||||
|
, root(peelObject<Tree>(*repo, lookupObject(*repo, hashToOID(rev)).get(), GIT_OBJECT_TREE))
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string readBlob(const CanonPath & path, bool symlink)
|
||||||
|
{
|
||||||
|
auto blob = getBlob(path, symlink);
|
||||||
|
|
||||||
|
auto data = std::string_view((const char *) git_blob_rawcontent(blob.get()), git_blob_rawsize(blob.get()));
|
||||||
|
|
||||||
|
return std::string(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string readFile(const CanonPath & path) override
|
||||||
|
{
|
||||||
|
return readBlob(path, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
bool pathExists(const CanonPath & path) override
|
||||||
|
{
|
||||||
|
return path.isRoot() ? true : (bool) lookup(path);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<Stat> maybeLstat(const CanonPath & path) override
|
||||||
|
{
|
||||||
|
if (path.isRoot())
|
||||||
|
return Stat { .type = tDirectory };
|
||||||
|
|
||||||
|
auto entry = lookup(path);
|
||||||
|
if (!entry)
|
||||||
|
return std::nullopt;
|
||||||
|
|
||||||
|
auto mode = git_tree_entry_filemode(entry);
|
||||||
|
|
||||||
|
if (mode == GIT_FILEMODE_TREE)
|
||||||
|
return Stat { .type = tDirectory };
|
||||||
|
|
||||||
|
else if (mode == GIT_FILEMODE_BLOB)
|
||||||
|
return Stat { .type = tRegular };
|
||||||
|
|
||||||
|
else if (mode == GIT_FILEMODE_BLOB_EXECUTABLE)
|
||||||
|
return Stat { .type = tRegular, .isExecutable = true };
|
||||||
|
|
||||||
|
else if (mode == GIT_FILEMODE_LINK)
|
||||||
|
return Stat { .type = tSymlink };
|
||||||
|
|
||||||
|
else if (mode == GIT_FILEMODE_COMMIT)
|
||||||
|
// Treat submodules as an empty directory.
|
||||||
|
return Stat { .type = tDirectory };
|
||||||
|
|
||||||
|
else
|
||||||
|
throw Error("file '%s' has an unsupported Git file type");
|
||||||
|
}
|
||||||
|
|
||||||
|
DirEntries readDirectory(const CanonPath & path) override
|
||||||
|
{
|
||||||
|
return std::visit(overloaded {
|
||||||
|
[&](Tree tree) {
|
||||||
|
DirEntries res;
|
||||||
|
|
||||||
|
auto count = git_tree_entrycount(tree.get());
|
||||||
|
|
||||||
|
for (size_t n = 0; n < count; ++n) {
|
||||||
|
auto entry = git_tree_entry_byindex(tree.get(), n);
|
||||||
|
// FIXME: add to cache
|
||||||
|
res.emplace(std::string(git_tree_entry_name(entry)), DirEntry{});
|
||||||
|
}
|
||||||
|
|
||||||
|
return res;
|
||||||
|
},
|
||||||
|
[&](Submodule) {
|
||||||
|
return DirEntries();
|
||||||
|
}
|
||||||
|
}, getTree(path));
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string readLink(const CanonPath & path) override
|
||||||
|
{
|
||||||
|
return readBlob(path, true);
|
||||||
|
}
|
||||||
|
|
||||||
|
Hash getSubmoduleRev(const CanonPath & path)
|
||||||
|
{
|
||||||
|
auto entry = need(path);
|
||||||
|
|
||||||
|
if (git_tree_entry_type(entry) != GIT_OBJECT_COMMIT)
|
||||||
|
throw Error("'%s' is not a submodule", showPath(path));
|
||||||
|
|
||||||
|
return toHash(*git_tree_entry_id(entry));
|
||||||
|
}
|
||||||
|
|
||||||
|
std::map<CanonPath, TreeEntry> lookupCache;
|
||||||
|
|
||||||
|
/* Recursively look up 'path' relative to the root. */
|
||||||
|
git_tree_entry * lookup(const CanonPath & path)
|
||||||
|
{
|
||||||
|
if (path.isRoot()) return nullptr;
|
||||||
|
|
||||||
|
auto i = lookupCache.find(path);
|
||||||
|
if (i == lookupCache.end()) {
|
||||||
|
TreeEntry entry;
|
||||||
|
if (auto err = git_tree_entry_bypath(Setter(entry), root.get(), std::string(path.rel()).c_str())) {
|
||||||
|
if (err != GIT_ENOTFOUND)
|
||||||
|
throw Error("looking up '%s': %s", showPath(path), git_error_last()->message);
|
||||||
|
}
|
||||||
|
|
||||||
|
i = lookupCache.emplace(path, std::move(entry)).first;
|
||||||
|
}
|
||||||
|
|
||||||
|
return &*i->second;
|
||||||
|
}
|
||||||
|
|
||||||
|
git_tree_entry * need(const CanonPath & path)
|
||||||
|
{
|
||||||
|
auto entry = lookup(path);
|
||||||
|
if (!entry)
|
||||||
|
throw Error("'%s' does not exist", showPath(path));
|
||||||
|
return entry;
|
||||||
|
}
|
||||||
|
|
||||||
|
struct Submodule { };
|
||||||
|
|
||||||
|
std::variant<Tree, Submodule> getTree(const CanonPath & path)
|
||||||
|
{
|
||||||
|
if (path.isRoot()) {
|
||||||
|
Tree tree;
|
||||||
|
if (git_tree_dup(Setter(tree), root.get()))
|
||||||
|
throw Error("duplicating directory '%s': %s", showPath(path), git_error_last()->message);
|
||||||
|
return tree;
|
||||||
|
}
|
||||||
|
|
||||||
|
auto entry = need(path);
|
||||||
|
|
||||||
|
if (git_tree_entry_type(entry) == GIT_OBJECT_COMMIT)
|
||||||
|
return Submodule();
|
||||||
|
|
||||||
|
if (git_tree_entry_type(entry) != GIT_OBJECT_TREE)
|
||||||
|
throw Error("'%s' is not a directory", showPath(path));
|
||||||
|
|
||||||
|
Tree tree;
|
||||||
|
if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *repo, entry))
|
||||||
|
throw Error("looking up directory '%s': %s", showPath(path), git_error_last()->message);
|
||||||
|
|
||||||
|
return tree;
|
||||||
|
}
|
||||||
|
|
||||||
|
Blob getBlob(const CanonPath & path, bool expectSymlink)
|
||||||
|
{
|
||||||
|
auto notExpected = [&]()
|
||||||
|
{
|
||||||
|
throw Error(
|
||||||
|
expectSymlink
|
||||||
|
? "'%s' is not a symlink"
|
||||||
|
: "'%s' is not a regular file",
|
||||||
|
showPath(path));
|
||||||
|
};
|
||||||
|
|
||||||
|
if (path.isRoot()) notExpected();
|
||||||
|
|
||||||
|
auto entry = need(path);
|
||||||
|
|
||||||
|
if (git_tree_entry_type(entry) != GIT_OBJECT_BLOB)
|
||||||
|
notExpected();
|
||||||
|
|
||||||
|
auto mode = git_tree_entry_filemode(entry);
|
||||||
|
if (expectSymlink) {
|
||||||
|
if (mode != GIT_FILEMODE_LINK)
|
||||||
|
notExpected();
|
||||||
|
} else {
|
||||||
|
if (mode != GIT_FILEMODE_BLOB && mode != GIT_FILEMODE_BLOB_EXECUTABLE)
|
||||||
|
notExpected();
|
||||||
|
}
|
||||||
|
|
||||||
|
Blob blob;
|
||||||
|
if (git_tree_entry_to_object((git_object * *) (git_blob * *) Setter(blob), *repo, entry))
|
||||||
|
throw Error("looking up file '%s': %s", showPath(path), git_error_last()->message);
|
||||||
|
|
||||||
|
return blob;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
ref<InputAccessor> GitRepoImpl::getAccessor(const Hash & rev)
|
||||||
|
{
|
||||||
|
return make_ref<GitInputAccessor>(ref<GitRepoImpl>(shared_from_this()), rev);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<std::tuple<GitRepoImpl::Submodule, Hash>> GitRepoImpl::getSubmodules(const Hash & rev)
|
||||||
|
{
|
||||||
|
/* Read the .gitmodules files from this revision. */
|
||||||
|
CanonPath modulesFile(".gitmodules");
|
||||||
|
|
||||||
|
auto accessor = getAccessor(rev);
|
||||||
|
if (!accessor->pathExists(modulesFile)) return {};
|
||||||
|
|
||||||
|
/* Parse it and get the revision of each submodule. */
|
||||||
|
auto configS = accessor->readFile(modulesFile);
|
||||||
|
|
||||||
|
auto [fdTemp, pathTemp] = createTempFile("nix-git-submodules");
|
||||||
|
writeFull(fdTemp.get(), configS);
|
||||||
|
|
||||||
|
std::vector<std::tuple<Submodule, Hash>> result;
|
||||||
|
|
||||||
|
for (auto & submodule : parseSubmodules(CanonPath(pathTemp))) {
|
||||||
|
auto rev = accessor.dynamic_pointer_cast<GitInputAccessor>()->getSubmoduleRev(submodule.path);
|
||||||
|
result.push_back({std::move(submodule), rev});
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
90
src/libfetchers/git-utils.hh
Normal file
90
src/libfetchers/git-utils.hh
Normal file
|
@ -0,0 +1,90 @@
|
||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include "input-accessor.hh"
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
namespace fetchers { struct PublicKey; }
|
||||||
|
|
||||||
|
/**
 * Abstract interface to a (possibly bare) Git repository, implemented
 * with libgit2 rather than by shelling out to the git CLI.
 */
struct GitRepo
{
    virtual ~GitRepo()
    { }

    /** Open the repository at `path`, initialising it first if
        `create` is set (as a bare repo if `bare` is also set). */
    static ref<GitRepo> openRepo(const CanonPath & path, bool create = false, bool bare = false);

    /** Number of commits reachable from `rev` (cf. `git rev-list --count`). */
    virtual uint64_t getRevCount(const Hash & rev) = 0;

    /** Commit time of `rev`, in seconds since the epoch. */
    virtual uint64_t getLastModified(const Hash & rev) = 0;

    /** Whether this is a shallow clone. */
    virtual bool isShallow() = 0;

    /* Return the commit hash to which a ref points. */
    virtual Hash resolveRef(std::string ref) = 0;

    /**
     * Info about a submodule.
     */
    struct Submodule
    {
        CanonPath path;       // path of the submodule within the workdir
        std::string url;      // URL it is fetched from (possibly relative)
        std::string branch;   // branch to track (may be empty)
    };

    struct WorkdirInfo
    {
        // Whether the working directory differs from HEAD.
        bool isDirty = false;

        /* The checked out commit, or nullopt if there are no commits
           in the repo yet. */
        std::optional<Hash> headRev;

        /* All files in the working directory that are unchanged,
           modified or added, but excluding deleted files. */
        std::set<CanonPath> files;

        /* The submodules listed in .gitmodules of this workdir. */
        std::vector<Submodule> submodules;
    };

    /** Inspect the working directory: HEAD, dirtiness, tracked files,
        submodules. */
    virtual WorkdirInfo getWorkdirInfo() = 0;

    /* Get the ref that HEAD points to. */
    virtual std::optional<std::string> getWorkdirRef() = 0;

    /**
     * Return the submodules of this repo at the indicated revision,
     * along with the revision of each submodule.
     */
    virtual std::vector<std::tuple<Submodule, Hash>> getSubmodules(const Hash & rev) = 0;

    /** Resolve a possibly-relative submodule URL against `base`. */
    virtual std::string resolveSubmoduleUrl(
        const std::string & url,
        const std::string & base) = 0;

    struct TarballInfo
    {
        Hash treeHash;
        time_t lastModified;
    };

    /** Whether the repository's object store contains object `oid`. */
    virtual bool hasObject(const Hash & oid) = 0;

    /** An accessor for reading the file tree of revision `rev`. */
    virtual ref<InputAccessor> getAccessor(const Hash & rev) = 0;

    /** Fetch `refspec` from `url`; `shallow` requests a depth-1 fetch. */
    virtual void fetch(
        const std::string & url,
        const std::string & refspec,
        bool shallow) = 0;

    /**
     * Verify that commit `rev` is signed by one of the keys in
     * `publicKeys`. Throw an error if it isn't.
     */
    virtual void verifyCommit(
        const Hash & rev,
        const std::vector<fetchers::PublicKey> & publicKeys) = 0;
};
|
||||||
|
|
||||||
|
}
|
|
@ -1,12 +1,16 @@
|
||||||
#include "fetchers.hh"
|
#include "fetchers.hh"
|
||||||
|
#include "users.hh"
|
||||||
#include "cache.hh"
|
#include "cache.hh"
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
#include "tarfile.hh"
|
#include "tarfile.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "url-parts.hh"
|
#include "url-parts.hh"
|
||||||
#include "pathlocks.hh"
|
#include "pathlocks.hh"
|
||||||
#include "util.hh"
|
#include "processes.hh"
|
||||||
#include "git.hh"
|
#include "git.hh"
|
||||||
|
#include "fs-input-accessor.hh"
|
||||||
|
#include "mounted-input-accessor.hh"
|
||||||
|
#include "git-utils.hh"
|
||||||
|
|
||||||
#include "fetch-settings.hh"
|
#include "fetch-settings.hh"
|
||||||
|
|
||||||
|
@ -132,126 +136,19 @@ std::optional<std::string> readHeadCached(const std::string & actualUrl)
|
||||||
return std::nullopt;
|
return std::nullopt;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool isNotDotGitDirectory(const Path & path)
|
std::vector<PublicKey> getPublicKeys(const Attrs & attrs)
|
||||||
{
|
{
|
||||||
return baseNameOf(path) != ".git";
|
std::vector<PublicKey> publicKeys;
|
||||||
|
if (attrs.contains("publicKeys")) {
|
||||||
|
nlohmann::json publicKeysJson = nlohmann::json::parse(getStrAttr(attrs, "publicKeys"));
|
||||||
|
ensureType(publicKeysJson, nlohmann::json::value_t::array);
|
||||||
|
publicKeys = publicKeysJson.get<std::vector<PublicKey>>();
|
||||||
|
}
|
||||||
|
if (attrs.contains("publicKey"))
|
||||||
|
publicKeys.push_back(PublicKey{maybeGetStrAttr(attrs, "keytype").value_or("ssh-ed25519"),getStrAttr(attrs, "publicKey")});
|
||||||
|
return publicKeys;
|
||||||
}
|
}
|
||||||
|
|
||||||
struct WorkdirInfo
|
|
||||||
{
|
|
||||||
bool clean = false;
|
|
||||||
bool hasHead = false;
|
|
||||||
};
|
|
||||||
|
|
||||||
// Returns whether a git workdir is clean and has commits.
|
|
||||||
WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir)
|
|
||||||
{
|
|
||||||
const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
|
|
||||||
std::string gitDir(".git");
|
|
||||||
|
|
||||||
auto env = getEnv();
|
|
||||||
// Set LC_ALL to C: because we rely on the error messages from git rev-parse to determine what went wrong
|
|
||||||
// that way unknown errors can lead to a failure instead of continuing through the wrong code path
|
|
||||||
env["LC_ALL"] = "C";
|
|
||||||
|
|
||||||
/* Check whether HEAD points to something that looks like a commit,
|
|
||||||
since that is the refrence we want to use later on. */
|
|
||||||
auto result = runProgram(RunOptions {
|
|
||||||
.program = "git",
|
|
||||||
.args = { "-C", workdir, "--git-dir", gitDir, "rev-parse", "--verify", "--no-revs", "HEAD^{commit}" },
|
|
||||||
.environment = env,
|
|
||||||
.mergeStderrToStdout = true
|
|
||||||
});
|
|
||||||
auto exitCode = WEXITSTATUS(result.first);
|
|
||||||
auto errorMessage = result.second;
|
|
||||||
|
|
||||||
if (errorMessage.find("fatal: not a git repository") != std::string::npos) {
|
|
||||||
throw Error("'%s' is not a Git repository", workdir);
|
|
||||||
} else if (errorMessage.find("fatal: Needed a single revision") != std::string::npos) {
|
|
||||||
// indicates that the repo does not have any commits
|
|
||||||
// we want to proceed and will consider it dirty later
|
|
||||||
} else if (exitCode != 0) {
|
|
||||||
// any other errors should lead to a failure
|
|
||||||
throw Error("getting the HEAD of the Git tree '%s' failed with exit code %d:\n%s", workdir, exitCode, errorMessage);
|
|
||||||
}
|
|
||||||
|
|
||||||
bool clean = false;
|
|
||||||
bool hasHead = exitCode == 0;
|
|
||||||
|
|
||||||
try {
|
|
||||||
if (hasHead) {
|
|
||||||
// Using git diff is preferrable over lower-level operations here,
|
|
||||||
// because its conceptually simpler and we only need the exit code anyways.
|
|
||||||
auto gitDiffOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "diff", "HEAD", "--quiet"});
|
|
||||||
if (!submodules) {
|
|
||||||
// Changes in submodules should only make the tree dirty
|
|
||||||
// when those submodules will be copied as well.
|
|
||||||
gitDiffOpts.emplace_back("--ignore-submodules");
|
|
||||||
}
|
|
||||||
gitDiffOpts.emplace_back("--");
|
|
||||||
runProgram("git", true, gitDiffOpts);
|
|
||||||
|
|
||||||
clean = true;
|
|
||||||
}
|
|
||||||
} catch (ExecError & e) {
|
|
||||||
if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
|
|
||||||
}
|
|
||||||
|
|
||||||
return WorkdirInfo { .clean = clean, .hasHead = hasHead };
|
|
||||||
}
|
|
||||||
|
|
||||||
std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, const Path & workdir, const WorkdirInfo & workdirInfo)
|
|
||||||
{
|
|
||||||
const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
|
|
||||||
auto gitDir = ".git";
|
|
||||||
|
|
||||||
if (!fetchSettings.allowDirty)
|
|
||||||
throw Error("Git tree '%s' is dirty", workdir);
|
|
||||||
|
|
||||||
if (fetchSettings.warnDirty)
|
|
||||||
warn("Git tree '%s' is dirty", workdir);
|
|
||||||
|
|
||||||
auto gitOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "ls-files", "-z" });
|
|
||||||
if (submodules)
|
|
||||||
gitOpts.emplace_back("--recurse-submodules");
|
|
||||||
|
|
||||||
auto files = tokenizeString<std::set<std::string>>(
|
|
||||||
runProgram("git", true, gitOpts), "\0"s);
|
|
||||||
|
|
||||||
Path actualPath(absPath(workdir));
|
|
||||||
|
|
||||||
PathFilter filter = [&](const Path & p) -> bool {
|
|
||||||
assert(hasPrefix(p, actualPath));
|
|
||||||
std::string file(p, actualPath.size() + 1);
|
|
||||||
|
|
||||||
auto st = lstat(p);
|
|
||||||
|
|
||||||
if (S_ISDIR(st.st_mode)) {
|
|
||||||
auto prefix = file + "/";
|
|
||||||
auto i = files.lower_bound(prefix);
|
|
||||||
return i != files.end() && hasPrefix(*i, prefix);
|
|
||||||
}
|
|
||||||
|
|
||||||
return files.count(file);
|
|
||||||
};
|
|
||||||
|
|
||||||
auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
|
|
||||||
|
|
||||||
// FIXME: maybe we should use the timestamp of the last
|
|
||||||
// modified dirty file?
|
|
||||||
input.attrs.insert_or_assign(
|
|
||||||
"lastModified",
|
|
||||||
workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
|
|
||||||
|
|
||||||
if (workdirInfo.hasHead) {
|
|
||||||
input.attrs.insert_or_assign("dirtyRev", chomp(
|
|
||||||
runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "rev-parse", "--verify", "HEAD" })) + "-dirty");
|
|
||||||
input.attrs.insert_or_assign("dirtyShortRev", chomp(
|
|
||||||
runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "rev-parse", "--verify", "--short", "HEAD" })) + "-dirty");
|
|
||||||
}
|
|
||||||
|
|
||||||
return {std::move(storePath), input};
|
|
||||||
}
|
|
||||||
} // end namespace
|
} // end namespace
|
||||||
|
|
||||||
struct GitInputScheme : InputScheme
|
struct GitInputScheme : InputScheme
|
||||||
|
@ -273,9 +170,9 @@ struct GitInputScheme : InputScheme
|
||||||
attrs.emplace("type", "git");
|
attrs.emplace("type", "git");
|
||||||
|
|
||||||
for (auto & [name, value] : url.query) {
|
for (auto & [name, value] : url.query) {
|
||||||
if (name == "rev" || name == "ref")
|
if (name == "rev" || name == "ref" || name == "keytype" || name == "publicKey" || name == "publicKeys")
|
||||||
attrs.emplace(name, value);
|
attrs.emplace(name, value);
|
||||||
else if (name == "shallow" || name == "submodules" || name == "allRefs")
|
else if (name == "shallow" || name == "submodules" || name == "allRefs" || name == "verifyCommit")
|
||||||
attrs.emplace(name, Explicit<bool> { value == "1" });
|
attrs.emplace(name, Explicit<bool> { value == "1" });
|
||||||
else
|
else
|
||||||
url2.query.emplace(name, value);
|
url2.query.emplace(name, value);
|
||||||
|
@ -286,17 +183,44 @@ struct GitInputScheme : InputScheme
|
||||||
return inputFromAttrs(attrs);
|
return inputFromAttrs(attrs);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
std::string_view schemeName() const override
|
||||||
|
{
|
||||||
|
return "git";
|
||||||
|
}
|
||||||
|
|
||||||
|
StringSet allowedAttrs() const override
|
||||||
|
{
|
||||||
|
return {
|
||||||
|
"url",
|
||||||
|
"ref",
|
||||||
|
"rev",
|
||||||
|
"shallow",
|
||||||
|
"submodules",
|
||||||
|
"lastModified",
|
||||||
|
"revCount",
|
||||||
|
"narHash",
|
||||||
|
"allRefs",
|
||||||
|
"name",
|
||||||
|
"dirtyRev",
|
||||||
|
"dirtyShortRev",
|
||||||
|
"verifyCommit",
|
||||||
|
"keytype",
|
||||||
|
"publicKey",
|
||||||
|
"publicKeys",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
||||||
{
|
{
|
||||||
if (maybeGetStrAttr(attrs, "type") != "git") return {};
|
for (auto & [name, _] : attrs)
|
||||||
|
if (name == "verifyCommit"
|
||||||
|
|| name == "keytype"
|
||||||
|
|| name == "publicKey"
|
||||||
|
|| name == "publicKeys")
|
||||||
|
experimentalFeatureSettings.require(Xp::VerifiedFetches);
|
||||||
|
|
||||||
for (auto & [name, value] : attrs)
|
maybeGetBoolAttr(attrs, "verifyCommit");
|
||||||
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs" && name != "name" && name != "dirtyRev" && name != "dirtyShortRev")
|
|
||||||
throw Error("unsupported Git input attribute '%s'", name);
|
|
||||||
|
|
||||||
maybeGetBoolAttr(attrs, "shallow");
|
|
||||||
maybeGetBoolAttr(attrs, "submodules");
|
|
||||||
maybeGetBoolAttr(attrs, "allRefs");
|
|
||||||
|
|
||||||
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
|
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
|
||||||
if (std::regex_search(*ref, badGitRefRegex))
|
if (std::regex_search(*ref, badGitRefRegex))
|
||||||
|
@ -308,6 +232,9 @@ struct GitInputScheme : InputScheme
|
||||||
auto url = fixGitURL(getStrAttr(attrs, "url"));
|
auto url = fixGitURL(getStrAttr(attrs, "url"));
|
||||||
parseURL(url);
|
parseURL(url);
|
||||||
input.attrs["url"] = url;
|
input.attrs["url"] = url;
|
||||||
|
getShallowAttr(input);
|
||||||
|
getSubmodulesAttr(input);
|
||||||
|
getAllRefsAttr(input);
|
||||||
return input;
|
return input;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -317,8 +244,19 @@ struct GitInputScheme : InputScheme
|
||||||
if (url.scheme != "git") url.scheme = "git+" + url.scheme;
|
if (url.scheme != "git") url.scheme = "git+" + url.scheme;
|
||||||
if (auto rev = input.getRev()) url.query.insert_or_assign("rev", rev->gitRev());
|
if (auto rev = input.getRev()) url.query.insert_or_assign("rev", rev->gitRev());
|
||||||
if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref);
|
if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref);
|
||||||
if (maybeGetBoolAttr(input.attrs, "shallow").value_or(false))
|
if (getShallowAttr(input))
|
||||||
url.query.insert_or_assign("shallow", "1");
|
url.query.insert_or_assign("shallow", "1");
|
||||||
|
if (getSubmodulesAttr(input))
|
||||||
|
url.query.insert_or_assign("submodules", "1");
|
||||||
|
if (maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(false))
|
||||||
|
url.query.insert_or_assign("verifyCommit", "1");
|
||||||
|
auto publicKeys = getPublicKeys(input.attrs);
|
||||||
|
if (publicKeys.size() == 1) {
|
||||||
|
url.query.insert_or_assign("keytype", publicKeys.at(0).type);
|
||||||
|
url.query.insert_or_assign("publicKey", publicKeys.at(0).key);
|
||||||
|
}
|
||||||
|
else if (publicKeys.size() > 1)
|
||||||
|
url.query.insert_or_assign("publicKeys", publicKeys_to_string(publicKeys));
|
||||||
return url;
|
return url;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -337,11 +275,11 @@ struct GitInputScheme : InputScheme
|
||||||
|
|
||||||
void clone(const Input & input, const Path & destDir) const override
|
void clone(const Input & input, const Path & destDir) const override
|
||||||
{
|
{
|
||||||
auto [isLocal, actualUrl] = getActualUrl(input);
|
auto repoInfo = getRepoInfo(input);
|
||||||
|
|
||||||
Strings args = {"clone"};
|
Strings args = {"clone"};
|
||||||
|
|
||||||
args.push_back(actualUrl);
|
args.push_back(repoInfo.url);
|
||||||
|
|
||||||
if (auto ref = input.getRef()) {
|
if (auto ref = input.getRef()) {
|
||||||
args.push_back("--branch");
|
args.push_back("--branch");
|
||||||
|
@ -355,30 +293,87 @@ struct GitInputScheme : InputScheme
|
||||||
runProgram("git", true, args, {}, true);
|
runProgram("git", true, args, {}, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::optional<Path> getSourcePath(const Input & input) override
|
std::optional<Path> getSourcePath(const Input & input) const override
|
||||||
{
|
{
|
||||||
auto url = parseURL(getStrAttr(input.attrs, "url"));
|
auto repoInfo = getRepoInfo(input);
|
||||||
if (url.scheme == "file" && !input.getRef() && !input.getRev())
|
if (repoInfo.isLocal) return repoInfo.url;
|
||||||
return url.path;
|
return std::nullopt;
|
||||||
return {};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
|
void putFile(
|
||||||
|
const Input & input,
|
||||||
|
const CanonPath & path,
|
||||||
|
std::string_view contents,
|
||||||
|
std::optional<std::string> commitMsg) const override
|
||||||
{
|
{
|
||||||
auto sourcePath = getSourcePath(input);
|
auto repoInfo = getRepoInfo(input);
|
||||||
assert(sourcePath);
|
if (!repoInfo.isLocal)
|
||||||
auto gitDir = ".git";
|
throw Error("cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string());
|
||||||
|
|
||||||
|
writeFile((CanonPath(repoInfo.url) + path).abs(), contents);
|
||||||
|
|
||||||
runProgram("git", true,
|
runProgram("git", true,
|
||||||
{ "-C", *sourcePath, "--git-dir", gitDir, "add", "--intent-to-add", "--", std::string(file) });
|
{ "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) });
|
||||||
|
|
||||||
if (commitMsg)
|
if (commitMsg)
|
||||||
runProgram("git", true,
|
runProgram("git", true,
|
||||||
{ "-C", *sourcePath, "--git-dir", gitDir, "commit", std::string(file), "-m", *commitMsg });
|
{ "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-m", *commitMsg });
|
||||||
}
|
}
|
||||||
|
|
||||||
std::pair<bool, std::string> getActualUrl(const Input & input) const
|
struct RepoInfo
|
||||||
{
|
{
|
||||||
|
/* Whether this is a local, non-bare repository. */
|
||||||
|
bool isLocal = false;
|
||||||
|
|
||||||
|
/* Working directory info: the complete list of files, and
|
||||||
|
whether the working directory is dirty compared to HEAD. */
|
||||||
|
GitRepo::WorkdirInfo workdirInfo;
|
||||||
|
|
||||||
|
/* URL of the repo, or its path if isLocal. Never a `file` URL. */
|
||||||
|
std::string url;
|
||||||
|
|
||||||
|
void warnDirty() const
|
||||||
|
{
|
||||||
|
if (workdirInfo.isDirty) {
|
||||||
|
if (!fetchSettings.allowDirty)
|
||||||
|
throw Error("Git tree '%s' is dirty", url);
|
||||||
|
|
||||||
|
if (fetchSettings.warnDirty)
|
||||||
|
warn("Git tree '%s' is dirty", url);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string gitDir = ".git";
|
||||||
|
};
|
||||||
|
|
||||||
|
bool getShallowAttr(const Input & input) const
|
||||||
|
{
|
||||||
|
return maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
|
||||||
|
}
|
||||||
|
|
||||||
|
bool getSubmodulesAttr(const Input & input) const
|
||||||
|
{
|
||||||
|
return maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
|
||||||
|
}
|
||||||
|
|
||||||
|
bool getAllRefsAttr(const Input & input) const
|
||||||
|
{
|
||||||
|
return maybeGetBoolAttr(input.attrs, "allRefs").value_or(false);
|
||||||
|
}
|
||||||
|
|
||||||
|
RepoInfo getRepoInfo(const Input & input) const
|
||||||
|
{
|
||||||
|
auto checkHashType = [&](const std::optional<Hash> & hash)
|
||||||
|
{
|
||||||
|
if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
|
||||||
|
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
|
||||||
|
};
|
||||||
|
|
||||||
|
if (auto rev = input.getRev())
|
||||||
|
checkHashType(rev);
|
||||||
|
|
||||||
|
RepoInfo repoInfo;
|
||||||
|
|
||||||
// file:// URIs are normally not cloned (but otherwise treated the
|
// file:// URIs are normally not cloned (but otherwise treated the
|
||||||
// same as remote URIs, i.e. we don't use the working tree or
|
// same as remote URIs, i.e. we don't use the working tree or
|
||||||
// HEAD). Exception: If _NIX_FORCE_HTTP is set, or the repo is a bare git
|
// HEAD). Exception: If _NIX_FORCE_HTTP is set, or the repo is a bare git
|
||||||
|
@ -386,153 +381,132 @@ struct GitInputScheme : InputScheme
|
||||||
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
|
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
|
||||||
auto url = parseURL(getStrAttr(input.attrs, "url"));
|
auto url = parseURL(getStrAttr(input.attrs, "url"));
|
||||||
bool isBareRepository = url.scheme == "file" && !pathExists(url.path + "/.git");
|
bool isBareRepository = url.scheme == "file" && !pathExists(url.path + "/.git");
|
||||||
bool isLocal = url.scheme == "file" && !forceHttp && !isBareRepository;
|
repoInfo.isLocal = url.scheme == "file" && !forceHttp && !isBareRepository;
|
||||||
return {isLocal, isLocal ? url.path : url.base};
|
repoInfo.url = repoInfo.isLocal ? url.path : url.base;
|
||||||
|
|
||||||
|
// If this is a local directory and no ref or revision is
|
||||||
|
// given, then allow the use of an unclean working tree.
|
||||||
|
if (!input.getRef() && !input.getRev() && repoInfo.isLocal)
|
||||||
|
repoInfo.workdirInfo = GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirInfo();
|
||||||
|
|
||||||
|
return repoInfo;
|
||||||
}
|
}
|
||||||
|
|
||||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
uint64_t getLastModified(const RepoInfo & repoInfo, const std::string & repoDir, const Hash & rev) const
|
||||||
{
|
{
|
||||||
Input input(_input);
|
Attrs key{{"_what", "gitLastModified"}, {"rev", rev.gitRev()}};
|
||||||
auto gitDir = ".git";
|
|
||||||
|
auto cache = getCache();
|
||||||
|
|
||||||
|
if (auto res = cache->lookup(key))
|
||||||
|
return getIntAttr(*res, "lastModified");
|
||||||
|
|
||||||
|
auto lastModified = GitRepo::openRepo(CanonPath(repoDir))->getLastModified(rev);
|
||||||
|
|
||||||
|
cache->upsert(key, Attrs{{"lastModified", lastModified}});
|
||||||
|
|
||||||
|
return lastModified;
|
||||||
|
}
|
||||||
|
|
||||||
|
uint64_t getRevCount(const RepoInfo & repoInfo, const std::string & repoDir, const Hash & rev) const
|
||||||
|
{
|
||||||
|
Attrs key{{"_what", "gitRevCount"}, {"rev", rev.gitRev()}};
|
||||||
|
|
||||||
|
auto cache = getCache();
|
||||||
|
|
||||||
|
if (auto revCountAttrs = cache->lookup(key))
|
||||||
|
return getIntAttr(*revCountAttrs, "revCount");
|
||||||
|
|
||||||
|
Activity act(*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.url));
|
||||||
|
|
||||||
|
auto revCount = GitRepo::openRepo(CanonPath(repoDir))->getRevCount(rev);
|
||||||
|
|
||||||
|
cache->upsert(key, Attrs{{"revCount", revCount}});
|
||||||
|
|
||||||
|
return revCount;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string getDefaultRef(const RepoInfo & repoInfo) const
|
||||||
|
{
|
||||||
|
auto head = repoInfo.isLocal
|
||||||
|
? GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirRef()
|
||||||
|
: readHeadCached(repoInfo.url);
|
||||||
|
if (!head) {
|
||||||
|
warn("could not read HEAD ref from repo at '%s', using 'master'", repoInfo.url);
|
||||||
|
return "master";
|
||||||
|
}
|
||||||
|
return *head;
|
||||||
|
}
|
||||||
|
|
||||||
|
static MakeNotAllowedError makeNotAllowedError(std::string url)
|
||||||
|
{
|
||||||
|
return [url{std::move(url)}](const CanonPath & path) -> RestrictedPathError
|
||||||
|
{
|
||||||
|
if (nix::pathExists(path.abs()))
|
||||||
|
return RestrictedPathError("access to path '%s' is forbidden because it is not under Git control; maybe you should 'git add' it to the repository '%s'?", path, url);
|
||||||
|
else
|
||||||
|
return RestrictedPathError("path '%s' does not exist in Git repository '%s'", path, url);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
void verifyCommit(const Input & input, std::shared_ptr<GitRepo> repo) const
|
||||||
|
{
|
||||||
|
auto publicKeys = getPublicKeys(input.attrs);
|
||||||
|
auto verifyCommit = maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(!publicKeys.empty());
|
||||||
|
|
||||||
|
if (verifyCommit) {
|
||||||
|
if (input.getRev() && repo)
|
||||||
|
repo->verifyCommit(*input.getRev(), publicKeys);
|
||||||
|
else
|
||||||
|
throw Error("commit verification is required for Git repository '%s', but it's dirty", input.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<ref<InputAccessor>, Input> getAccessorFromCommit(
|
||||||
|
ref<Store> store,
|
||||||
|
RepoInfo & repoInfo,
|
||||||
|
Input && input) const
|
||||||
|
{
|
||||||
|
assert(!repoInfo.workdirInfo.isDirty);
|
||||||
|
|
||||||
|
auto origRev = input.getRev();
|
||||||
|
|
||||||
std::string name = input.getName();
|
std::string name = input.getName();
|
||||||
|
|
||||||
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
|
auto originalRef = input.getRef();
|
||||||
bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
|
auto ref = originalRef ? *originalRef : getDefaultRef(repoInfo);
|
||||||
bool allRefs = maybeGetBoolAttr(input.attrs, "allRefs").value_or(false);
|
input.attrs.insert_or_assign("ref", ref);
|
||||||
|
|
||||||
std::string cacheType = "git";
|
|
||||||
if (shallow) cacheType += "-shallow";
|
|
||||||
if (submodules) cacheType += "-submodules";
|
|
||||||
if (allRefs) cacheType += "-all-refs";
|
|
||||||
|
|
||||||
auto checkHashType = [&](const std::optional<Hash> & hash)
|
|
||||||
{
|
|
||||||
if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
|
|
||||||
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
|
|
||||||
};
|
|
||||||
|
|
||||||
auto getLockedAttrs = [&]()
|
|
||||||
{
|
|
||||||
checkHashType(input.getRev());
|
|
||||||
|
|
||||||
return Attrs({
|
|
||||||
{"type", cacheType},
|
|
||||||
{"name", name},
|
|
||||||
{"rev", input.getRev()->gitRev()},
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
|
|
||||||
-> std::pair<StorePath, Input>
|
|
||||||
{
|
|
||||||
assert(input.getRev());
|
|
||||||
assert(!_input.getRev() || _input.getRev() == input.getRev());
|
|
||||||
if (!shallow)
|
|
||||||
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
|
|
||||||
input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified"));
|
|
||||||
return {std::move(storePath), input};
|
|
||||||
};
|
|
||||||
|
|
||||||
if (input.getRev()) {
|
|
||||||
if (auto res = getCache()->lookup(store, getLockedAttrs()))
|
|
||||||
return makeResult(res->first, std::move(res->second));
|
|
||||||
}
|
|
||||||
|
|
||||||
auto [isLocal, actualUrl_] = getActualUrl(input);
|
|
||||||
auto actualUrl = actualUrl_; // work around clang bug
|
|
||||||
|
|
||||||
/* If this is a local directory and no ref or revision is given,
|
|
||||||
allow fetching directly from a dirty workdir. */
|
|
||||||
if (!input.getRef() && !input.getRev() && isLocal) {
|
|
||||||
auto workdirInfo = getWorkdirInfo(input, actualUrl);
|
|
||||||
if (!workdirInfo.clean) {
|
|
||||||
return fetchFromWorkdir(store, input, actualUrl, workdirInfo);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Attrs unlockedAttrs({
|
|
||||||
{"type", cacheType},
|
|
||||||
{"name", name},
|
|
||||||
{"url", actualUrl},
|
|
||||||
});
|
|
||||||
|
|
||||||
Path repoDir;
|
Path repoDir;
|
||||||
|
|
||||||
if (isLocal) {
|
if (repoInfo.isLocal) {
|
||||||
if (!input.getRef()) {
|
repoDir = repoInfo.url;
|
||||||
auto head = readHead(actualUrl);
|
|
||||||
if (!head) {
|
|
||||||
warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl);
|
|
||||||
head = "master";
|
|
||||||
}
|
|
||||||
input.attrs.insert_or_assign("ref", *head);
|
|
||||||
unlockedAttrs.insert_or_assign("ref", *head);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!input.getRev())
|
if (!input.getRev())
|
||||||
input.attrs.insert_or_assign("rev",
|
input.attrs.insert_or_assign("rev", GitRepo::openRepo(CanonPath(repoDir))->resolveRef(ref).gitRev());
|
||||||
Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "--git-dir", gitDir, "rev-parse", *input.getRef() })), htSHA1).gitRev());
|
|
||||||
|
|
||||||
repoDir = actualUrl;
|
|
||||||
} else {
|
} else {
|
||||||
const bool useHeadRef = !input.getRef();
|
Path cacheDir = getCachePath(repoInfo.url);
|
||||||
if (useHeadRef) {
|
|
||||||
auto head = readHeadCached(actualUrl);
|
|
||||||
if (!head) {
|
|
||||||
warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl);
|
|
||||||
head = "master";
|
|
||||||
}
|
|
||||||
input.attrs.insert_or_assign("ref", *head);
|
|
||||||
unlockedAttrs.insert_or_assign("ref", *head);
|
|
||||||
} else {
|
|
||||||
if (!input.getRev()) {
|
|
||||||
unlockedAttrs.insert_or_assign("ref", input.getRef().value());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
|
|
||||||
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
|
|
||||||
if (!input.getRev() || input.getRev() == rev2) {
|
|
||||||
input.attrs.insert_or_assign("rev", rev2.gitRev());
|
|
||||||
return makeResult(res->first, std::move(res->second));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Path cacheDir = getCachePath(actualUrl);
|
|
||||||
repoDir = cacheDir;
|
repoDir = cacheDir;
|
||||||
gitDir = ".";
|
repoInfo.gitDir = ".";
|
||||||
|
|
||||||
createDirs(dirOf(cacheDir));
|
createDirs(dirOf(cacheDir));
|
||||||
PathLocks cacheDirLock({cacheDir + ".lock"});
|
PathLocks cacheDirLock({cacheDir});
|
||||||
|
|
||||||
if (!pathExists(cacheDir)) {
|
auto repo = GitRepo::openRepo(CanonPath(cacheDir), true, true);
|
||||||
runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", "--bare", repoDir });
|
|
||||||
}
|
|
||||||
|
|
||||||
Path localRefFile =
|
Path localRefFile =
|
||||||
input.getRef()->compare(0, 5, "refs/") == 0
|
ref.compare(0, 5, "refs/") == 0
|
||||||
? cacheDir + "/" + *input.getRef()
|
? cacheDir + "/" + ref
|
||||||
: cacheDir + "/refs/heads/" + *input.getRef();
|
: cacheDir + "/refs/heads/" + ref;
|
||||||
|
|
||||||
bool doFetch;
|
bool doFetch;
|
||||||
time_t now = time(0);
|
time_t now = time(0);
|
||||||
|
|
||||||
/* If a rev was specified, we need to fetch if it's not in the
|
/* If a rev was specified, we need to fetch if it's not in the
|
||||||
repo. */
|
repo. */
|
||||||
if (input.getRev()) {
|
if (auto rev = input.getRev()) {
|
||||||
try {
|
doFetch = !repo->hasObject(*rev);
|
||||||
runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "cat-file", "-e", input.getRev()->gitRev() });
|
|
||||||
doFetch = false;
|
|
||||||
} catch (ExecError & e) {
|
|
||||||
if (WIFEXITED(e.status)) {
|
|
||||||
doFetch = true;
|
|
||||||
} else {
|
|
||||||
throw;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
if (allRefs) {
|
if (getAllRefsAttr(input)) {
|
||||||
doFetch = true;
|
doFetch = true;
|
||||||
} else {
|
} else {
|
||||||
/* If the local ref is older than ‘tarball-ttl’ seconds, do a
|
/* If the local ref is older than ‘tarball-ttl’ seconds, do a
|
||||||
|
@ -544,160 +518,193 @@ struct GitInputScheme : InputScheme
|
||||||
}
|
}
|
||||||
|
|
||||||
if (doFetch) {
|
if (doFetch) {
|
||||||
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", actualUrl));
|
|
||||||
|
|
||||||
// FIXME: git stderr messes up our progress indicator, so
|
|
||||||
// we're using --quiet for now. Should process its stderr.
|
|
||||||
try {
|
try {
|
||||||
auto ref = input.getRef();
|
auto fetchRef = getAllRefsAttr(input)
|
||||||
auto fetchRef = allRefs
|
|
||||||
? "refs/*"
|
? "refs/*"
|
||||||
: ref->compare(0, 5, "refs/") == 0
|
: ref.compare(0, 5, "refs/") == 0
|
||||||
? *ref
|
? ref
|
||||||
: ref == "HEAD"
|
: ref == "HEAD"
|
||||||
? *ref
|
? ref
|
||||||
: "refs/heads/" + *ref;
|
: "refs/heads/" + ref;
|
||||||
runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) }, {}, true);
|
|
||||||
|
repo->fetch(repoInfo.url, fmt("%s:%s", fetchRef, fetchRef), getShallowAttr(input));
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
if (!pathExists(localRefFile)) throw;
|
if (!pathExists(localRefFile)) throw;
|
||||||
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
|
logError(e.info());
|
||||||
|
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", repoInfo.url);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!touchCacheFile(localRefFile, now))
|
if (!touchCacheFile(localRefFile, now))
|
||||||
warn("could not update mtime for file '%s': %s", localRefFile, strerror(errno));
|
warn("could not update mtime for file '%s': %s", localRefFile, strerror(errno));
|
||||||
if (useHeadRef && !storeCachedHead(actualUrl, *input.getRef()))
|
if (!originalRef && !storeCachedHead(repoInfo.url, ref))
|
||||||
warn("could not update cached head '%s' for '%s'", *input.getRef(), actualUrl);
|
warn("could not update cached head '%s' for '%s'", ref, repoInfo.url);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!input.getRev())
|
if (auto rev = input.getRev()) {
|
||||||
|
if (!repo->hasObject(*rev))
|
||||||
|
throw Error(
|
||||||
|
"Cannot find Git revision '%s' in ref '%s' of repository '%s'! "
|
||||||
|
"Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the "
|
||||||
|
ANSI_BOLD "ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD
|
||||||
|
"allRefs = true;" ANSI_NORMAL " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".",
|
||||||
|
rev->gitRev(),
|
||||||
|
ref,
|
||||||
|
repoInfo.url
|
||||||
|
);
|
||||||
|
} else
|
||||||
input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
|
input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
|
||||||
|
|
||||||
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
|
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
|
||||||
}
|
}
|
||||||
|
|
||||||
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-parse", "--is-shallow-repository" })) == "true";
|
auto repo = GitRepo::openRepo(CanonPath(repoDir));
|
||||||
|
|
||||||
if (isShallow && !shallow)
|
auto isShallow = repo->isShallow();
|
||||||
throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified.", actualUrl);
|
|
||||||
|
|
||||||
// FIXME: check whether rev is an ancestor of ref.
|
if (isShallow && !getShallowAttr(input))
|
||||||
|
throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified", repoInfo.url);
|
||||||
|
|
||||||
printTalkative("using revision %s of repo '%s'", input.getRev()->gitRev(), actualUrl);
|
// FIXME: check whether rev is an ancestor of ref?
|
||||||
|
|
||||||
/* Now that we know the ref, check again whether we have it in
|
auto rev = *input.getRev();
|
||||||
the store. */
|
|
||||||
if (auto res = getCache()->lookup(store, getLockedAttrs()))
|
|
||||||
return makeResult(res->first, std::move(res->second));
|
|
||||||
|
|
||||||
Path tmpDir = createTempDir();
|
|
||||||
AutoDelete delTmpDir(tmpDir, true);
|
|
||||||
PathFilter filter = defaultPathFilter;
|
|
||||||
|
|
||||||
auto result = runProgram(RunOptions {
|
|
||||||
.program = "git",
|
|
||||||
.args = { "-C", repoDir, "--git-dir", gitDir, "cat-file", "commit", input.getRev()->gitRev() },
|
|
||||||
.mergeStderrToStdout = true
|
|
||||||
});
|
|
||||||
if (WEXITSTATUS(result.first) == 128
|
|
||||||
&& result.second.find("bad file") != std::string::npos)
|
|
||||||
{
|
|
||||||
throw Error(
|
|
||||||
"Cannot find Git revision '%s' in ref '%s' of repository '%s'! "
|
|
||||||
"Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the "
|
|
||||||
ANSI_BOLD "ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD
|
|
||||||
"allRefs = true;" ANSI_NORMAL " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".",
|
|
||||||
input.getRev()->gitRev(),
|
|
||||||
*input.getRef(),
|
|
||||||
actualUrl
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (submodules) {
|
|
||||||
Path tmpGitDir = createTempDir();
|
|
||||||
AutoDelete delTmpGitDir(tmpGitDir, true);
|
|
||||||
|
|
||||||
runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", tmpDir, "--separate-git-dir", tmpGitDir });
|
|
||||||
|
|
||||||
{
|
|
||||||
// TODO: repoDir might lack the ref (it only checks if rev
|
|
||||||
// exists, see FIXME above) so use a big hammer and fetch
|
|
||||||
// everything to ensure we get the rev.
|
|
||||||
Activity act(*logger, lvlTalkative, actUnknown, fmt("making temporary clone of '%s'", repoDir));
|
|
||||||
runProgram("git", true, { "-C", tmpDir, "fetch", "--quiet", "--force",
|
|
||||||
"--update-head-ok", "--", repoDir, "refs/*:refs/*" }, {}, true);
|
|
||||||
}
|
|
||||||
|
|
||||||
runProgram("git", true, { "-C", tmpDir, "checkout", "--quiet", input.getRev()->gitRev() });
|
|
||||||
|
|
||||||
/* Ensure that we use the correct origin for fetching
|
|
||||||
submodules. This matters for submodules with relative
|
|
||||||
URLs. */
|
|
||||||
if (isLocal) {
|
|
||||||
writeFile(tmpGitDir + "/config", readFile(repoDir + "/" + gitDir + "/config"));
|
|
||||||
|
|
||||||
/* Restore the config.bare setting we may have just
|
|
||||||
copied erroneously from the user's repo. */
|
|
||||||
runProgram("git", true, { "-C", tmpDir, "config", "core.bare", "false" });
|
|
||||||
} else
|
|
||||||
runProgram("git", true, { "-C", tmpDir, "config", "remote.origin.url", actualUrl });
|
|
||||||
|
|
||||||
/* As an optimisation, copy the modules directory of the
|
|
||||||
source repo if it exists. */
|
|
||||||
auto modulesPath = repoDir + "/" + gitDir + "/modules";
|
|
||||||
if (pathExists(modulesPath)) {
|
|
||||||
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying submodules of '%s'", actualUrl));
|
|
||||||
runProgram("cp", true, { "-R", "--", modulesPath, tmpGitDir + "/modules" });
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching submodules of '%s'", actualUrl));
|
|
||||||
runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" }, {}, true);
|
|
||||||
}
|
|
||||||
|
|
||||||
filter = isNotDotGitDirectory;
|
|
||||||
} else {
|
|
||||||
// FIXME: should pipe this, or find some better way to extract a
|
|
||||||
// revision.
|
|
||||||
auto source = sinkToSource([&](Sink & sink) {
|
|
||||||
runProgram2({
|
|
||||||
.program = "git",
|
|
||||||
.args = { "-C", repoDir, "--git-dir", gitDir, "archive", input.getRev()->gitRev() },
|
|
||||||
.standardOut = &sink
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
unpackTarfile(*source, tmpDir);
|
|
||||||
}
|
|
||||||
|
|
||||||
auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
|
|
||||||
|
|
||||||
auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));
|
|
||||||
|
|
||||||
Attrs infoAttrs({
|
Attrs infoAttrs({
|
||||||
{"rev", input.getRev()->gitRev()},
|
{"rev", rev.gitRev()},
|
||||||
{"lastModified", lastModified},
|
{"lastModified", getLastModified(repoInfo, repoDir, rev)},
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!shallow)
|
if (!getShallowAttr(input))
|
||||||
infoAttrs.insert_or_assign("revCount",
|
infoAttrs.insert_or_assign("revCount",
|
||||||
std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-list", "--count", input.getRev()->gitRev() })));
|
getRevCount(repoInfo, repoDir, rev));
|
||||||
|
|
||||||
if (!_input.getRev())
|
printTalkative("using revision %s of repo '%s'", rev.gitRev(), repoInfo.url);
|
||||||
getCache()->add(
|
|
||||||
store,
|
|
||||||
unlockedAttrs,
|
|
||||||
infoAttrs,
|
|
||||||
storePath,
|
|
||||||
false);
|
|
||||||
|
|
||||||
getCache()->add(
|
verifyCommit(input, repo);
|
||||||
store,
|
|
||||||
getLockedAttrs(),
|
|
||||||
infoAttrs,
|
|
||||||
storePath,
|
|
||||||
true);
|
|
||||||
|
|
||||||
return makeResult(infoAttrs, std::move(storePath));
|
auto accessor = repo->getAccessor(rev);
|
||||||
|
|
||||||
|
/* If the repo has submodules, fetch them and return a mounted
|
||||||
|
input accessor consisting of the accessor for the top-level
|
||||||
|
repo and the accessors for the submodules. */
|
||||||
|
if (getSubmodulesAttr(input)) {
|
||||||
|
std::map<CanonPath, nix::ref<InputAccessor>> mounts;
|
||||||
|
|
||||||
|
for (auto & [submodule, submoduleRev] : repo->getSubmodules(rev)) {
|
||||||
|
auto resolved = repo->resolveSubmoduleUrl(submodule.url, repoInfo.url);
|
||||||
|
debug("Git submodule %s: %s %s %s -> %s",
|
||||||
|
submodule.path, submodule.url, submodule.branch, submoduleRev.gitRev(), resolved);
|
||||||
|
fetchers::Attrs attrs;
|
||||||
|
attrs.insert_or_assign("type", "git");
|
||||||
|
attrs.insert_or_assign("url", resolved);
|
||||||
|
if (submodule.branch != "")
|
||||||
|
attrs.insert_or_assign("ref", submodule.branch);
|
||||||
|
attrs.insert_or_assign("rev", submoduleRev.gitRev());
|
||||||
|
auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs));
|
||||||
|
auto [submoduleAccessor, submoduleInput2] =
|
||||||
|
submoduleInput.getAccessor(store);
|
||||||
|
mounts.insert_or_assign(submodule.path, submoduleAccessor);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!mounts.empty()) {
|
||||||
|
mounts.insert_or_assign(CanonPath::root, accessor);
|
||||||
|
accessor = makeMountedInputAccessor(std::move(mounts));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
assert(!origRev || origRev == rev);
|
||||||
|
if (!getShallowAttr(input))
|
||||||
|
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
|
||||||
|
input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified"));
|
||||||
|
|
||||||
|
return {accessor, std::move(input)};
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<ref<InputAccessor>, Input> getAccessorFromWorkdir(
|
||||||
|
ref<Store> store,
|
||||||
|
RepoInfo & repoInfo,
|
||||||
|
Input && input) const
|
||||||
|
{
|
||||||
|
if (getSubmodulesAttr(input))
|
||||||
|
/* Create mountpoints for the submodules. */
|
||||||
|
for (auto & submodule : repoInfo.workdirInfo.submodules)
|
||||||
|
repoInfo.workdirInfo.files.insert(submodule.path);
|
||||||
|
|
||||||
|
ref<InputAccessor> accessor =
|
||||||
|
makeFSInputAccessor(CanonPath(repoInfo.url), repoInfo.workdirInfo.files, makeNotAllowedError(repoInfo.url));
|
||||||
|
|
||||||
|
/* If the repo has submodules, return a mounted input accessor
|
||||||
|
consisting of the accessor for the top-level repo and the
|
||||||
|
accessors for the submodule workdirs. */
|
||||||
|
if (getSubmodulesAttr(input) && !repoInfo.workdirInfo.submodules.empty()) {
|
||||||
|
std::map<CanonPath, nix::ref<InputAccessor>> mounts;
|
||||||
|
|
||||||
|
for (auto & submodule : repoInfo.workdirInfo.submodules) {
|
||||||
|
auto submodulePath = CanonPath(repoInfo.url) + submodule.path;
|
||||||
|
fetchers::Attrs attrs;
|
||||||
|
attrs.insert_or_assign("type", "git");
|
||||||
|
attrs.insert_or_assign("url", submodulePath.abs());
|
||||||
|
auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs));
|
||||||
|
auto [submoduleAccessor, submoduleInput2] =
|
||||||
|
submoduleInput.getAccessor(store);
|
||||||
|
|
||||||
|
/* If the submodule is dirty, mark this repo dirty as
|
||||||
|
well. */
|
||||||
|
if (!submoduleInput2.getRev())
|
||||||
|
repoInfo.workdirInfo.isDirty = true;
|
||||||
|
|
||||||
|
mounts.insert_or_assign(submodule.path, submoduleAccessor);
|
||||||
|
}
|
||||||
|
|
||||||
|
mounts.insert_or_assign(CanonPath::root, accessor);
|
||||||
|
accessor = makeMountedInputAccessor(std::move(mounts));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!repoInfo.workdirInfo.isDirty) {
|
||||||
|
auto repo = GitRepo::openRepo(CanonPath(repoInfo.url));
|
||||||
|
|
||||||
|
if (auto ref = repo->getWorkdirRef())
|
||||||
|
input.attrs.insert_or_assign("ref", *ref);
|
||||||
|
|
||||||
|
auto rev = repoInfo.workdirInfo.headRev.value();
|
||||||
|
|
||||||
|
input.attrs.insert_or_assign("rev", rev.gitRev());
|
||||||
|
input.attrs.insert_or_assign("revCount", getRevCount(repoInfo, repoInfo.url, rev));
|
||||||
|
|
||||||
|
verifyCommit(input, repo);
|
||||||
|
} else {
|
||||||
|
repoInfo.warnDirty();
|
||||||
|
|
||||||
|
if (repoInfo.workdirInfo.headRev) {
|
||||||
|
input.attrs.insert_or_assign("dirtyRev",
|
||||||
|
repoInfo.workdirInfo.headRev->gitRev() + "-dirty");
|
||||||
|
input.attrs.insert_or_assign("dirtyShortRev",
|
||||||
|
repoInfo.workdirInfo.headRev->gitShortRev() + "-dirty");
|
||||||
|
}
|
||||||
|
|
||||||
|
verifyCommit(input, nullptr);
|
||||||
|
}
|
||||||
|
|
||||||
|
input.attrs.insert_or_assign(
|
||||||
|
"lastModified",
|
||||||
|
repoInfo.workdirInfo.headRev
|
||||||
|
? getLastModified(repoInfo, repoInfo.url, *repoInfo.workdirInfo.headRev)
|
||||||
|
: 0);
|
||||||
|
|
||||||
|
input.locked = true; // FIXME
|
||||||
|
|
||||||
|
return {accessor, std::move(input)};
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
|
||||||
|
{
|
||||||
|
Input input(_input);
|
||||||
|
|
||||||
|
auto repoInfo = getRepoInfo(input);
|
||||||
|
|
||||||
|
return
|
||||||
|
input.getRef() || input.getRev() || !repoInfo.isLocal
|
||||||
|
? getAccessorFromCommit(store, repoInfo, std::move(input))
|
||||||
|
: getAccessorFromWorkdir(store, repoInfo, std::move(input));
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -27,13 +27,11 @@ std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
|
||||||
|
|
||||||
struct GitArchiveInputScheme : InputScheme
|
struct GitArchiveInputScheme : InputScheme
|
||||||
{
|
{
|
||||||
virtual std::string type() const = 0;
|
|
||||||
|
|
||||||
virtual std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const = 0;
|
virtual std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const = 0;
|
||||||
|
|
||||||
std::optional<Input> inputFromURL(const ParsedURL & url, bool requireTree) const override
|
std::optional<Input> inputFromURL(const ParsedURL & url, bool requireTree) const override
|
||||||
{
|
{
|
||||||
if (url.scheme != type()) return {};
|
if (url.scheme != schemeName()) return {};
|
||||||
|
|
||||||
auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
|
auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
|
||||||
|
|
||||||
|
@ -91,7 +89,7 @@ struct GitArchiveInputScheme : InputScheme
|
||||||
throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url.url, *ref, rev->gitRev());
|
throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url.url, *ref, rev->gitRev());
|
||||||
|
|
||||||
Input input;
|
Input input;
|
||||||
input.attrs.insert_or_assign("type", type());
|
input.attrs.insert_or_assign("type", std::string { schemeName() });
|
||||||
input.attrs.insert_or_assign("owner", path[0]);
|
input.attrs.insert_or_assign("owner", path[0]);
|
||||||
input.attrs.insert_or_assign("repo", path[1]);
|
input.attrs.insert_or_assign("repo", path[1]);
|
||||||
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
|
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
|
||||||
|
@ -101,14 +99,21 @@ struct GitArchiveInputScheme : InputScheme
|
||||||
return input;
|
return input;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
StringSet allowedAttrs() const override
|
||||||
|
{
|
||||||
|
return {
|
||||||
|
"owner",
|
||||||
|
"repo",
|
||||||
|
"ref",
|
||||||
|
"rev",
|
||||||
|
"narHash",
|
||||||
|
"lastModified",
|
||||||
|
"host",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
||||||
{
|
{
|
||||||
if (maybeGetStrAttr(attrs, "type") != type()) return {};
|
|
||||||
|
|
||||||
for (auto & [name, value] : attrs)
|
|
||||||
if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified" && name != "host")
|
|
||||||
throw Error("unsupported input attribute '%s'", name);
|
|
||||||
|
|
||||||
getStrAttr(attrs, "owner");
|
getStrAttr(attrs, "owner");
|
||||||
getStrAttr(attrs, "repo");
|
getStrAttr(attrs, "repo");
|
||||||
|
|
||||||
|
@ -128,7 +133,7 @@ struct GitArchiveInputScheme : InputScheme
|
||||||
if (ref) path += "/" + *ref;
|
if (ref) path += "/" + *ref;
|
||||||
if (rev) path += "/" + rev->to_string(HashFormat::Base16, false);
|
if (rev) path += "/" + rev->to_string(HashFormat::Base16, false);
|
||||||
return ParsedURL {
|
return ParsedURL {
|
||||||
.scheme = type(),
|
.scheme = std::string { schemeName() },
|
||||||
.path = path,
|
.path = path,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -220,7 +225,7 @@ struct GitArchiveInputScheme : InputScheme
|
||||||
return {result.storePath, input};
|
return {result.storePath, input};
|
||||||
}
|
}
|
||||||
|
|
||||||
std::optional<ExperimentalFeature> experimentalFeature() override
|
std::optional<ExperimentalFeature> experimentalFeature() const override
|
||||||
{
|
{
|
||||||
return Xp::Flakes;
|
return Xp::Flakes;
|
||||||
}
|
}
|
||||||
|
@ -228,7 +233,7 @@ struct GitArchiveInputScheme : InputScheme
|
||||||
|
|
||||||
struct GitHubInputScheme : GitArchiveInputScheme
|
struct GitHubInputScheme : GitArchiveInputScheme
|
||||||
{
|
{
|
||||||
std::string type() const override { return "github"; }
|
std::string_view schemeName() const override { return "github"; }
|
||||||
|
|
||||||
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
|
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
|
||||||
{
|
{
|
||||||
|
@ -309,7 +314,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
|
||||||
|
|
||||||
struct GitLabInputScheme : GitArchiveInputScheme
|
struct GitLabInputScheme : GitArchiveInputScheme
|
||||||
{
|
{
|
||||||
std::string type() const override { return "gitlab"; }
|
std::string_view schemeName() const override { return "gitlab"; }
|
||||||
|
|
||||||
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
|
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
|
||||||
{
|
{
|
||||||
|
@ -377,7 +382,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
|
||||||
|
|
||||||
struct SourceHutInputScheme : GitArchiveInputScheme
|
struct SourceHutInputScheme : GitArchiveInputScheme
|
||||||
{
|
{
|
||||||
std::string type() const override { return "sourcehut"; }
|
std::string_view schemeName() const override { return "sourcehut"; }
|
||||||
|
|
||||||
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
|
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
|
||||||
{
|
{
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#include "fetchers.hh"
|
#include "fetchers.hh"
|
||||||
#include "url-parts.hh"
|
#include "url-parts.hh"
|
||||||
|
#include "path.hh"
|
||||||
|
|
||||||
namespace nix::fetchers {
|
namespace nix::fetchers {
|
||||||
|
|
||||||
|
@ -49,14 +50,23 @@ struct IndirectInputScheme : InputScheme
|
||||||
return input;
|
return input;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
std::string_view schemeName() const override
|
||||||
|
{
|
||||||
|
return "indirect";
|
||||||
|
}
|
||||||
|
|
||||||
|
StringSet allowedAttrs() const override
|
||||||
|
{
|
||||||
|
return {
|
||||||
|
"id",
|
||||||
|
"ref",
|
||||||
|
"rev",
|
||||||
|
"narHash",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
||||||
{
|
{
|
||||||
if (maybeGetStrAttr(attrs, "type") != "indirect") return {};
|
|
||||||
|
|
||||||
for (auto & [name, value] : attrs)
|
|
||||||
if (name != "type" && name != "id" && name != "ref" && name != "rev" && name != "narHash")
|
|
||||||
throw Error("unsupported indirect input attribute '%s'", name);
|
|
||||||
|
|
||||||
auto id = getStrAttr(attrs, "id");
|
auto id = getStrAttr(attrs, "id");
|
||||||
if (!std::regex_match(id, flakeRegex))
|
if (!std::regex_match(id, flakeRegex))
|
||||||
throw BadURL("'%s' is not a valid flake ID", id);
|
throw BadURL("'%s' is not a valid flake ID", id);
|
||||||
|
@ -92,7 +102,7 @@ struct IndirectInputScheme : InputScheme
|
||||||
throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
|
throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
std::optional<ExperimentalFeature> experimentalFeature() override
|
std::optional<ExperimentalFeature> experimentalFeature() const override
|
||||||
{
|
{
|
||||||
return Xp::Flakes;
|
return Xp::Flakes;
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,8 +1,10 @@
|
||||||
#pragma once
|
#pragma once
|
||||||
|
///@file
|
||||||
|
|
||||||
#include "source-accessor.hh"
|
#include "source-accessor.hh"
|
||||||
#include "ref.hh"
|
#include "ref.hh"
|
||||||
#include "types.hh"
|
#include "types.hh"
|
||||||
|
#include "file-system.hh"
|
||||||
#include "repair-flag.hh"
|
#include "repair-flag.hh"
|
||||||
#include "content-address.hh"
|
#include "content-address.hh"
|
||||||
|
|
||||||
|
@ -14,7 +16,7 @@ struct SourcePath;
|
||||||
class StorePath;
|
class StorePath;
|
||||||
class Store;
|
class Store;
|
||||||
|
|
||||||
struct InputAccessor : SourceAccessor, std::enable_shared_from_this<InputAccessor>
|
struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<InputAccessor>
|
||||||
{
|
{
|
||||||
/**
|
/**
|
||||||
* Return the maximum last-modified time of the files in this
|
* Return the maximum last-modified time of the files in this
|
||||||
|
|
|
@ -8,6 +8,6 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc)
|
||||||
|
|
||||||
libfetchers_CXXFLAGS += -I src/libutil -I src/libstore
|
libfetchers_CXXFLAGS += -I src/libutil -I src/libstore
|
||||||
|
|
||||||
libfetchers_LDFLAGS += -pthread
|
libfetchers_LDFLAGS += -pthread $(LIBGIT2_LIBS) -larchive
|
||||||
|
|
||||||
libfetchers_LIBS = libutil libstore
|
libfetchers_LIBS = libutil libstore
|
||||||
|
|
|
@ -1,48 +1,16 @@
|
||||||
#include "memory-input-accessor.hh"
|
#include "memory-input-accessor.hh"
|
||||||
|
#include "memory-source-accessor.hh"
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
struct MemoryInputAccessorImpl : MemoryInputAccessor
|
struct MemoryInputAccessorImpl : MemoryInputAccessor, MemorySourceAccessor
|
||||||
{
|
{
|
||||||
std::map<CanonPath, std::string> files;
|
|
||||||
|
|
||||||
std::string readFile(const CanonPath & path) override
|
|
||||||
{
|
|
||||||
auto i = files.find(path);
|
|
||||||
if (i == files.end())
|
|
||||||
throw Error("file '%s' does not exist", path);
|
|
||||||
return i->second;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool pathExists(const CanonPath & path) override
|
|
||||||
{
|
|
||||||
auto i = files.find(path);
|
|
||||||
return i != files.end();
|
|
||||||
}
|
|
||||||
|
|
||||||
Stat lstat(const CanonPath & path) override
|
|
||||||
{
|
|
||||||
auto i = files.find(path);
|
|
||||||
if (i != files.end())
|
|
||||||
return Stat { .type = tRegular, .isExecutable = false };
|
|
||||||
throw Error("file '%s' does not exist", path);
|
|
||||||
}
|
|
||||||
|
|
||||||
DirEntries readDirectory(const CanonPath & path) override
|
|
||||||
{
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
|
|
||||||
std::string readLink(const CanonPath & path) override
|
|
||||||
{
|
|
||||||
throw UnimplementedError("MemoryInputAccessor::readLink");
|
|
||||||
}
|
|
||||||
|
|
||||||
SourcePath addFile(CanonPath path, std::string && contents) override
|
SourcePath addFile(CanonPath path, std::string && contents) override
|
||||||
{
|
{
|
||||||
files.emplace(path, std::move(contents));
|
return {
|
||||||
|
ref(shared_from_this()),
|
||||||
return {ref(shared_from_this()), std::move(path)};
|
MemorySourceAccessor::addFile(path, std::move(contents))
|
||||||
|
};
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
#include "fetchers.hh"
|
#include "fetchers.hh"
|
||||||
|
#include "processes.hh"
|
||||||
|
#include "users.hh"
|
||||||
#include "cache.hh"
|
#include "cache.hh"
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
#include "tarfile.hh"
|
#include "tarfile.hh"
|
||||||
|
@ -69,14 +71,25 @@ struct MercurialInputScheme : InputScheme
|
||||||
return inputFromAttrs(attrs);
|
return inputFromAttrs(attrs);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
std::string_view schemeName() const override
|
||||||
|
{
|
||||||
|
return "hg";
|
||||||
|
}
|
||||||
|
|
||||||
|
StringSet allowedAttrs() const override
|
||||||
|
{
|
||||||
|
return {
|
||||||
|
"url",
|
||||||
|
"ref",
|
||||||
|
"rev",
|
||||||
|
"revCount",
|
||||||
|
"narHash",
|
||||||
|
"name",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
||||||
{
|
{
|
||||||
if (maybeGetStrAttr(attrs, "type") != "hg") return {};
|
|
||||||
|
|
||||||
for (auto & [name, value] : attrs)
|
|
||||||
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "revCount" && name != "narHash" && name != "name")
|
|
||||||
throw Error("unsupported Mercurial input attribute '%s'", name);
|
|
||||||
|
|
||||||
parseURL(getStrAttr(attrs, "url"));
|
parseURL(getStrAttr(attrs, "url"));
|
||||||
|
|
||||||
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
|
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
|
||||||
|
@ -109,7 +122,7 @@ struct MercurialInputScheme : InputScheme
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
std::optional<Path> getSourcePath(const Input & input) override
|
std::optional<Path> getSourcePath(const Input & input) const override
|
||||||
{
|
{
|
||||||
auto url = parseURL(getStrAttr(input.attrs, "url"));
|
auto url = parseURL(getStrAttr(input.attrs, "url"));
|
||||||
if (url.scheme == "file" && !input.getRef() && !input.getRev())
|
if (url.scheme == "file" && !input.getRef() && !input.getRev())
|
||||||
|
@ -117,18 +130,27 @@ struct MercurialInputScheme : InputScheme
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
|
|
||||||
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
|
void putFile(
|
||||||
|
const Input & input,
|
||||||
|
const CanonPath & path,
|
||||||
|
std::string_view contents,
|
||||||
|
std::optional<std::string> commitMsg) const override
|
||||||
{
|
{
|
||||||
auto sourcePath = getSourcePath(input);
|
auto [isLocal, repoPath] = getActualUrl(input);
|
||||||
assert(sourcePath);
|
if (!isLocal)
|
||||||
|
throw Error("cannot commit '%s' to Mercurial repository '%s' because it's not a working tree", path, input.to_string());
|
||||||
|
|
||||||
|
auto absPath = CanonPath(repoPath) + path;
|
||||||
|
|
||||||
|
writeFile(absPath.abs(), contents);
|
||||||
|
|
||||||
// FIXME: shut up if file is already tracked.
|
// FIXME: shut up if file is already tracked.
|
||||||
runHg(
|
runHg(
|
||||||
{ "add", *sourcePath + "/" + std::string(file) });
|
{ "add", absPath.abs() });
|
||||||
|
|
||||||
if (commitMsg)
|
if (commitMsg)
|
||||||
runHg(
|
runHg(
|
||||||
{ "commit", *sourcePath + "/" + std::string(file), "-m", *commitMsg });
|
{ "commit", absPath.abs(), "-m", *commitMsg });
|
||||||
}
|
}
|
||||||
|
|
||||||
std::pair<bool, std::string> getActualUrl(const Input & input) const
|
std::pair<bool, std::string> getActualUrl(const Input & input) const
|
||||||
|
|
77
src/libfetchers/mounted-input-accessor.cc
Normal file
77
src/libfetchers/mounted-input-accessor.cc
Normal file
|
@ -0,0 +1,77 @@
|
||||||
|
#include "mounted-input-accessor.hh"
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
struct MountedInputAccessor : InputAccessor
|
||||||
|
{
|
||||||
|
std::map<CanonPath, ref<InputAccessor>> mounts;
|
||||||
|
|
||||||
|
MountedInputAccessor(std::map<CanonPath, ref<InputAccessor>> _mounts)
|
||||||
|
: mounts(std::move(_mounts))
|
||||||
|
{
|
||||||
|
// Currently we require a root filesystem. This could be relaxed.
|
||||||
|
assert(mounts.contains(CanonPath::root));
|
||||||
|
|
||||||
|
// FIXME: return dummy parent directories automatically?
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string readFile(const CanonPath & path) override
|
||||||
|
{
|
||||||
|
auto [accessor, subpath] = resolve(path);
|
||||||
|
return accessor->readFile(subpath);
|
||||||
|
}
|
||||||
|
|
||||||
|
bool pathExists(const CanonPath & path) override
|
||||||
|
{
|
||||||
|
auto [accessor, subpath] = resolve(path);
|
||||||
|
return accessor->pathExists(subpath);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<Stat> maybeLstat(const CanonPath & path) override
|
||||||
|
{
|
||||||
|
auto [accessor, subpath] = resolve(path);
|
||||||
|
return accessor->maybeLstat(subpath);
|
||||||
|
}
|
||||||
|
|
||||||
|
DirEntries readDirectory(const CanonPath & path) override
|
||||||
|
{
|
||||||
|
auto [accessor, subpath] = resolve(path);
|
||||||
|
return accessor->readDirectory(subpath);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string readLink(const CanonPath & path) override
|
||||||
|
{
|
||||||
|
auto [accessor, subpath] = resolve(path);
|
||||||
|
return accessor->readLink(subpath);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string showPath(const CanonPath & path) override
|
||||||
|
{
|
||||||
|
auto [accessor, subpath] = resolve(path);
|
||||||
|
return accessor->showPath(subpath);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<ref<InputAccessor>, CanonPath> resolve(CanonPath path)
|
||||||
|
{
|
||||||
|
// Find the nearest parent of `path` that is a mount point.
|
||||||
|
std::vector<std::string> subpath;
|
||||||
|
while (true) {
|
||||||
|
auto i = mounts.find(path);
|
||||||
|
if (i != mounts.end()) {
|
||||||
|
std::reverse(subpath.begin(), subpath.end());
|
||||||
|
return {i->second, CanonPath(subpath)};
|
||||||
|
}
|
||||||
|
|
||||||
|
assert(!path.isRoot());
|
||||||
|
subpath.push_back(std::string(*path.baseName()));
|
||||||
|
path.pop();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
ref<InputAccessor> makeMountedInputAccessor(std::map<CanonPath, ref<InputAccessor>> mounts)
|
||||||
|
{
|
||||||
|
return make_ref<MountedInputAccessor>(std::move(mounts));
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
9
src/libfetchers/mounted-input-accessor.hh
Normal file
9
src/libfetchers/mounted-input-accessor.hh
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include "input-accessor.hh"
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
ref<InputAccessor> makeMountedInputAccessor(std::map<CanonPath, ref<InputAccessor>> mounts);
|
||||||
|
|
||||||
|
}
|
|
@ -32,23 +32,30 @@ struct PathInputScheme : InputScheme
|
||||||
return input;
|
return input;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
std::string_view schemeName() const override
|
||||||
|
{
|
||||||
|
return "path";
|
||||||
|
}
|
||||||
|
|
||||||
|
StringSet allowedAttrs() const override
|
||||||
|
{
|
||||||
|
return {
|
||||||
|
"path",
|
||||||
|
/* Allow the user to pass in "fake" tree info
|
||||||
|
attributes. This is useful for making a pinned tree work
|
||||||
|
the same as the repository from which is exported (e.g.
|
||||||
|
path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...).
|
||||||
|
*/
|
||||||
|
"rev",
|
||||||
|
"revCount",
|
||||||
|
"lastModified",
|
||||||
|
"narHash",
|
||||||
|
};
|
||||||
|
}
|
||||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
||||||
{
|
{
|
||||||
if (maybeGetStrAttr(attrs, "type") != "path") return {};
|
|
||||||
|
|
||||||
getStrAttr(attrs, "path");
|
getStrAttr(attrs, "path");
|
||||||
|
|
||||||
for (auto & [name, value] : attrs)
|
|
||||||
/* Allow the user to pass in "fake" tree info
|
|
||||||
attributes. This is useful for making a pinned tree
|
|
||||||
work the same as the repository from which is exported
|
|
||||||
(e.g. path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...). */
|
|
||||||
if (name == "type" || name == "rev" || name == "revCount" || name == "lastModified" || name == "narHash" || name == "path")
|
|
||||||
// checked in Input::fromAttrs
|
|
||||||
;
|
|
||||||
else
|
|
||||||
throw Error("unsupported path input attribute '%s'", name);
|
|
||||||
|
|
||||||
Input input;
|
Input input;
|
||||||
input.attrs = attrs;
|
input.attrs = attrs;
|
||||||
return input;
|
return input;
|
||||||
|
@ -66,14 +73,28 @@ struct PathInputScheme : InputScheme
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
std::optional<Path> getSourcePath(const Input & input) override
|
std::optional<Path> getSourcePath(const Input & input) const override
|
||||||
{
|
{
|
||||||
return getStrAttr(input.attrs, "path");
|
return getStrAttr(input.attrs, "path");
|
||||||
}
|
}
|
||||||
|
|
||||||
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
|
void putFile(
|
||||||
|
const Input & input,
|
||||||
|
const CanonPath & path,
|
||||||
|
std::string_view contents,
|
||||||
|
std::optional<std::string> commitMsg) const override
|
||||||
{
|
{
|
||||||
// nothing to do
|
writeFile((CanonPath(getAbsPath(input)) + path).abs(), contents);
|
||||||
|
}
|
||||||
|
|
||||||
|
CanonPath getAbsPath(const Input & input) const
|
||||||
|
{
|
||||||
|
auto path = getStrAttr(input.attrs, "path");
|
||||||
|
|
||||||
|
if (path[0] == '/')
|
||||||
|
return CanonPath(path);
|
||||||
|
|
||||||
|
throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
||||||
|
@ -121,7 +142,7 @@ struct PathInputScheme : InputScheme
|
||||||
return {std::move(*storePath), input};
|
return {std::move(*storePath), input};
|
||||||
}
|
}
|
||||||
|
|
||||||
std::optional<ExperimentalFeature> experimentalFeature() override
|
std::optional<ExperimentalFeature> experimentalFeature() const override
|
||||||
{
|
{
|
||||||
return Xp::Flakes;
|
return Xp::Flakes;
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
#include "registry.hh"
|
#include "registry.hh"
|
||||||
#include "tarball.hh"
|
#include "tarball.hh"
|
||||||
#include "util.hh"
|
#include "users.hh"
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "local-fs-store.hh"
|
#include "local-fs-store.hh"
|
||||||
|
|
|
@ -184,7 +184,6 @@ DownloadTarballResult downloadTarball(
|
||||||
// An input scheme corresponding to a curl-downloadable resource.
|
// An input scheme corresponding to a curl-downloadable resource.
|
||||||
struct CurlInputScheme : InputScheme
|
struct CurlInputScheme : InputScheme
|
||||||
{
|
{
|
||||||
virtual const std::string inputType() const = 0;
|
|
||||||
const std::set<std::string> transportUrlSchemes = {"file", "http", "https"};
|
const std::set<std::string> transportUrlSchemes = {"file", "http", "https"};
|
||||||
|
|
||||||
const bool hasTarballExtension(std::string_view path) const
|
const bool hasTarballExtension(std::string_view path) const
|
||||||
|
@ -222,22 +221,27 @@ struct CurlInputScheme : InputScheme
|
||||||
url.query.erase("rev");
|
url.query.erase("rev");
|
||||||
url.query.erase("revCount");
|
url.query.erase("revCount");
|
||||||
|
|
||||||
input.attrs.insert_or_assign("type", inputType());
|
input.attrs.insert_or_assign("type", std::string { schemeName() });
|
||||||
input.attrs.insert_or_assign("url", url.to_string());
|
input.attrs.insert_or_assign("url", url.to_string());
|
||||||
return input;
|
return input;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
StringSet allowedAttrs() const override
|
||||||
|
{
|
||||||
|
return {
|
||||||
|
"type",
|
||||||
|
"url",
|
||||||
|
"narHash",
|
||||||
|
"name",
|
||||||
|
"unpack",
|
||||||
|
"rev",
|
||||||
|
"revCount",
|
||||||
|
"lastModified",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
|
||||||
{
|
{
|
||||||
auto type = maybeGetStrAttr(attrs, "type");
|
|
||||||
if (type != inputType()) return {};
|
|
||||||
|
|
||||||
// FIXME: some of these only apply to TarballInputScheme.
|
|
||||||
std::set<std::string> allowedNames = {"type", "url", "narHash", "name", "unpack", "rev", "revCount", "lastModified"};
|
|
||||||
for (auto & [name, value] : attrs)
|
|
||||||
if (!allowedNames.count(name))
|
|
||||||
throw Error("unsupported %s input attribute '%s'", *type, name);
|
|
||||||
|
|
||||||
Input input;
|
Input input;
|
||||||
input.attrs = attrs;
|
input.attrs = attrs;
|
||||||
|
|
||||||
|
@ -258,14 +262,14 @@ struct CurlInputScheme : InputScheme
|
||||||
|
|
||||||
struct FileInputScheme : CurlInputScheme
|
struct FileInputScheme : CurlInputScheme
|
||||||
{
|
{
|
||||||
const std::string inputType() const override { return "file"; }
|
std::string_view schemeName() const override { return "file"; }
|
||||||
|
|
||||||
bool isValidURL(const ParsedURL & url, bool requireTree) const override
|
bool isValidURL(const ParsedURL & url, bool requireTree) const override
|
||||||
{
|
{
|
||||||
auto parsedUrlScheme = parseUrlScheme(url.scheme);
|
auto parsedUrlScheme = parseUrlScheme(url.scheme);
|
||||||
return transportUrlSchemes.count(std::string(parsedUrlScheme.transport))
|
return transportUrlSchemes.count(std::string(parsedUrlScheme.transport))
|
||||||
&& (parsedUrlScheme.application
|
&& (parsedUrlScheme.application
|
||||||
? parsedUrlScheme.application.value() == inputType()
|
? parsedUrlScheme.application.value() == schemeName()
|
||||||
: (!requireTree && !hasTarballExtension(url.path)));
|
: (!requireTree && !hasTarballExtension(url.path)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -278,7 +282,7 @@ struct FileInputScheme : CurlInputScheme
|
||||||
|
|
||||||
struct TarballInputScheme : CurlInputScheme
|
struct TarballInputScheme : CurlInputScheme
|
||||||
{
|
{
|
||||||
const std::string inputType() const override { return "tarball"; }
|
std::string_view schemeName() const override { return "tarball"; }
|
||||||
|
|
||||||
bool isValidURL(const ParsedURL & url, bool requireTree) const override
|
bool isValidURL(const ParsedURL & url, bool requireTree) const override
|
||||||
{
|
{
|
||||||
|
@ -286,7 +290,7 @@ struct TarballInputScheme : CurlInputScheme
|
||||||
|
|
||||||
return transportUrlSchemes.count(std::string(parsedUrlScheme.transport))
|
return transportUrlSchemes.count(std::string(parsedUrlScheme.transport))
|
||||||
&& (parsedUrlScheme.application
|
&& (parsedUrlScheme.application
|
||||||
? parsedUrlScheme.application.value() == inputType()
|
? parsedUrlScheme.application.value() == schemeName()
|
||||||
: (requireTree || hasTarballExtension(url.path)));
|
: (requireTree || hasTarballExtension(url.path)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,9 @@
|
||||||
#include "common-args.hh"
|
#include "common-args.hh"
|
||||||
#include "args/root.hh"
|
#include "args/root.hh"
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
|
#include "logging.hh"
|
||||||
#include "loggers.hh"
|
#include "loggers.hh"
|
||||||
|
#include "util.hh"
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
#include "loggers.hh"
|
#include "loggers.hh"
|
||||||
|
#include "environment-variables.hh"
|
||||||
#include "progress-bar.hh"
|
#include "progress-bar.hh"
|
||||||
#include "util.hh"
|
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
#include "progress-bar.hh"
|
#include "progress-bar.hh"
|
||||||
#include "util.hh"
|
#include "terminal.hh"
|
||||||
#include "sync.hh"
|
#include "sync.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "names.hh"
|
#include "names.hh"
|
||||||
|
@ -340,6 +340,14 @@ public:
|
||||||
state->activitiesByType[type].expected += j;
|
state->activitiesByType[type].expected += j;
|
||||||
update(*state);
|
update(*state);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
else if (type == resFetchStatus) {
|
||||||
|
auto i = state->its.find(act);
|
||||||
|
assert(i != state->its.end());
|
||||||
|
ActInfo & actInfo = *i->second;
|
||||||
|
actInfo.lastLine = getS(fields, 0);
|
||||||
|
update(*state);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void update(State & state)
|
void update(State & state)
|
||||||
|
|
|
@ -1,10 +1,11 @@
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
|
#include "current-process.hh"
|
||||||
#include "shared.hh"
|
#include "shared.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "gc-store.hh"
|
#include "gc-store.hh"
|
||||||
#include "util.hh"
|
|
||||||
#include "loggers.hh"
|
#include "loggers.hh"
|
||||||
#include "progress-bar.hh"
|
#include "progress-bar.hh"
|
||||||
|
#include "signals.hh"
|
||||||
|
|
||||||
#include <algorithm>
|
#include <algorithm>
|
||||||
#include <cctype>
|
#include <cctype>
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#pragma once
|
#pragma once
|
||||||
///@file
|
///@file
|
||||||
|
|
||||||
#include "util.hh"
|
#include "processes.hh"
|
||||||
#include "args.hh"
|
#include "args.hh"
|
||||||
#include "args/root.hh"
|
#include "args/root.hh"
|
||||||
#include "common-args.hh"
|
#include "common-args.hh"
|
||||||
|
|
|
@ -2,7 +2,7 @@
|
||||||
#include "binary-cache-store.hh"
|
#include "binary-cache-store.hh"
|
||||||
#include "compression.hh"
|
#include "compression.hh"
|
||||||
#include "derivations.hh"
|
#include "derivations.hh"
|
||||||
#include "fs-accessor.hh"
|
#include "source-accessor.hh"
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
#include "nar-info.hh"
|
#include "nar-info.hh"
|
||||||
#include "sync.hh"
|
#include "sync.hh"
|
||||||
|
@ -11,6 +11,7 @@
|
||||||
#include "nar-accessor.hh"
|
#include "nar-accessor.hh"
|
||||||
#include "thread-pool.hh"
|
#include "thread-pool.hh"
|
||||||
#include "callback.hh"
|
#include "callback.hh"
|
||||||
|
#include "signals.hh"
|
||||||
|
|
||||||
#include <chrono>
|
#include <chrono>
|
||||||
#include <future>
|
#include <future>
|
||||||
|
@ -143,7 +144,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
|
||||||
write the compressed NAR to disk), into a HashSink (to get the
|
write the compressed NAR to disk), into a HashSink (to get the
|
||||||
NAR hash), and into a NarAccessor (to get the NAR listing). */
|
NAR hash), and into a NarAccessor (to get the NAR listing). */
|
||||||
HashSink fileHashSink { htSHA256 };
|
HashSink fileHashSink { htSHA256 };
|
||||||
std::shared_ptr<FSAccessor> narAccessor;
|
std::shared_ptr<SourceAccessor> narAccessor;
|
||||||
HashSink narHashSink { htSHA256 };
|
HashSink narHashSink { htSHA256 };
|
||||||
{
|
{
|
||||||
FdSink fileSink(fdTemp.get());
|
FdSink fileSink(fdTemp.get());
|
||||||
|
@ -195,7 +196,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
|
||||||
if (writeNARListing) {
|
if (writeNARListing) {
|
||||||
nlohmann::json j = {
|
nlohmann::json j = {
|
||||||
{"version", 1},
|
{"version", 1},
|
||||||
{"root", listNar(ref<FSAccessor>(narAccessor), "", true)},
|
{"root", listNar(ref<SourceAccessor>(narAccessor), CanonPath::root, true)},
|
||||||
};
|
};
|
||||||
|
|
||||||
upsertFile(std::string(info.path.hashPart()) + ".ls", j.dump(), "application/json");
|
upsertFile(std::string(info.path.hashPart()) + ".ls", j.dump(), "application/json");
|
||||||
|
@ -206,9 +207,9 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
|
||||||
specify the NAR file and member containing the debug info. */
|
specify the NAR file and member containing the debug info. */
|
||||||
if (writeDebugInfo) {
|
if (writeDebugInfo) {
|
||||||
|
|
||||||
std::string buildIdDir = "/lib/debug/.build-id";
|
CanonPath buildIdDir("lib/debug/.build-id");
|
||||||
|
|
||||||
if (narAccessor->stat(buildIdDir).type == FSAccessor::tDirectory) {
|
if (auto st = narAccessor->maybeLstat(buildIdDir); st && st->type == SourceAccessor::tDirectory) {
|
||||||
|
|
||||||
ThreadPool threadPool(25);
|
ThreadPool threadPool(25);
|
||||||
|
|
||||||
|
@ -231,17 +232,17 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
|
||||||
std::regex regex1("^[0-9a-f]{2}$");
|
std::regex regex1("^[0-9a-f]{2}$");
|
||||||
std::regex regex2("^[0-9a-f]{38}\\.debug$");
|
std::regex regex2("^[0-9a-f]{38}\\.debug$");
|
||||||
|
|
||||||
for (auto & s1 : narAccessor->readDirectory(buildIdDir)) {
|
for (auto & [s1, _type] : narAccessor->readDirectory(buildIdDir)) {
|
||||||
auto dir = buildIdDir + "/" + s1;
|
auto dir = buildIdDir + s1;
|
||||||
|
|
||||||
if (narAccessor->stat(dir).type != FSAccessor::tDirectory
|
if (narAccessor->lstat(dir).type != SourceAccessor::tDirectory
|
||||||
|| !std::regex_match(s1, regex1))
|
|| !std::regex_match(s1, regex1))
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
for (auto & s2 : narAccessor->readDirectory(dir)) {
|
for (auto & [s2, _type] : narAccessor->readDirectory(dir)) {
|
||||||
auto debugPath = dir + "/" + s2;
|
auto debugPath = dir + s2;
|
||||||
|
|
||||||
if (narAccessor->stat(debugPath).type != FSAccessor::tRegular
|
if (narAccessor->lstat(debugPath).type != SourceAccessor::tRegular
|
||||||
|| !std::regex_match(s2, regex2))
|
|| !std::regex_match(s2, regex2))
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
|
@ -250,7 +251,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
|
||||||
std::string key = "debuginfo/" + buildId;
|
std::string key = "debuginfo/" + buildId;
|
||||||
std::string target = "../" + narInfo->url;
|
std::string target = "../" + narInfo->url;
|
||||||
|
|
||||||
threadPool.enqueue(std::bind(doFile, std::string(debugPath, 1), key, target));
|
threadPool.enqueue(std::bind(doFile, std::string(debugPath.rel()), key, target));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -503,9 +504,9 @@ void BinaryCacheStore::registerDrvOutput(const Realisation& info) {
|
||||||
upsertFile(filePath, info.toJSON().dump(), "application/json");
|
upsertFile(filePath, info.toJSON().dump(), "application/json");
|
||||||
}
|
}
|
||||||
|
|
||||||
ref<FSAccessor> BinaryCacheStore::getFSAccessor()
|
ref<SourceAccessor> BinaryCacheStore::getFSAccessor(bool requireValidPath)
|
||||||
{
|
{
|
||||||
return make_ref<RemoteFSAccessor>(ref<Store>(shared_from_this()), localNarCache);
|
return make_ref<RemoteFSAccessor>(ref<Store>(shared_from_this()), requireValidPath, localNarCache);
|
||||||
}
|
}
|
||||||
|
|
||||||
void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSet & sigs)
|
void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSet & sigs)
|
||||||
|
|
|
@ -17,28 +17,28 @@ struct BinaryCacheStoreConfig : virtual StoreConfig
|
||||||
{
|
{
|
||||||
using StoreConfig::StoreConfig;
|
using StoreConfig::StoreConfig;
|
||||||
|
|
||||||
const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression",
|
const Setting<std::string> compression{this, "xz", "compression",
|
||||||
"NAR compression method (`xz`, `bzip2`, `gzip`, `zstd`, or `none`)."};
|
"NAR compression method (`xz`, `bzip2`, `gzip`, `zstd`, or `none`)."};
|
||||||
|
|
||||||
const Setting<bool> writeNARListing{(StoreConfig*) this, false, "write-nar-listing",
|
const Setting<bool> writeNARListing{this, false, "write-nar-listing",
|
||||||
"Whether to write a JSON file that lists the files in each NAR."};
|
"Whether to write a JSON file that lists the files in each NAR."};
|
||||||
|
|
||||||
const Setting<bool> writeDebugInfo{(StoreConfig*) this, false, "index-debug-info",
|
const Setting<bool> writeDebugInfo{this, false, "index-debug-info",
|
||||||
R"(
|
R"(
|
||||||
Whether to index DWARF debug info files by build ID. This allows [`dwarffs`](https://github.com/edolstra/dwarffs) to
|
Whether to index DWARF debug info files by build ID. This allows [`dwarffs`](https://github.com/edolstra/dwarffs) to
|
||||||
fetch debug info on demand
|
fetch debug info on demand
|
||||||
)"};
|
)"};
|
||||||
|
|
||||||
const Setting<Path> secretKeyFile{(StoreConfig*) this, "", "secret-key",
|
const Setting<Path> secretKeyFile{this, "", "secret-key",
|
||||||
"Path to the secret key used to sign the binary cache."};
|
"Path to the secret key used to sign the binary cache."};
|
||||||
|
|
||||||
const Setting<Path> localNarCache{(StoreConfig*) this, "", "local-nar-cache",
|
const Setting<Path> localNarCache{this, "", "local-nar-cache",
|
||||||
"Path to a local cache of NARs fetched from this binary cache, used by commands such as `nix store cat`."};
|
"Path to a local cache of NARs fetched from this binary cache, used by commands such as `nix store cat`."};
|
||||||
|
|
||||||
const Setting<bool> parallelCompression{(StoreConfig*) this, false, "parallel-compression",
|
const Setting<bool> parallelCompression{this, false, "parallel-compression",
|
||||||
"Enable multi-threaded compression of NARs. This is currently only available for `xz` and `zstd`."};
|
"Enable multi-threaded compression of NARs. This is currently only available for `xz` and `zstd`."};
|
||||||
|
|
||||||
const Setting<int> compressionLevel{(StoreConfig*) this, -1, "compression-level",
|
const Setting<int> compressionLevel{this, -1, "compression-level",
|
||||||
R"(
|
R"(
|
||||||
The *preset level* to be used when compressing NARs.
|
The *preset level* to be used when compressing NARs.
|
||||||
The meaning and accepted values depend on the compression method selected.
|
The meaning and accepted values depend on the compression method selected.
|
||||||
|
@ -148,7 +148,7 @@ public:
|
||||||
|
|
||||||
void narFromPath(const StorePath & path, Sink & sink) override;
|
void narFromPath(const StorePath & path, Sink & sink) override;
|
||||||
|
|
||||||
ref<FSAccessor> getFSAccessor() override;
|
ref<SourceAccessor> getFSAccessor(bool requireValidPath) override;
|
||||||
|
|
||||||
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
|
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
|
||||||
|
|
||||||
|
|
37
src/libstore/build/child.cc
Normal file
37
src/libstore/build/child.cc
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
#include "child.hh"
|
||||||
|
#include "current-process.hh"
|
||||||
|
#include "logging.hh"
|
||||||
|
|
||||||
|
#include <fcntl.h>
|
||||||
|
#include <unistd.h>
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
void commonChildInit()
|
||||||
|
{
|
||||||
|
logger = makeSimpleLogger();
|
||||||
|
|
||||||
|
const static std::string pathNullDevice = "/dev/null";
|
||||||
|
restoreProcessContext(false);
|
||||||
|
|
||||||
|
/* Put the child in a separate session (and thus a separate
|
||||||
|
process group) so that it has no controlling terminal (meaning
|
||||||
|
that e.g. ssh cannot open /dev/tty) and it doesn't receive
|
||||||
|
terminal signals. */
|
||||||
|
if (setsid() == -1)
|
||||||
|
throw SysError("creating a new session");
|
||||||
|
|
||||||
|
/* Dup stderr to stdout. */
|
||||||
|
if (dup2(STDERR_FILENO, STDOUT_FILENO) == -1)
|
||||||
|
throw SysError("cannot dup stderr into stdout");
|
||||||
|
|
||||||
|
/* Reroute stdin to /dev/null. */
|
||||||
|
int fdDevNull = open(pathNullDevice.c_str(), O_RDWR);
|
||||||
|
if (fdDevNull == -1)
|
||||||
|
throw SysError("cannot open '%1%'", pathNullDevice);
|
||||||
|
if (dup2(fdDevNull, STDIN_FILENO) == -1)
|
||||||
|
throw SysError("cannot dup null device into stdin");
|
||||||
|
close(fdDevNull);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
11
src/libstore/build/child.hh
Normal file
11
src/libstore/build/child.hh
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
#pragma once
|
||||||
|
///@file
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Common initialisation performed in child processes.
|
||||||
|
*/
|
||||||
|
void commonChildInit();
|
||||||
|
|
||||||
|
}
|
|
@ -1317,9 +1317,26 @@ void DerivationGoal::handleChildOutput(int fd, std::string_view data)
|
||||||
auto s = handleJSONLogMessage(*json, worker.act, hook->activities, true);
|
auto s = handleJSONLogMessage(*json, worker.act, hook->activities, true);
|
||||||
// ensure that logs from a builder using `ssh-ng://` as protocol
|
// ensure that logs from a builder using `ssh-ng://` as protocol
|
||||||
// are also available to `nix log`.
|
// are also available to `nix log`.
|
||||||
if (s && !isWrittenToLog && logSink && (*json)["type"] == resBuildLogLine) {
|
if (s && !isWrittenToLog && logSink) {
|
||||||
auto f = (*json)["fields"];
|
const auto type = (*json)["type"];
|
||||||
(*logSink)((f.size() > 0 ? f.at(0).get<std::string>() : "") + "\n");
|
const auto fields = (*json)["fields"];
|
||||||
|
if (type == resBuildLogLine) {
|
||||||
|
(*logSink)((fields.size() > 0 ? fields[0].get<std::string>() : "") + "\n");
|
||||||
|
} else if (type == resSetPhase && ! fields.is_null()) {
|
||||||
|
const auto phase = fields[0];
|
||||||
|
if (! phase.is_null()) {
|
||||||
|
// nixpkgs' stdenv produces lines in the log to signal
|
||||||
|
// phase changes.
|
||||||
|
// We want to get the same lines in case of remote builds.
|
||||||
|
// The format is:
|
||||||
|
// @nix { "action": "setPhase", "phase": "$curPhase" }
|
||||||
|
const auto logLine = nlohmann::json::object({
|
||||||
|
{"action", "setPhase"},
|
||||||
|
{"phase", phase}
|
||||||
|
});
|
||||||
|
(*logSink)("@nix " + logLine.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace) + "\n");
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
currentHookLine.clear();
|
currentHookLine.clear();
|
||||||
|
@ -1474,6 +1491,7 @@ void DerivationGoal::done(
|
||||||
SingleDrvOutputs builtOutputs,
|
SingleDrvOutputs builtOutputs,
|
||||||
std::optional<Error> ex)
|
std::optional<Error> ex)
|
||||||
{
|
{
|
||||||
|
outputLocks.unlock();
|
||||||
buildResult.status = status;
|
buildResult.status = status;
|
||||||
if (ex)
|
if (ex)
|
||||||
buildResult.errorMsg = fmt("%s", normaltxt(ex->info().msg));
|
buildResult.errorMsg = fmt("%s", normaltxt(ex->info().msg));
|
||||||
|
|
|
@ -15,7 +15,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
||||||
|
|
||||||
worker.run(goals);
|
worker.run(goals);
|
||||||
|
|
||||||
StorePathSet failed;
|
StringSet failed;
|
||||||
std::optional<Error> ex;
|
std::optional<Error> ex;
|
||||||
for (auto & i : goals) {
|
for (auto & i : goals) {
|
||||||
if (i->ex) {
|
if (i->ex) {
|
||||||
|
@ -26,9 +26,9 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
||||||
}
|
}
|
||||||
if (i->exitCode != Goal::ecSuccess) {
|
if (i->exitCode != Goal::ecSuccess) {
|
||||||
if (auto i2 = dynamic_cast<DerivationGoal *>(i.get()))
|
if (auto i2 = dynamic_cast<DerivationGoal *>(i.get()))
|
||||||
failed.insert(i2->drvPath);
|
failed.insert(std::string { i2->drvPath.to_string() });
|
||||||
else if (auto i2 = dynamic_cast<PathSubstitutionGoal *>(i.get()))
|
else if (auto i2 = dynamic_cast<PathSubstitutionGoal *>(i.get()))
|
||||||
failed.insert(i2->storePath);
|
failed.insert(std::string { i2->storePath.to_string()});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -37,7 +37,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
||||||
throw std::move(*ex);
|
throw std::move(*ex);
|
||||||
} else if (!failed.empty()) {
|
} else if (!failed.empty()) {
|
||||||
if (ex) logError(ex->info());
|
if (ex) logError(ex->info());
|
||||||
throw Error(worker.failingExitStatus(), "build of %s failed", showPaths(failed));
|
throw Error(worker.failingExitStatus(), "build of %s failed", concatStringsSep(", ", quoteStrings(failed)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue