mirror of
https://github.com/privatevoid-net/nix-super.git
synced 2024-11-22 05:56:15 +02:00
Merge remote-tracking branch 'nixos/master'
This commit is contained in:
commit
90e32c2a42
757 changed files with 3860 additions and 2044 deletions
2
.github/labeler.yml
vendored
2
.github/labeler.yml
vendored
|
@ -20,4 +20,4 @@
|
||||||
# Unit tests
|
# Unit tests
|
||||||
- src/*/tests/**/*
|
- src/*/tests/**/*
|
||||||
# Functional and integration tests
|
# Functional and integration tests
|
||||||
- tests/**/*
|
- tests/functional/**/*
|
||||||
|
|
34
.gitignore
vendored
34
.gitignore
vendored
|
@ -79,24 +79,24 @@ perl/Makefile.config
|
||||||
|
|
||||||
/src/build-remote/build-remote
|
/src/build-remote/build-remote
|
||||||
|
|
||||||
# /tests/
|
# /tests/functional/
|
||||||
/tests/test-tmp
|
/tests/functional/test-tmp
|
||||||
/tests/common/vars-and-functions.sh
|
/tests/functional/common/vars-and-functions.sh
|
||||||
/tests/result*
|
/tests/functional/result*
|
||||||
/tests/restricted-innocent
|
/tests/functional/restricted-innocent
|
||||||
/tests/shell
|
/tests/functional/shell
|
||||||
/tests/shell.drv
|
/tests/functional/shell.drv
|
||||||
/tests/config.nix
|
/tests/functional/config.nix
|
||||||
/tests/ca/config.nix
|
/tests/functional/ca/config.nix
|
||||||
/tests/dyn-drv/config.nix
|
/tests/functional/dyn-drv/config.nix
|
||||||
/tests/repl-result-out
|
/tests/functional/repl-result-out
|
||||||
/tests/test-libstoreconsumer/test-libstoreconsumer
|
/tests/functional/test-libstoreconsumer/test-libstoreconsumer
|
||||||
|
|
||||||
# /tests/lang/
|
# /tests/functional/lang/
|
||||||
/tests/lang/*.out
|
/tests/functional/lang/*.out
|
||||||
/tests/lang/*.out.xml
|
/tests/functional/lang/*.out.xml
|
||||||
/tests/lang/*.err
|
/tests/functional/lang/*.err
|
||||||
/tests/lang/*.ast
|
/tests/functional/lang/*.ast
|
||||||
|
|
||||||
/perl/lib/Nix/Config.pm
|
/perl/lib/Nix/Config.pm
|
||||||
/perl/lib/Nix/Store.cc
|
/perl/lib/Nix/Store.cc
|
||||||
|
|
|
@ -24,25 +24,30 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy).
|
||||||
|
|
||||||
## Making changes to Nix
|
## Making changes to Nix
|
||||||
|
|
||||||
1. Check for [pull requests](https://github.com/NixOS/nix/pulls) that might already cover the contribution you are about to make.
|
1. Search for related issues that cover what you're going to work on.
|
||||||
There are many open pull requests that might already do what you intent to work on.
|
It could help to mention there that you will work on the issue.
|
||||||
You can use [labels](https://github.com/NixOS/nix/labels) to filter for relevant topics.
|
|
||||||
|
|
||||||
2. Search for related issues that cover what you're going to work on. It could help to mention there that you will work on the issue.
|
|
||||||
|
|
||||||
Issues labeled [good first issue](https://github.com/NixOS/nix/labels/good-first-issue) should be relatively easy to fix and are likely to get merged quickly.
|
Issues labeled [good first issue](https://github.com/NixOS/nix/labels/good-first-issue) should be relatively easy to fix and are likely to get merged quickly.
|
||||||
Pull requests addressing issues labeled [idea approved](https://github.com/NixOS/nix/labels/idea%20approved) are especially welcomed by maintainers and will receive prioritised review.
|
Pull requests addressing issues labeled [idea approved](https://github.com/NixOS/nix/labels/idea%20approved) or [RFC](https://github.com/NixOS/nix/labels/RFC) are especially welcomed by maintainers and will receive prioritised review.
|
||||||
|
|
||||||
|
If there is no relevant issue yet and you're not sure whether your change is likely to be accepted, [open an issue](https://github.com/NixOS/nix/issues/new/choose) yourself.
|
||||||
|
|
||||||
|
2. Check for [pull requests](https://github.com/NixOS/nix/pulls) that might already cover the contribution you are about to make.
|
||||||
|
There are many open pull requests that might already do what you intend to work on.
|
||||||
|
You can use [labels](https://github.com/NixOS/nix/labels) to filter for relevant topics.
|
||||||
|
|
||||||
3. Check the [Nix reference manual](https://nixos.org/manual/nix/unstable/contributing/hacking.html) for information on building Nix and running its tests.
|
3. Check the [Nix reference manual](https://nixos.org/manual/nix/unstable/contributing/hacking.html) for information on building Nix and running its tests.
|
||||||
|
|
||||||
For contributions to the command line interface, please check the [CLI guidelines](https://nixos.org/manual/nix/unstable/contributing/cli-guideline.html).
|
For contributions to the command line interface, please check the [CLI guidelines](https://nixos.org/manual/nix/unstable/contributing/cli-guideline.html).
|
||||||
|
|
||||||
4. Make your changes!
|
4. Make your change!
|
||||||
|
|
||||||
5. [Create a pull request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request) for your changes.
|
5. [Create a pull request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request) for your changes.
|
||||||
* Link related issues in your pull request to inform interested parties and future contributors about your change.
|
* Clearly explain the problem that you're solving.
|
||||||
|
|
||||||
|
Link related issues to inform interested parties and future contributors about your change.
|
||||||
|
If your pull request closes one or multiple issues, mention that in the description using `Closes: #<number>`, as it will then happen automatically when your change is merged.
|
||||||
* Make sure to have [a clean history of commits on your branch by using rebase](https://www.digitalocean.com/community/tutorials/how-to-rebase-and-update-a-pull-request).
|
* Make sure to have [a clean history of commits on your branch by using rebase](https://www.digitalocean.com/community/tutorials/how-to-rebase-and-update-a-pull-request).
|
||||||
If your pull request closes one or multiple issues, note that in the description using `Closes: #<number>`, as it will then happen automatically when your change is merged.
|
|
||||||
* [Mark the pull request as draft](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request) if you're not done with the changes.
|
* [Mark the pull request as draft](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request) if you're not done with the changes.
|
||||||
|
|
||||||
6. Do not expect your pull request to be reviewed immediately.
|
6. Do not expect your pull request to be reviewed immediately.
|
||||||
|
@ -52,7 +57,7 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy).
|
||||||
|
|
||||||
- [ ] Fixes an [idea approved](https://github.com/NixOS/nix/labels/idea%20approved) issue
|
- [ ] Fixes an [idea approved](https://github.com/NixOS/nix/labels/idea%20approved) issue
|
||||||
- [ ] Tests, as appropriate:
|
- [ ] Tests, as appropriate:
|
||||||
- Functional tests – [`tests/**.sh`](./tests)
|
- Functional tests – [`tests/functional/**.sh`](./tests/functional)
|
||||||
- Unit tests – [`src/*/tests`](./src/)
|
- Unit tests – [`src/*/tests`](./src/)
|
||||||
- Integration tests – [`tests/nixos/*`](./tests/nixos)
|
- Integration tests – [`tests/nixos/*`](./tests/nixos)
|
||||||
- [ ] User documentation in the [manual](..doc/manual/src)
|
- [ ] User documentation in the [manual](..doc/manual/src)
|
||||||
|
|
26
Makefile
26
Makefile
|
@ -1,3 +1,7 @@
|
||||||
|
-include Makefile.config
|
||||||
|
clean-files += Makefile.config
|
||||||
|
|
||||||
|
ifeq ($(ENABLE_BUILD), yes)
|
||||||
makefiles = \
|
makefiles = \
|
||||||
mk/precompiled-headers.mk \
|
mk/precompiled-headers.mk \
|
||||||
local.mk \
|
local.mk \
|
||||||
|
@ -18,19 +22,23 @@ makefiles = \
|
||||||
misc/upstart/local.mk \
|
misc/upstart/local.mk \
|
||||||
doc/manual/local.mk \
|
doc/manual/local.mk \
|
||||||
doc/internal-api/local.mk
|
doc/internal-api/local.mk
|
||||||
|
endif
|
||||||
|
|
||||||
-include Makefile.config
|
ifeq ($(ENABLE_BUILD)_$(ENABLE_TESTS), yes_yes)
|
||||||
|
UNIT_TEST_ENV = _NIX_TEST_UNIT_DATA=unit-test-data
|
||||||
ifeq ($(tests), yes)
|
|
||||||
makefiles += \
|
makefiles += \
|
||||||
src/libutil/tests/local.mk \
|
src/libutil/tests/local.mk \
|
||||||
src/libstore/tests/local.mk \
|
src/libstore/tests/local.mk \
|
||||||
src/libexpr/tests/local.mk \
|
src/libexpr/tests/local.mk
|
||||||
tests/local.mk \
|
endif
|
||||||
tests/ca/local.mk \
|
|
||||||
tests/dyn-drv/local.mk \
|
ifeq ($(ENABLE_TESTS), yes)
|
||||||
tests/test-libstoreconsumer/local.mk \
|
makefiles += \
|
||||||
tests/plugins/local.mk
|
tests/functional/local.mk \
|
||||||
|
tests/functional/ca/local.mk \
|
||||||
|
tests/functional/dyn-drv/local.mk \
|
||||||
|
tests/functional/test-libstoreconsumer/local.mk \
|
||||||
|
tests/functional/plugins/local.mk
|
||||||
else
|
else
|
||||||
makefiles += \
|
makefiles += \
|
||||||
mk/disable-tests.mk
|
mk/disable-tests.mk
|
||||||
|
|
|
@ -46,5 +46,6 @@ sandbox_shell = @sandbox_shell@
|
||||||
storedir = @storedir@
|
storedir = @storedir@
|
||||||
sysconfdir = @sysconfdir@
|
sysconfdir = @sysconfdir@
|
||||||
system = @system@
|
system = @system@
|
||||||
tests = @tests@
|
ENABLE_BUILD = @ENABLE_BUILD@
|
||||||
|
ENABLE_TESTS = @ENABLE_TESTS@
|
||||||
internal_api_docs = @internal_api_docs@
|
internal_api_docs = @internal_api_docs@
|
||||||
|
|
|
@ -59,12 +59,18 @@ index b5d71e62..aed7b0bf 100644
|
||||||
GC_bool found_me = FALSE;
|
GC_bool found_me = FALSE;
|
||||||
size_t nthreads = 0;
|
size_t nthreads = 0;
|
||||||
int i;
|
int i;
|
||||||
@@ -851,6 +853,31 @@ GC_INNER void GC_push_all_stacks(void)
|
@@ -851,6 +853,37 @@ GC_INNER void GC_push_all_stacks(void)
|
||||||
hi = p->altstack + p->altstack_size;
|
hi = p->altstack + p->altstack_size;
|
||||||
/* FIXME: Need to scan the normal stack too, but how ? */
|
/* FIXME: Need to scan the normal stack too, but how ? */
|
||||||
/* FIXME: Assume stack grows down */
|
/* FIXME: Assume stack grows down */
|
||||||
+ } else {
|
+ } else {
|
||||||
+ if (pthread_getattr_np(p->id, &pattr)) {
|
+#ifdef HAVE_PTHREAD_ATTR_GET_NP
|
||||||
|
+ if (!pthread_attr_init(&pattr)
|
||||||
|
+ || !pthread_attr_get_np(p->id, &pattr))
|
||||||
|
+#else /* HAVE_PTHREAD_GETATTR_NP */
|
||||||
|
+ if (pthread_getattr_np(p->id, &pattr))
|
||||||
|
+#endif
|
||||||
|
+ {
|
||||||
+ ABORT("GC_push_all_stacks: pthread_getattr_np failed!");
|
+ ABORT("GC_push_all_stacks: pthread_getattr_np failed!");
|
||||||
+ }
|
+ }
|
||||||
+ if (pthread_attr_getstacksize(&pattr, &stack_limit)) {
|
+ if (pthread_attr_getstacksize(&pattr, &stack_limit)) {
|
||||||
|
|
|
@ -1,4 +0,0 @@
|
||||||
#! /bin/sh -e
|
|
||||||
rm -f aclocal.m4
|
|
||||||
mkdir -p config
|
|
||||||
exec autoreconf -vfi
|
|
11
configure.ac
11
configure.ac
|
@ -152,12 +152,17 @@ if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then
|
||||||
LDFLAGS="-latomic $LDFLAGS"
|
LDFLAGS="-latomic $LDFLAGS"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Running the functional tests without building Nix is useful for testing
|
||||||
|
# different pre-built versions of Nix against each other.
|
||||||
|
AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]),
|
||||||
|
ENABLE_BUILD=$enableval, ENABLE_BUILD=yes)
|
||||||
|
AC_SUBST(ENABLE_BUILD)
|
||||||
# Building without tests is useful for bootstrapping with a smaller footprint
|
# Building without tests is useful for bootstrapping with a smaller footprint
|
||||||
# or running the tests in a separate derivation. Otherwise, we do compile and
|
# or running the tests in a separate derivation. Otherwise, we do compile and
|
||||||
# run them.
|
# run them.
|
||||||
AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]),
|
AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]),
|
||||||
tests=$enableval, tests=yes)
|
ENABLE_TESTS=$enableval, ENABLE_TESTS=yes)
|
||||||
AC_SUBST(tests)
|
AC_SUBST(ENABLE_TESTS)
|
||||||
|
|
||||||
# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
|
# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
|
||||||
AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
|
AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
|
||||||
|
@ -289,7 +294,7 @@ if test "$gc" = yes; then
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
|
||||||
if test "$tests" = yes; then
|
if test "$ENABLE_TESTS" = yes; then
|
||||||
|
|
||||||
# Look for gtest.
|
# Look for gtest.
|
||||||
PKG_CHECK_MODULES([GTEST], [gtest_main])
|
PKG_CHECK_MODULES([GTEST], [gtest_main])
|
||||||
|
|
|
@ -54,6 +54,23 @@ INPUT = \
|
||||||
src/nix-env \
|
src/nix-env \
|
||||||
src/nix-store
|
src/nix-store
|
||||||
|
|
||||||
|
# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names
|
||||||
|
# in the source code. If set to NO, only conditional compilation will be
|
||||||
|
# performed. Macro expansion can be done in a controlled way by setting
|
||||||
|
# EXPAND_ONLY_PREDEF to YES.
|
||||||
|
# The default value is: NO.
|
||||||
|
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
|
||||||
|
|
||||||
|
MACRO_EXPANSION = YES
|
||||||
|
|
||||||
|
# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
|
||||||
|
# the macro expansion is limited to the macros specified with the PREDEFINED and
|
||||||
|
# EXPAND_AS_DEFINED tags.
|
||||||
|
# The default value is: NO.
|
||||||
|
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
|
||||||
|
|
||||||
|
EXPAND_ONLY_PREDEF = YES
|
||||||
|
|
||||||
# The INCLUDE_PATH tag can be used to specify one or more directories that
|
# The INCLUDE_PATH tag can be used to specify one or more directories that
|
||||||
# contain include files that are not input files but should be processed by the
|
# contain include files that are not input files but should be processed by the
|
||||||
# preprocessor. Note that the INCLUDE_PATH is not recursive, so the setting of
|
# preprocessor. Note that the INCLUDE_PATH is not recursive, so the setting of
|
||||||
|
@ -61,3 +78,16 @@ INPUT = \
|
||||||
# This tag requires that the tag SEARCH_INCLUDES is set to YES.
|
# This tag requires that the tag SEARCH_INCLUDES is set to YES.
|
||||||
|
|
||||||
INCLUDE_PATH = @RAPIDCHECK_HEADERS@
|
INCLUDE_PATH = @RAPIDCHECK_HEADERS@
|
||||||
|
|
||||||
|
# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
|
||||||
|
# tag can be used to specify a list of macro names that should be expanded. The
|
||||||
|
# macro definition that is found in the sources will be used. Use the PREDEFINED
|
||||||
|
# tag if you want to use a different macro definition that overrules the
|
||||||
|
# definition found in the source code.
|
||||||
|
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
|
||||||
|
|
||||||
|
EXPAND_AS_DEFINED = \
|
||||||
|
DECLARE_COMMON_SERIALISER \
|
||||||
|
DECLARE_WORKER_SERIALISER \
|
||||||
|
DECLARE_SERVE_SERIALISER \
|
||||||
|
LENGTH_PREFIXED_PROTO_HELPER
|
||||||
|
|
|
@ -1,11 +1,12 @@
|
||||||
let
|
let
|
||||||
inherit (builtins)
|
inherit (builtins)
|
||||||
attrNames attrValues fromJSON listToAttrs mapAttrs
|
attrNames attrValues fromJSON listToAttrs mapAttrs groupBy
|
||||||
concatStringsSep concatMap length lessThan replaceStrings sort;
|
concatStringsSep concatMap length lessThan replaceStrings sort;
|
||||||
inherit (import ./utils.nix) concatStrings optionalString filterAttrs trim squash unique showSettings;
|
inherit (import ./utils.nix) attrsToList concatStrings optionalString filterAttrs trim squash unique;
|
||||||
|
showStoreDocs = import ./generate-store-info.nix;
|
||||||
in
|
in
|
||||||
|
|
||||||
commandDump:
|
inlineHTML: commandDump:
|
||||||
|
|
||||||
let
|
let
|
||||||
|
|
||||||
|
@ -30,7 +31,7 @@ let
|
||||||
|
|
||||||
${maybeSubcommands}
|
${maybeSubcommands}
|
||||||
|
|
||||||
${maybeDocumentation}
|
${maybeStoreDocs}
|
||||||
|
|
||||||
${maybeOptions}
|
${maybeOptions}
|
||||||
'';
|
'';
|
||||||
|
@ -40,15 +41,15 @@ let
|
||||||
showArgument = arg: "*${arg.label}*" + optionalString (! arg ? arity) "...";
|
showArgument = arg: "*${arg.label}*" + optionalString (! arg ? arity) "...";
|
||||||
arguments = concatStringsSep " " (map showArgument args);
|
arguments = concatStringsSep " " (map showArgument args);
|
||||||
in ''
|
in ''
|
||||||
`${command}` [*option*...] ${arguments}
|
`${command}` [*option*...] ${arguments}
|
||||||
'';
|
'';
|
||||||
|
|
||||||
maybeSubcommands = optionalString (details ? commands && details.commands != {})
|
maybeSubcommands = optionalString (details ? commands && details.commands != {})
|
||||||
''
|
''
|
||||||
where *subcommand* is one of the following:
|
where *subcommand* is one of the following:
|
||||||
|
|
||||||
${subcommands}
|
${subcommands}
|
||||||
'';
|
'';
|
||||||
|
|
||||||
subcommands = if length categories > 1
|
subcommands = if length categories > 1
|
||||||
then listCategories
|
then listCategories
|
||||||
|
@ -70,45 +71,57 @@ let
|
||||||
* [`${command} ${name}`](./${appendName filename name}.md) - ${subcmd.description}
|
* [`${command} ${name}`](./${appendName filename name}.md) - ${subcmd.description}
|
||||||
'';
|
'';
|
||||||
|
|
||||||
# TODO: move this confusing special case out of here when implementing #8496
|
# FIXME: this is a hack.
|
||||||
maybeDocumentation = optionalString
|
# store parameters should not be part of command documentation to begin
|
||||||
(details ? doc)
|
# with, but instead be rendered on separate pages.
|
||||||
(replaceStrings ["@stores@"] [storeDocs] details.doc);
|
maybeStoreDocs = optionalString (details ? doc)
|
||||||
|
(replaceStrings [ "@stores@" ] [ (showStoreDocs inlineHTML commandInfo.stores) ] details.doc);
|
||||||
|
|
||||||
maybeOptions = optionalString (details.flags != {}) ''
|
maybeOptions = let
|
||||||
|
allVisibleOptions = filterAttrs
|
||||||
|
(_: o: ! o.hiddenCategory)
|
||||||
|
(details.flags // toplevel.flags);
|
||||||
|
in optionalString (allVisibleOptions != {}) ''
|
||||||
# Options
|
# Options
|
||||||
|
|
||||||
${showOptions details.flags toplevel.flags}
|
${showOptions inlineHTML allVisibleOptions}
|
||||||
|
|
||||||
> **Note**
|
> **Note**
|
||||||
>
|
>
|
||||||
> See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags.
|
> See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags.
|
||||||
'';
|
'';
|
||||||
|
|
||||||
showOptions = options: commonOptions:
|
showOptions = inlineHTML: allOptions:
|
||||||
let
|
let
|
||||||
allOptions = options // commonOptions;
|
showCategory = cat: opts: ''
|
||||||
showCategory = cat: ''
|
${optionalString (cat != "") "## ${cat}"}
|
||||||
${optionalString (cat != "") "**${cat}:**"}
|
|
||||||
|
|
||||||
${listOptions (filterAttrs (n: v: v.category == cat) allOptions)}
|
${concatStringsSep "\n" (attrValues (mapAttrs showOption opts))}
|
||||||
'';
|
'';
|
||||||
listOptions = opts: concatStringsSep "\n" (attrValues (mapAttrs showOption opts));
|
|
||||||
showOption = name: option:
|
showOption = name: option:
|
||||||
let
|
let
|
||||||
|
result = trim ''
|
||||||
|
- ${item}
|
||||||
|
|
||||||
|
${option.description}
|
||||||
|
'';
|
||||||
|
item = if inlineHTML
|
||||||
|
then ''<span id="opt-${name}">[`--${name}`](#opt-${name})</span> ${shortName} ${labels}''
|
||||||
|
else "`--${name}` ${shortName} ${labels}";
|
||||||
shortName = optionalString
|
shortName = optionalString
|
||||||
(option ? shortName)
|
(option ? shortName)
|
||||||
("/ `-${option.shortName}`");
|
("/ `-${option.shortName}`");
|
||||||
labels = optionalString
|
labels = optionalString
|
||||||
(option ? labels)
|
(option ? labels)
|
||||||
(concatStringsSep " " (map (s: "*${s}*") option.labels));
|
(concatStringsSep " " (map (s: "*${s}*") option.labels));
|
||||||
in trim ''
|
in result;
|
||||||
- <span id="opt-${name}">[`--${name}`](#opt-${name})</span> ${shortName} ${labels}
|
categories = mapAttrs
|
||||||
|
# Convert each group from a list of key-value pairs back to an attrset
|
||||||
${option.description}
|
(_: listToAttrs)
|
||||||
'';
|
(groupBy
|
||||||
categories = sort lessThan (unique (map (cmd: cmd.category) (attrValues allOptions)));
|
(cmd: cmd.value.category)
|
||||||
in concatStrings (map showCategory categories);
|
(attrsToList allOptions));
|
||||||
|
in concatStrings (attrValues (mapAttrs showCategory categories));
|
||||||
in squash result;
|
in squash result;
|
||||||
|
|
||||||
appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name;
|
appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name;
|
||||||
|
@ -140,35 +153,4 @@ let
|
||||||
" - [${page.command}](command-ref/new-cli/${page.name})";
|
" - [${page.command}](command-ref/new-cli/${page.name})";
|
||||||
in concatStringsSep "\n" (map showEntry manpages) + "\n";
|
in concatStringsSep "\n" (map showEntry manpages) + "\n";
|
||||||
|
|
||||||
storeDocs =
|
|
||||||
let
|
|
||||||
showStore = name: { settings, doc, experimentalFeature }:
|
|
||||||
let
|
|
||||||
experimentalFeatureNote = optionalString (experimentalFeature != null) ''
|
|
||||||
> **Warning**
|
|
||||||
> This store is part of an
|
|
||||||
> [experimental feature](@docroot@/contributing/experimental-features.md).
|
|
||||||
|
|
||||||
To use this store, you need to make sure the corresponding experimental feature,
|
|
||||||
[`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}),
|
|
||||||
is enabled.
|
|
||||||
For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md):
|
|
||||||
|
|
||||||
```
|
|
||||||
extra-experimental-features = ${experimentalFeature}
|
|
||||||
```
|
|
||||||
'';
|
|
||||||
in ''
|
|
||||||
## ${name}
|
|
||||||
|
|
||||||
${doc}
|
|
||||||
|
|
||||||
${experimentalFeatureNote}
|
|
||||||
|
|
||||||
**Settings**:
|
|
||||||
|
|
||||||
${showSettings { useAnchors = false; } settings}
|
|
||||||
'';
|
|
||||||
in concatStrings (attrValues (mapAttrs showStore commandInfo.stores));
|
|
||||||
|
|
||||||
in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }
|
in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }
|
||||||
|
|
66
doc/manual/generate-settings.nix
Normal file
66
doc/manual/generate-settings.nix
Normal file
|
@ -0,0 +1,66 @@
|
||||||
|
let
|
||||||
|
inherit (builtins) attrValues concatStringsSep isAttrs isBool mapAttrs;
|
||||||
|
inherit (import ./utils.nix) concatStrings indent optionalString squash;
|
||||||
|
in
|
||||||
|
|
||||||
|
# `inlineHTML` is a hack to accommodate inconsistent output from `lowdown`
|
||||||
|
{ prefix, inlineHTML ? true }: settingsInfo:
|
||||||
|
|
||||||
|
let
|
||||||
|
|
||||||
|
showSetting = prefix: setting: { description, documentDefault, defaultValue, aliases, value, experimentalFeature }:
|
||||||
|
let
|
||||||
|
result = squash ''
|
||||||
|
- ${item}
|
||||||
|
|
||||||
|
${indent " " body}
|
||||||
|
'';
|
||||||
|
item = if inlineHTML
|
||||||
|
then ''<span id="${prefix}-${setting}">[`${setting}`](#${prefix}-${setting})</span>''
|
||||||
|
else "`${setting}`";
|
||||||
|
# separate body to cleanly handle indentation
|
||||||
|
body = ''
|
||||||
|
${description}
|
||||||
|
|
||||||
|
${experimentalFeatureNote}
|
||||||
|
|
||||||
|
**Default:** ${showDefault documentDefault defaultValue}
|
||||||
|
|
||||||
|
${showAliases aliases}
|
||||||
|
'';
|
||||||
|
|
||||||
|
experimentalFeatureNote = optionalString (experimentalFeature != null) ''
|
||||||
|
> **Warning**
|
||||||
|
> This setting is part of an
|
||||||
|
> [experimental feature](@docroot@/contributing/experimental-features.md).
|
||||||
|
|
||||||
|
To change this setting, you need to make sure the corresponding experimental feature,
|
||||||
|
[`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}),
|
||||||
|
is enabled.
|
||||||
|
For example, include the following in [`nix.conf`](#):
|
||||||
|
|
||||||
|
```
|
||||||
|
extra-experimental-features = ${experimentalFeature}
|
||||||
|
${setting} = ...
|
||||||
|
```
|
||||||
|
'';
|
||||||
|
|
||||||
|
showDefault = documentDefault: defaultValue:
|
||||||
|
if documentDefault then
|
||||||
|
# a StringMap value type is specified as a string, but
|
||||||
|
# this shows the value type. The empty stringmap is `null` in
|
||||||
|
# JSON, but that converts to `{ }` here.
|
||||||
|
if defaultValue == "" || defaultValue == [] || isAttrs defaultValue
|
||||||
|
then "*empty*"
|
||||||
|
else if isBool defaultValue then
|
||||||
|
if defaultValue then "`true`" else "`false`"
|
||||||
|
else "`${toString defaultValue}`"
|
||||||
|
else "*machine-specific*";
|
||||||
|
|
||||||
|
showAliases = aliases:
|
||||||
|
optionalString (aliases != [])
|
||||||
|
"**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";
|
||||||
|
|
||||||
|
in result;
|
||||||
|
|
||||||
|
in concatStrings (attrValues (mapAttrs (showSetting prefix) settingsInfo))
|
45
doc/manual/generate-store-info.nix
Normal file
45
doc/manual/generate-store-info.nix
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
let
|
||||||
|
inherit (builtins) attrValues mapAttrs;
|
||||||
|
inherit (import ./utils.nix) concatStrings optionalString;
|
||||||
|
showSettings = import ./generate-settings.nix;
|
||||||
|
in
|
||||||
|
|
||||||
|
inlineHTML: storesInfo:
|
||||||
|
|
||||||
|
let
|
||||||
|
|
||||||
|
showStore = name: { settings, doc, experimentalFeature }:
|
||||||
|
let
|
||||||
|
|
||||||
|
result = ''
|
||||||
|
## ${name}
|
||||||
|
|
||||||
|
${doc}
|
||||||
|
|
||||||
|
${experimentalFeatureNote}
|
||||||
|
|
||||||
|
### Settings
|
||||||
|
|
||||||
|
${showSettings { prefix = "store-${slug}"; inherit inlineHTML; } settings}
|
||||||
|
'';
|
||||||
|
|
||||||
|
# markdown doesn't like spaces in URLs
|
||||||
|
slug = builtins.replaceStrings [ " " ] [ "-" ] name;
|
||||||
|
|
||||||
|
experimentalFeatureNote = optionalString (experimentalFeature != null) ''
|
||||||
|
> **Warning**
|
||||||
|
> This store is part of an
|
||||||
|
> [experimental feature](@docroot@/contributing/experimental-features.md).
|
||||||
|
|
||||||
|
To use this store, you need to make sure the corresponding experimental feature,
|
||||||
|
[`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}),
|
||||||
|
is enabled.
|
||||||
|
For example, include the following in [`nix.conf`](#):
|
||||||
|
|
||||||
|
```
|
||||||
|
extra-experimental-features = ${experimentalFeature}
|
||||||
|
```
|
||||||
|
'';
|
||||||
|
in result;
|
||||||
|
|
||||||
|
in concatStrings (attrValues (mapAttrs showStore storesInfo))
|
|
@ -96,14 +96,14 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli $(d)/sr
|
||||||
@cp $< $@
|
@cp $< $@
|
||||||
@$(call process-includes,$@,$@)
|
@$(call process-includes,$@,$@)
|
||||||
|
|
||||||
$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(bindir)/nix
|
$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(d)/generate-settings.nix $(d)/generate-store-info.nix $(bindir)/nix
|
||||||
@rm -rf $@ $@.tmp
|
@rm -rf $@ $@.tmp
|
||||||
$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix (builtins.readFile $<)'
|
$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix true (builtins.readFile $<)'
|
||||||
@mv $@.tmp $@
|
@mv $@.tmp $@
|
||||||
|
|
||||||
$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(bindir)/nix
|
$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/generate-settings.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(bindir)/nix
|
||||||
@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
|
@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
|
||||||
$(trace-gen) $(nix-eval) --expr '(import doc/manual/utils.nix).showSettings { useAnchors = true; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp;
|
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-settings.nix { prefix = "opt-"; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp;
|
||||||
@mv $@.tmp $@
|
@mv $@.tmp $@
|
||||||
|
|
||||||
$(d)/nix.json: $(bindir)/nix
|
$(d)/nix.json: $(bindir)/nix
|
||||||
|
@ -173,6 +173,10 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
|
||||||
done
|
done
|
||||||
@touch $@
|
@touch $@
|
||||||
|
|
||||||
|
# the `! -name 'contributing.md'` filter excludes the one place where
|
||||||
|
# `@docroot@` is to be preserved for documenting the mechanism
|
||||||
|
# FIXME: maybe contributing guides should live right next to the code
|
||||||
|
# instead of in the manual
|
||||||
$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md $(d)/src/language/builtin-constants.md
|
$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md $(d)/src/language/builtin-constants.md
|
||||||
$(trace-gen) \
|
$(trace-gen) \
|
||||||
tmp="$$(mktemp -d)"; \
|
tmp="$$(mktemp -d)"; \
|
||||||
|
@ -180,7 +184,7 @@ $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/
|
||||||
find "$$tmp" -name '*.md' | while read -r file; do \
|
find "$$tmp" -name '*.md' | while read -r file; do \
|
||||||
$(call process-includes,$$file,$$file); \
|
$(call process-includes,$$file,$$file); \
|
||||||
done; \
|
done; \
|
||||||
find "$$tmp" -name '*.md' | while read -r file; do \
|
find "$$tmp" -name '*.md' ! -name 'documentation.md' | while read -r file; do \
|
||||||
docroot="$$(realpath --relative-to="$$(dirname "$$file")" $$tmp/manual/src)"; \
|
docroot="$$(realpath --relative-to="$$(dirname "$$file")" $$tmp/manual/src)"; \
|
||||||
sed -i "s,@docroot@,$$docroot,g" "$$file"; \
|
sed -i "s,@docroot@,$$docroot,g" "$$file"; \
|
||||||
done; \
|
done; \
|
||||||
|
|
|
@ -336,14 +336,13 @@ const redirects = {
|
||||||
"simple-values": "#primitives",
|
"simple-values": "#primitives",
|
||||||
"lists": "#list",
|
"lists": "#list",
|
||||||
"strings": "#string",
|
"strings": "#string",
|
||||||
"lists": "#list",
|
|
||||||
"attribute-sets": "#attribute-set",
|
"attribute-sets": "#attribute-set",
|
||||||
},
|
},
|
||||||
"installation/installing-binary.html": {
|
"installation/installing-binary.html": {
|
||||||
"linux": "uninstall.html#linux",
|
"linux": "uninstall.html#linux",
|
||||||
"macos": "uninstall.html#macos",
|
"macos": "uninstall.html#macos",
|
||||||
"uninstalling": "uninstall.html",
|
"uninstalling": "uninstall.html",
|
||||||
}
|
},
|
||||||
"contributing/hacking.html": {
|
"contributing/hacking.html": {
|
||||||
"nix-with-flakes": "#building-nix-with-flakes",
|
"nix-with-flakes": "#building-nix-with-flakes",
|
||||||
"classic-nix": "#building-nix",
|
"classic-nix": "#building-nix",
|
||||||
|
@ -355,6 +354,7 @@ const redirects = {
|
||||||
"installer-tests": "testing.html#installer-tests",
|
"installer-tests": "testing.html#installer-tests",
|
||||||
"one-time-setup": "testing.html#one-time-setup",
|
"one-time-setup": "testing.html#one-time-setup",
|
||||||
"using-the-ci-generated-installer-for-manual-testing": "testing.html#using-the-ci-generated-installer-for-manual-testing",
|
"using-the-ci-generated-installer-for-manual-testing": "testing.html#using-the-ci-generated-installer-for-manual-testing",
|
||||||
|
"characterization-testing": "#characterisation-testing-unit",
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -30,9 +30,11 @@
|
||||||
- [Data Types](language/values.md)
|
- [Data Types](language/values.md)
|
||||||
- [Language Constructs](language/constructs.md)
|
- [Language Constructs](language/constructs.md)
|
||||||
- [String interpolation](language/string-interpolation.md)
|
- [String interpolation](language/string-interpolation.md)
|
||||||
|
- [Lookup path](language/constructs/lookup-path.md)
|
||||||
- [Operators](language/operators.md)
|
- [Operators](language/operators.md)
|
||||||
- [Derivations](language/derivations.md)
|
- [Derivations](language/derivations.md)
|
||||||
- [Advanced Attributes](language/advanced-attributes.md)
|
- [Advanced Attributes](language/advanced-attributes.md)
|
||||||
|
- [Import From Derivation](language/import-from-derivation.md)
|
||||||
- [Built-in Constants](language/builtin-constants.md)
|
- [Built-in Constants](language/builtin-constants.md)
|
||||||
- [Built-in Functions](language/builtins.md)
|
- [Built-in Functions](language/builtins.md)
|
||||||
- [Advanced Topics](advanced-topics/advanced-topics.md)
|
- [Advanced Topics](advanced-topics/advanced-topics.md)
|
||||||
|
@ -105,6 +107,7 @@
|
||||||
- [Contributing](contributing/contributing.md)
|
- [Contributing](contributing/contributing.md)
|
||||||
- [Hacking](contributing/hacking.md)
|
- [Hacking](contributing/hacking.md)
|
||||||
- [Testing](contributing/testing.md)
|
- [Testing](contributing/testing.md)
|
||||||
|
- [Documentation](contributing/documentation.md)
|
||||||
- [Experimental Features](contributing/experimental-features.md)
|
- [Experimental Features](contributing/experimental-features.md)
|
||||||
- [CLI guideline](contributing/cli-guideline.md)
|
- [CLI guideline](contributing/cli-guideline.md)
|
||||||
- [C++ style guide](contributing/cxx.md)
|
- [C++ style guide](contributing/cxx.md)
|
||||||
|
|
|
@ -69,6 +69,8 @@ exec nix copy --to "s3://example-nix-cache" $OUT_PATHS
|
||||||
> store sign`. Nix guarantees the paths will not contain any spaces,
|
> store sign`. Nix guarantees the paths will not contain any spaces,
|
||||||
> however a store path might contain glob characters. The `set -f`
|
> however a store path might contain glob characters. The `set -f`
|
||||||
> disables globbing in the shell.
|
> disables globbing in the shell.
|
||||||
|
> If you want to upload the `.drv` file too, the `$DRV_PATH` variable
|
||||||
|
> is also defined for the script and works just like `$OUT_PATHS`.
|
||||||
|
|
||||||
Then make sure the hook program is executable by the `root` user:
|
Then make sure the hook program is executable by the `root` user:
|
||||||
|
|
||||||
|
|
|
@ -2,109 +2,124 @@
|
||||||
|
|
||||||
Most Nix commands interpret the following environment variables:
|
Most Nix commands interpret the following environment variables:
|
||||||
|
|
||||||
- <span id="env-IN_NIX_SHELL">[`IN_NIX_SHELL`](#env-IN_NIX_SHELL)</span>\
|
- <span id="env-IN_NIX_SHELL">[`IN_NIX_SHELL`](#env-IN_NIX_SHELL)</span>
|
||||||
Indicator that tells if the current environment was set up by
|
|
||||||
`nix-shell`. It can have the values `pure` or `impure`.
|
|
||||||
|
|
||||||
- <span id="env-NIX_PATH">[`NIX_PATH`](#env-NIX_PATH)</span>\
|
Indicator that tells if the current environment was set up by
|
||||||
A colon-separated list of directories used to look up the location of Nix
|
`nix-shell`. It can have the values `pure` or `impure`.
|
||||||
expressions using [paths](@docroot@/language/values.md#type-path)
|
|
||||||
enclosed in angle brackets (i.e., `<path>`),
|
|
||||||
e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the
|
|
||||||
[`-I` option](@docroot@/command-ref/opt-common.md#opt-I).
|
|
||||||
|
|
||||||
If `NIX_PATH` is not set at all, Nix will fall back to the following list in [impure](@docroot@/command-ref/conf-file.md#conf-pure-eval) and [unrestricted](@docroot@/command-ref/conf-file.md#conf-restrict-eval) evaluation mode:
|
- <span id="env-NIX_PATH">[`NIX_PATH`](#env-NIX_PATH)</span>
|
||||||
|
|
||||||
1. `$HOME/.nix-defexpr/channels`
|
A colon-separated list of directories used to look up the location of Nix
|
||||||
2. `nixpkgs=/nix/var/nix/profiles/per-user/root/channels/nixpkgs`
|
expressions using [paths](@docroot@/language/values.md#type-path)
|
||||||
3. `/nix/var/nix/profiles/per-user/root/channels`
|
enclosed in angle brackets (i.e., `<path>`),
|
||||||
|
e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the
|
||||||
|
[`-I` option](@docroot@/command-ref/opt-common.md#opt-I).
|
||||||
|
|
||||||
If `NIX_PATH` is set to an empty string, resolving search paths will always fail.
|
If `NIX_PATH` is not set at all, Nix will fall back to the following list in [impure](@docroot@/command-ref/conf-file.md#conf-pure-eval) and [unrestricted](@docroot@/command-ref/conf-file.md#conf-restrict-eval) evaluation mode:
|
||||||
For example, attempting to use `<nixpkgs>` will produce:
|
|
||||||
|
|
||||||
error: file 'nixpkgs' was not found in the Nix search path
|
1. `$HOME/.nix-defexpr/channels`
|
||||||
|
2. `nixpkgs=/nix/var/nix/profiles/per-user/root/channels/nixpkgs`
|
||||||
|
3. `/nix/var/nix/profiles/per-user/root/channels`
|
||||||
|
|
||||||
- <span id="env-NIX_IGNORE_SYMLINK_STORE">[`NIX_IGNORE_SYMLINK_STORE`](#env-NIX_IGNORE_SYMLINK_STORE)</span>\
|
If `NIX_PATH` is set to an empty string, resolving search paths will always fail.
|
||||||
Normally, the Nix store directory (typically `/nix/store`) is not
|
For example, attempting to use `<nixpkgs>` will produce:
|
||||||
allowed to contain any symlink components. This is to prevent
|
|
||||||
“impure” builds. Builders sometimes “canonicalise” paths by
|
|
||||||
resolving all symlink components. Thus, builds on different machines
|
|
||||||
(with `/nix/store` resolving to different locations) could yield
|
|
||||||
different results. This is generally not a problem, except when
|
|
||||||
builds are deployed to machines where `/nix/store` resolves
|
|
||||||
differently. If you are sure that you’re not going to do that, you
|
|
||||||
can set `NIX_IGNORE_SYMLINK_STORE` to `1`.
|
|
||||||
|
|
||||||
Note that if you’re symlinking the Nix store so that you can put it
|
error: file 'nixpkgs' was not found in the Nix search path
|
||||||
on another file system than the root file system, on Linux you’re
|
|
||||||
better off using `bind` mount points, e.g.,
|
|
||||||
|
|
||||||
```console
|
- <span id="env-NIX_IGNORE_SYMLINK_STORE">[`NIX_IGNORE_SYMLINK_STORE`](#env-NIX_IGNORE_SYMLINK_STORE)</span>
|
||||||
$ mkdir /nix
|
|
||||||
$ mount -o bind /mnt/otherdisk/nix /nix
|
|
||||||
```
|
|
||||||
|
|
||||||
Consult the mount 8 manual page for details.
|
Normally, the Nix store directory (typically `/nix/store`) is not
|
||||||
|
allowed to contain any symlink components. This is to prevent
|
||||||
|
“impure” builds. Builders sometimes “canonicalise” paths by
|
||||||
|
resolving all symlink components. Thus, builds on different machines
|
||||||
|
(with `/nix/store` resolving to different locations) could yield
|
||||||
|
different results. This is generally not a problem, except when
|
||||||
|
builds are deployed to machines where `/nix/store` resolves
|
||||||
|
differently. If you are sure that you’re not going to do that, you
|
||||||
|
can set `NIX_IGNORE_SYMLINK_STORE` to `1`.
|
||||||
|
|
||||||
- <span id="env-NIX_STORE_DIR">[`NIX_STORE_DIR`](#env-NIX_STORE_DIR)</span>\
|
Note that if you’re symlinking the Nix store so that you can put it
|
||||||
Overrides the location of the Nix store (default `prefix/store`).
|
on another file system than the root file system, on Linux you’re
|
||||||
|
better off using `bind` mount points, e.g.,
|
||||||
|
|
||||||
- <span id="env-NIX_DATA_DIR">[`NIX_DATA_DIR`](#env-NIX_DATA_DIR)</span>\
|
```console
|
||||||
Overrides the location of the Nix static data directory (default
|
$ mkdir /nix
|
||||||
`prefix/share`).
|
$ mount -o bind /mnt/otherdisk/nix /nix
|
||||||
|
```
|
||||||
|
|
||||||
- <span id="env-NIX_LOG_DIR">[`NIX_LOG_DIR`](#env-NIX_LOG_DIR)</span>\
|
Consult the mount 8 manual page for details.
|
||||||
Overrides the location of the Nix log directory (default
|
|
||||||
`prefix/var/log/nix`).
|
|
||||||
|
|
||||||
- <span id="env-NIX_STATE_DIR">[`NIX_STATE_DIR`](#env-NIX_STATE_DIR)</span>\
|
- <span id="env-NIX_STORE_DIR">[`NIX_STORE_DIR`](#env-NIX_STORE_DIR)</span>
|
||||||
Overrides the location of the Nix state directory (default
|
|
||||||
`prefix/var/nix`).
|
|
||||||
|
|
||||||
- <span id="env-NIX_CONF_DIR">[`NIX_CONF_DIR`](#env-NIX_CONF_DIR)</span>\
|
Overrides the location of the Nix store (default `prefix/store`).
|
||||||
Overrides the location of the system Nix configuration directory
|
|
||||||
(default `prefix/etc/nix`).
|
|
||||||
|
|
||||||
- <span id="env-NIX_CONFIG">[`NIX_CONFIG`](#env-NIX_CONFIG)</span>\
|
- <span id="env-NIX_DATA_DIR">[`NIX_DATA_DIR`](#env-NIX_DATA_DIR)</span>
|
||||||
Applies settings from Nix configuration from the environment.
|
|
||||||
The content is treated as if it was read from a Nix configuration file.
|
|
||||||
Settings are separated by the newline character.
|
|
||||||
|
|
||||||
- <span id="env-NIX_USER_CONF_FILES">[`NIX_USER_CONF_FILES`](#env-NIX_USER_CONF_FILES)</span>\
|
Overrides the location of the Nix static data directory (default
|
||||||
Overrides the location of the Nix user configuration files to load from.
|
`prefix/share`).
|
||||||
|
|
||||||
The default are the locations according to the [XDG Base Directory Specification].
|
- <span id="env-NIX_LOG_DIR">[`NIX_LOG_DIR`](#env-NIX_LOG_DIR)</span>
|
||||||
See the [XDG Base Directories](#xdg-base-directories) sub-section for details.
|
|
||||||
|
|
||||||
The variable is treated as a list separated by the `:` token.
|
Overrides the location of the Nix log directory (default
|
||||||
|
`prefix/var/log/nix`).
|
||||||
|
|
||||||
- <span id="env-TMPDIR">[`TMPDIR`](#env-TMPDIR)</span>\
|
- <span id="env-NIX_STATE_DIR">[`NIX_STATE_DIR`](#env-NIX_STATE_DIR)</span>
|
||||||
Use the specified directory to store temporary files. In particular,
|
|
||||||
this includes temporary build directories; these can take up
|
|
||||||
substantial amounts of disk space. The default is `/tmp`.
|
|
||||||
|
|
||||||
- <span id="env-NIX_REMOTE">[`NIX_REMOTE`](#env-NIX_REMOTE)</span>\
|
Overrides the location of the Nix state directory (default
|
||||||
This variable should be set to `daemon` if you want to use the Nix
|
`prefix/var/nix`).
|
||||||
daemon to execute Nix operations. This is necessary in [multi-user
|
|
||||||
Nix installations](@docroot@/installation/multi-user.md). If the Nix
|
|
||||||
daemon's Unix socket is at some non-standard path, this variable
|
|
||||||
should be set to `unix://path/to/socket`. Otherwise, it should be
|
|
||||||
left unset.
|
|
||||||
|
|
||||||
- <span id="env-NIX_SHOW_STATS">[`NIX_SHOW_STATS`](#env-NIX_SHOW_STATS)</span>\
|
- <span id="env-NIX_CONF_DIR">[`NIX_CONF_DIR`](#env-NIX_CONF_DIR)</span>
|
||||||
If set to `1`, Nix will print some evaluation statistics, such as
|
|
||||||
the number of values allocated.
|
|
||||||
|
|
||||||
- <span id="env-NIX_COUNT_CALLS">[`NIX_COUNT_CALLS`](#env-NIX_COUNT_CALLS)</span>\
|
Overrides the location of the system Nix configuration directory
|
||||||
If set to `1`, Nix will print how often functions were called during
|
(default `prefix/etc/nix`).
|
||||||
Nix expression evaluation. This is useful for profiling your Nix
|
|
||||||
expressions.
|
|
||||||
|
|
||||||
- <span id="env-GC_INITIAL_HEAP_SIZE">[`GC_INITIAL_HEAP_SIZE`](#env-GC_INITIAL_HEAP_SIZE)</span>\
|
- <span id="env-NIX_CONFIG">[`NIX_CONFIG`](#env-NIX_CONFIG)</span>
|
||||||
If Nix has been configured to use the Boehm garbage collector, this
|
|
||||||
variable sets the initial size of the heap in bytes. It defaults to
|
Applies settings from Nix configuration from the environment.
|
||||||
384 MiB. Setting it to a low value reduces memory consumption, but
|
The content is treated as if it was read from a Nix configuration file.
|
||||||
will increase runtime due to the overhead of garbage collection.
|
Settings are separated by the newline character.
|
||||||
|
|
||||||
|
- <span id="env-NIX_USER_CONF_FILES">[`NIX_USER_CONF_FILES`](#env-NIX_USER_CONF_FILES)</span>
|
||||||
|
|
||||||
|
Overrides the location of the Nix user configuration files to load from.
|
||||||
|
|
||||||
|
The default are the locations according to the [XDG Base Directory Specification].
|
||||||
|
See the [XDG Base Directories](#xdg-base-directories) sub-section for details.
|
||||||
|
|
||||||
|
The variable is treated as a list separated by the `:` token.
|
||||||
|
|
||||||
|
- <span id="env-TMPDIR">[`TMPDIR`](#env-TMPDIR)</span>
|
||||||
|
|
||||||
|
Use the specified directory to store temporary files. In particular,
|
||||||
|
this includes temporary build directories; these can take up
|
||||||
|
substantial amounts of disk space. The default is `/tmp`.
|
||||||
|
|
||||||
|
- <span id="env-NIX_REMOTE">[`NIX_REMOTE`](#env-NIX_REMOTE)</span>
|
||||||
|
|
||||||
|
This variable should be set to `daemon` if you want to use the Nix
|
||||||
|
daemon to execute Nix operations. This is necessary in [multi-user
|
||||||
|
Nix installations](@docroot@/installation/multi-user.md). If the Nix
|
||||||
|
daemon's Unix socket is at some non-standard path, this variable
|
||||||
|
should be set to `unix://path/to/socket`. Otherwise, it should be
|
||||||
|
left unset.
|
||||||
|
|
||||||
|
- <span id="env-NIX_SHOW_STATS">[`NIX_SHOW_STATS`](#env-NIX_SHOW_STATS)</span>
|
||||||
|
|
||||||
|
If set to `1`, Nix will print some evaluation statistics, such as
|
||||||
|
the number of values allocated.
|
||||||
|
|
||||||
|
- <span id="env-NIX_COUNT_CALLS">[`NIX_COUNT_CALLS`](#env-NIX_COUNT_CALLS)</span>
|
||||||
|
|
||||||
|
If set to `1`, Nix will print how often functions were called during
|
||||||
|
Nix expression evaluation. This is useful for profiling your Nix
|
||||||
|
expressions.
|
||||||
|
|
||||||
|
- <span id="env-GC_INITIAL_HEAP_SIZE">[`GC_INITIAL_HEAP_SIZE`](#env-GC_INITIAL_HEAP_SIZE)</span>
|
||||||
|
|
||||||
|
If Nix has been configured to use the Boehm garbage collector, this
|
||||||
|
variable sets the initial size of the heap in bytes. It defaults to
|
||||||
|
384 MiB. Setting it to a low value reduces memory consumption, but
|
||||||
|
will increase runtime due to the overhead of garbage collection.
|
||||||
|
|
||||||
## XDG Base Directories
|
## XDG Base Directories
|
||||||
|
|
||||||
|
|
|
@ -14,19 +14,21 @@
|
||||||
|
|
||||||
# Description
|
# Description
|
||||||
|
|
||||||
The install operation creates a new user environment, based on the
|
The install operation creates a new user environment.
|
||||||
current generation of the active profile, to which a set of store paths
|
It is based on the current generation of the active [profile](@docroot@/command-ref/files/profiles.md), to which a set of [store paths] described by *args* is added.
|
||||||
described by *args* is added. The arguments *args* map to store paths in
|
|
||||||
a number of possible ways:
|
[store paths]: @docroot@/glossary.md#gloss-store-path
|
||||||
|
|
||||||
|
The arguments *args* map to store paths in a number of possible ways:
|
||||||
|
|
||||||
|
|
||||||
- By default, *args* is a set of [derivation] names denoting derivations
|
- By default, *args* is a set of [derivation] names denoting derivations in the [default Nix expression].
|
||||||
in the active Nix expression. These are realised, and the resulting
|
These are [realised], and the resulting output paths are installed.
|
||||||
output paths are installed. Currently installed derivations with a
|
Currently installed derivations with a name equal to the name of a derivation being added are removed unless the option `--preserve-installed` is specified.
|
||||||
name equal to the name of a derivation being added are removed
|
|
||||||
unless the option `--preserve-installed` is specified.
|
|
||||||
|
|
||||||
[derivation]: @docroot@/language/derivations.md
|
[derivation]: @docroot@/glossary.md#gloss-derivation
|
||||||
|
[default Nix expression]: @docroot@/command-ref/files/default-nix-expression.md
|
||||||
|
[realised]: @docroot@/glossary.md#gloss-realise
|
||||||
|
|
||||||
If there are multiple derivations matching a name in *args* that
|
If there are multiple derivations matching a name in *args* that
|
||||||
have the same name (e.g., `gcc-3.3.6` and `gcc-4.1.1`), then the
|
have the same name (e.g., `gcc-3.3.6` and `gcc-4.1.1`), then the
|
||||||
|
@ -43,40 +45,33 @@ a number of possible ways:
|
||||||
gcc-3.3.6 gcc-4.1.1` will install both version of GCC (and will
|
gcc-3.3.6 gcc-4.1.1` will install both version of GCC (and will
|
||||||
probably cause a user environment conflict\!).
|
probably cause a user environment conflict\!).
|
||||||
|
|
||||||
- If `--attr` (`-A`) is specified, the arguments are *attribute
|
- If [`--attr`](#opt-attr) / `-A` is specified, the arguments are *attribute paths* that select attributes from the [default Nix expression].
|
||||||
paths* that select attributes from the top-level Nix
|
This is faster than using derivation names and unambiguous.
|
||||||
expression. This is faster than using derivation names and
|
Show the attribute paths of available packages with [`nix-env --query`](./query.md):
|
||||||
unambiguous. To find out the attribute paths of available
|
|
||||||
packages, use `nix-env --query --available --attr-path `.
|
```console
|
||||||
|
nix-env --query --available --attr-path`
|
||||||
|
```
|
||||||
|
|
||||||
- If `--from-profile` *path* is given, *args* is a set of names
|
- If `--from-profile` *path* is given, *args* is a set of names
|
||||||
denoting installed store paths in the profile *path*. This is an
|
denoting installed [store paths] in the profile *path*. This is an
|
||||||
easy way to copy user environment elements from one profile to
|
easy way to copy user environment elements from one profile to
|
||||||
another.
|
another.
|
||||||
|
|
||||||
- If `--from-expression` is given, *args* are Nix
|
- If `--from-expression` is given, *args* are [Nix language functions](@docroot@/language/constructs.md#functions) that are called with the [default Nix expression] as their single argument.
|
||||||
[functions](@docroot@/language/constructs.md#functions)
|
The derivations returned by those function calls are installed.
|
||||||
that are called with the active Nix expression as their single
|
This allows derivations to be specified in an unambiguous way, which is necessary if there are multiple derivations with the same name.
|
||||||
argument. The derivations returned by those function calls are
|
|
||||||
installed. This allows derivations to be specified in an
|
|
||||||
unambiguous way, which is necessary if there are multiple
|
|
||||||
derivations with the same name.
|
|
||||||
|
|
||||||
- If *args* are [store derivations](@docroot@/glossary.md#gloss-store-derivation), then these are
|
- If *args* are [store derivations](@docroot@/glossary.md#gloss-store-derivation), then these are [realised], and the resulting output paths are installed.
|
||||||
[realised](@docroot@/command-ref/nix-store/realise.md), and the resulting output paths
|
|
||||||
are installed.
|
|
||||||
|
|
||||||
- If *args* are store paths that are not store derivations, then these
|
- If *args* are [store paths] that are not store derivations, then these are [realised] and installed.
|
||||||
are [realised](@docroot@/command-ref/nix-store/realise.md) and installed.
|
|
||||||
|
|
||||||
- By default all outputs are installed for each derivation.
|
- By default all [outputs](@docroot@/language/derivations.md#attr-outputs) are installed for each [derivation].
|
||||||
This can be overridden by adding a `meta.outputsToInstall` attribute on the derivation listing a subset of the output names.
|
This can be overridden by adding a `meta.outputsToInstall` attribute on the derivation listing a subset of the output names.
|
||||||
|
|
||||||
<!-- TODO: add anchor link to `outputs` when #7320 is merged -->
|
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
|
||||||
The file `example.nix` defines a [derivation] with two outputs `foo` and `bar`, each containing a file.
|
The file `example.nix` defines a derivation with two outputs `foo` and `bar`, each containing a file.
|
||||||
|
|
||||||
```nix
|
```nix
|
||||||
# example.nix
|
# example.nix
|
||||||
|
@ -123,15 +118,17 @@ a number of possible ways:
|
||||||
manifest.nix
|
manifest.nix
|
||||||
```
|
```
|
||||||
|
|
||||||
# Flags
|
# Options
|
||||||
|
|
||||||
|
- `--prebuilt-only` / `-b`
|
||||||
|
|
||||||
- `--prebuilt-only` / `-b`\
|
|
||||||
Use only derivations for which a substitute is registered, i.e.,
|
Use only derivations for which a substitute is registered, i.e.,
|
||||||
there is a pre-built binary available that can be downloaded in lieu
|
there is a pre-built binary available that can be downloaded in lieu
|
||||||
of building the derivation. Thus, no packages will be built from
|
of building the derivation. Thus, no packages will be built from
|
||||||
source.
|
source.
|
||||||
|
|
||||||
- `--preserve-installed` / `-P`\
|
- `--preserve-installed` / `-P`
|
||||||
|
|
||||||
Do not remove derivations with a name matching one of the
|
Do not remove derivations with a name matching one of the
|
||||||
derivations being installed. Usually, trying to have two versions of
|
derivations being installed. Usually, trying to have two versions of
|
||||||
the same package installed in the same generation of a profile will
|
the same package installed in the same generation of a profile will
|
||||||
|
@ -139,7 +136,8 @@ a number of possible ways:
|
||||||
clashes between the two versions. However, this is not the case for
|
clashes between the two versions. However, this is not the case for
|
||||||
all packages.
|
all packages.
|
||||||
|
|
||||||
- `--remove-all` / `-r`\
|
- `--remove-all` / `-r`
|
||||||
|
|
||||||
Remove all previously installed packages first. This is equivalent
|
Remove all previously installed packages first. This is equivalent
|
||||||
to running `nix-env --uninstall '.*'` first, except that everything happens
|
to running `nix-env --uninstall '.*'` first, except that everything happens
|
||||||
in a single transaction.
|
in a single transaction.
|
||||||
|
|
|
@ -15,8 +15,12 @@ Each of *paths* is processed as follows:
|
||||||
1. If it is not [valid], substitute the store derivation file itself.
|
1. If it is not [valid], substitute the store derivation file itself.
|
||||||
2. Realise its [output paths]:
|
2. Realise its [output paths]:
|
||||||
- Try to fetch from [substituters] the [store objects] associated with the output paths in the store derivation's [closure].
|
- Try to fetch from [substituters] the [store objects] associated with the output paths in the store derivation's [closure].
|
||||||
- With [content-addressed derivations] (experimental): Determine the output paths to realise by querying content-addressed realisation entries in the [Nix database].
|
- With [content-addressed derivations] (experimental):
|
||||||
- For any store paths that cannot be substituted, produce the required store objects. This involves first realising all outputs of the derivation's dependencies and then running the derivation's [`builder`](@docroot@/language/derivations.md#attr-builder) executable. <!-- TODO: Link to build process page #8888 -->
|
Determine the output paths to realise by querying content-addressed realisation entries in the [Nix database].
|
||||||
|
- For any store paths that cannot be substituted, produce the required store objects:
|
||||||
|
1. Realise all outputs of the derivation's dependencies
|
||||||
|
2. Run the derivation's [`builder`](@docroot@/language/derivations.md#attr-builder) executable
|
||||||
|
<!-- TODO: Link to build process page #8888 -->
|
||||||
- Otherwise, and if the path is not already valid: Try to fetch the associated [store objects] in the path's [closure] from [substituters].
|
- Otherwise, and if the path is not already valid: Try to fetch the associated [store objects] in the path's [closure] from [substituters].
|
||||||
|
|
||||||
If no substitutes are available and no store derivation is given, realisation fails.
|
If no substitutes are available and no store derivation is given, realisation fails.
|
||||||
|
|
|
@ -1 +1,8 @@
|
||||||
# Contributing
|
# Development
|
||||||
|
|
||||||
|
Nix is developed on GitHub.
|
||||||
|
Check the [contributing guide](https://github.com/NixOS/nix/blob/master/CONTRIBUTING.md) if you want to get involved.
|
||||||
|
|
||||||
|
This chapter is a collection of guides for making changes to the code and documentation.
|
||||||
|
|
||||||
|
If you're not sure where to start, try to [compile Nix from source](./hacking.md) and consider [making improvements to documentation](./documentation.md).
|
||||||
|
|
181
doc/manual/src/contributing/documentation.md
Normal file
181
doc/manual/src/contributing/documentation.md
Normal file
|
@ -0,0 +1,181 @@
|
||||||
|
# Contributing documentation
|
||||||
|
|
||||||
|
Improvements to documentation are very much appreciated, and a good way to start out with contributing to Nix.
|
||||||
|
|
||||||
|
This is how you can help:
|
||||||
|
- Address [open issues with documentation](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+label%3Adocumentation)
|
||||||
|
- Review [pull requests concerning documentation](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+label%3Adocumentation)
|
||||||
|
|
||||||
|
Incremental refactorings of the documentation build setup to make it faster or easier to understand and maintain are also welcome.
|
||||||
|
|
||||||
|
## Building the manual
|
||||||
|
|
||||||
|
Build the manual from scratch:
|
||||||
|
|
||||||
|
```console
|
||||||
|
nix-build $(nix-instantiate)'!doc'
|
||||||
|
```
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
```console
|
||||||
|
nix build .#^doc
|
||||||
|
```
|
||||||
|
|
||||||
|
and open `./result-doc/share/doc/nix/manual/index.html`.
|
||||||
|
|
||||||
|
To build the manual incrementally, [enter the development shell](./hacking.md) and run:
|
||||||
|
|
||||||
|
```console
|
||||||
|
make manual-html -j $NIX_BUILD_CORES
|
||||||
|
```
|
||||||
|
|
||||||
|
and open `./outputs/out/share/doc/nix/manual/language/index.html`.
|
||||||
|
|
||||||
|
In order to reflect changes to the [Makefile for the manual], clear all generated files before re-building:
|
||||||
|
|
||||||
|
[Makefile for the manual]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk
|
||||||
|
|
||||||
|
```console
|
||||||
|
rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/src/command-ref/new-cli && make manual-html -j $NIX_BUILD_CORES
|
||||||
|
```
|
||||||
|
|
||||||
|
## Style guide
|
||||||
|
|
||||||
|
The goal of this style guide is to make it such that
|
||||||
|
- The manual is easy to search and skim for relevant information
|
||||||
|
- Documentation sources are easy to edit
|
||||||
|
- Changes to documentation are easy to review
|
||||||
|
|
||||||
|
You will notice that this is not implemented consistently yet.
|
||||||
|
Please follow the guide when making additions or changes to existing documentation.
|
||||||
|
Do not make sweeping changes, unless they are programmatic and can be validated easily.
|
||||||
|
|
||||||
|
### Language
|
||||||
|
|
||||||
|
This manual is [reference documentation](https://diataxis.fr/reference/).
|
||||||
|
The typical usage pattern is to look up isolated pieces of information.
|
||||||
|
It should therefore aim to be correct, consistent, complete, and easy to navigate at a glance.
|
||||||
|
|
||||||
|
- Aim for clarity and brevity.
|
||||||
|
|
||||||
|
Please take the time to read the [plain language guidelines](https://www.plainlanguage.gov/guidelines/) for details.
|
||||||
|
|
||||||
|
- Describe the subject factually.
|
||||||
|
|
||||||
|
In particular, do not make value judgements or recommendations.
|
||||||
|
Check the code or add tests if in doubt.
|
||||||
|
|
||||||
|
- Provide complete, minimal examples, and explain them.
|
||||||
|
|
||||||
|
Readers should be able to try examples verbatim and get the same results as shown in the manual.
|
||||||
|
Always describe in words what a given example does.
|
||||||
|
|
||||||
|
Non-trivial examples may need additional explanation, especially if they use concepts from outside the given context.
|
||||||
|
|
||||||
|
- Use British English.
|
||||||
|
|
||||||
|
This is a somewhat arbitrary choice to force consistency, and accounts for the fact that a majority of Nix users and developers are from Europe.
|
||||||
|
|
||||||
|
### Links and anchors
|
||||||
|
|
||||||
|
Reference documentation must be readable in arbitrary order.
|
||||||
|
Readers cannot be expected to have any particular prerequisite knowledge about Nix.
|
||||||
|
While the table of contents can provide guidance and full-text search can help, they are most likely to find what they need by following sensible cross-references.
|
||||||
|
|
||||||
|
- Link to technical terms
|
||||||
|
|
||||||
|
When mentioning Nix-specific concepts, commands, options, settings, etc., link to appropriate documentation.
|
||||||
|
Also link to external tools or concepts, especially if their meaning may be ambiguous.
|
||||||
|
You may also want to link to definitions of less common technical terms.
|
||||||
|
|
||||||
|
Then readers won't have to actively search for definitions and are more likely to discover relevant information on their own.
|
||||||
|
|
||||||
|
> **Note**
|
||||||
|
>
|
||||||
|
> `man` and `--help` pages don't display links.
|
||||||
|
> Use appropriate link texts such that readers of terminal output can infer search terms.
|
||||||
|
|
||||||
|
- Do not break existing URLs between releases.
|
||||||
|
|
||||||
|
There are countless links in the wild pointing to old versions of the manual.
|
||||||
|
We want people to find up-to-date documentation when following popular advice.
|
||||||
|
|
||||||
|
- When moving files, update [redirects on nixos.org](https://github.com/NixOS/nixos-homepage/blob/master/netlify.toml).
|
||||||
|
|
||||||
|
This is especially important when moving information out of the Nix manual to other resources.
|
||||||
|
|
||||||
|
- When changing anchors, update [client-side redirects](https://github.com/NixOS/nix/blob/master/doc/manual/redirects.js)
|
||||||
|
|
||||||
|
The current setup is cumbersome, and help making better automation is appreciated.
|
||||||
|
|
||||||
|
The build checks for broken internal links with.
|
||||||
|
This happens late in the process, so [building the whole manual](#building-the-manual) is not suitable for iterating quickly.
|
||||||
|
[`mdbook-linkcheck`] does not implement checking [URI fragments] yet.
|
||||||
|
|
||||||
|
[`mdbook-linkcheck`]: https://github.com/Michael-F-Bryan/mdbook-linkcheck
|
||||||
|
[URI fragments]: https://en.wikipedia.org/wiki/URI_fragment
|
||||||
|
|
||||||
|
### Markdown conventions
|
||||||
|
|
||||||
|
The manual is written in markdown, and rendered with [mdBook](https://github.com/rust-lang/mdBook) for the web and with [lowdown](https://github.com/kristapsdz/lowdown) for `man` pages and `--help` output.
|
||||||
|
|
||||||
|
For supported markdown features, refer to:
|
||||||
|
- [mdBook documentation](https://rust-lang.github.io/mdBook/format/markdown.html)
|
||||||
|
- [lowdown documentation](https://kristaps.bsd.lv/lowdown/)
|
||||||
|
|
||||||
|
Please observe these guidelines to ease reviews:
|
||||||
|
|
||||||
|
- Write one sentence per line.
|
||||||
|
|
||||||
|
This makes long sentences immediately visible, and makes it easier to review changes and make direct suggestions.
|
||||||
|
|
||||||
|
- Use reference links – sparingly – to ease source readability.
|
||||||
|
Put definitions close to their first use.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```
|
||||||
|
A [store object] contains a [file system object] and [references] to other store objects.
|
||||||
|
|
||||||
|
[store object]: @docroot@/glossary.md#gloss-store-object
|
||||||
|
[file system object]: @docroot@/architecture/file-system-object.md
|
||||||
|
[references]: @docroot@/glossary.md#gloss-reference
|
||||||
|
```
|
||||||
|
|
||||||
|
- Use admonitions of the following form:
|
||||||
|
|
||||||
|
```
|
||||||
|
> **Note**
|
||||||
|
>
|
||||||
|
> This is a note.
|
||||||
|
```
|
||||||
|
|
||||||
|
### The `@docroot@` variable
|
||||||
|
|
||||||
|
`@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own.
|
||||||
|
|
||||||
|
If a broken link occurs in a snippet that was inserted into multiple generated files in different directories, use `@docroot@` to reference the `doc/manual/src` directory.
|
||||||
|
|
||||||
|
If the `@docroot@` literal appears in an error message from the [`mdbook-linkcheck`] tool, the `@docroot@` replacement needs to be applied to the generated source file that mentions it.
|
||||||
|
See existing `@docroot@` logic in the [Makefile for the manual].
|
||||||
|
Regular markdown files used for the manual have a base path of their own and they can use relative paths instead of `@docroot@`.
|
||||||
|
|
||||||
|
## API documentation
|
||||||
|
|
||||||
|
[Doxygen API documentation] is available online.
|
||||||
|
You can also build and view it yourself:
|
||||||
|
|
||||||
|
[Doxygen API documentation]: https://hydra.nixos.org/job/nix/master/internal-api-docs/latest/download-by-type/doc/internal-api-docs
|
||||||
|
|
||||||
|
```console
|
||||||
|
# nix build .#hydraJobs.internal-api-docs
|
||||||
|
# xdg-open ./result/share/doc/nix/internal-api/html/index.html
|
||||||
|
```
|
||||||
|
|
||||||
|
or inside `nix-shell` or `nix develop`:
|
||||||
|
|
||||||
|
```
|
||||||
|
# make internal-api-html
|
||||||
|
# xdg-open ./outputs/doc/share/doc/nix/internal-api/html/index.html
|
||||||
|
```
|
|
@ -42,8 +42,8 @@ $ nix develop .#native-clang11StdenvPackages
|
||||||
To build Nix itself in this shell:
|
To build Nix itself in this shell:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
[nix-shell]$ ./bootstrap.sh
|
[nix-shell]$ autoreconfPhase
|
||||||
[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out
|
[nix-shell]$ configurePhase
|
||||||
[nix-shell]$ make -j $NIX_BUILD_CORES
|
[nix-shell]$ make -j $NIX_BUILD_CORES
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -86,7 +86,7 @@ $ nix-shell --attr devShells.x86_64-linux.native-clang11StdenvPackages
|
||||||
To build Nix itself in this shell:
|
To build Nix itself in this shell:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
[nix-shell]$ ./bootstrap.sh
|
[nix-shell]$ autoreconfPhase
|
||||||
[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out
|
[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out
|
||||||
[nix-shell]$ make -j $NIX_BUILD_CORES
|
[nix-shell]$ make -j $NIX_BUILD_CORES
|
||||||
```
|
```
|
||||||
|
@ -220,54 +220,3 @@ Configure your editor to use the `clangd` from the shell, either by running it i
|
||||||
> For some editors (e.g. Visual Studio Code), you may need to install a [special extension](https://open-vsx.org/extension/llvm-vs-code-extensions/vscode-clangd) for the editor to interact with `clangd`.
|
> For some editors (e.g. Visual Studio Code), you may need to install a [special extension](https://open-vsx.org/extension/llvm-vs-code-extensions/vscode-clangd) for the editor to interact with `clangd`.
|
||||||
> Some other editors (e.g. Emacs, Vim) need a plugin to support LSP servers in general (e.g. [lsp-mode](https://github.com/emacs-lsp/lsp-mode) for Emacs and [vim-lsp](https://github.com/prabirshrestha/vim-lsp) for vim).
|
> Some other editors (e.g. Emacs, Vim) need a plugin to support LSP servers in general (e.g. [lsp-mode](https://github.com/emacs-lsp/lsp-mode) for Emacs and [vim-lsp](https://github.com/prabirshrestha/vim-lsp) for vim).
|
||||||
> Editor-specific setup is typically opinionated, so we will not cover it here in more detail.
|
> Editor-specific setup is typically opinionated, so we will not cover it here in more detail.
|
||||||
|
|
||||||
### Checking links in the manual
|
|
||||||
|
|
||||||
The build checks for broken internal links.
|
|
||||||
This happens late in the process, so `nix build` is not suitable for iterating.
|
|
||||||
To build the manual incrementally, run:
|
|
||||||
|
|
||||||
```console
|
|
||||||
make html -j $NIX_BUILD_CORES
|
|
||||||
```
|
|
||||||
|
|
||||||
In order to reflect changes to the [Makefile], clear all generated files before re-building:
|
|
||||||
|
|
||||||
[Makefile]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk
|
|
||||||
|
|
||||||
```console
|
|
||||||
rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/src/command-ref/new-cli && make html -j $NIX_BUILD_CORES
|
|
||||||
```
|
|
||||||
|
|
||||||
[`mdbook-linkcheck`] does not implement checking [URI fragments] yet.
|
|
||||||
|
|
||||||
[`mdbook-linkcheck`]: https://github.com/Michael-F-Bryan/mdbook-linkcheck
|
|
||||||
[URI fragments]: https://en.wikipedia.org/wiki/URI_fragment
|
|
||||||
|
|
||||||
#### `@docroot@` variable
|
|
||||||
|
|
||||||
`@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own.
|
|
||||||
|
|
||||||
If a broken link occurs in a snippet that was inserted into multiple generated files in different directories, use `@docroot@` to reference the `doc/manual/src` directory.
|
|
||||||
|
|
||||||
If the `@docroot@` literal appears in an error message from the `mdbook-linkcheck` tool, the `@docroot@` replacement needs to be applied to the generated source file that mentions it.
|
|
||||||
See existing `@docroot@` logic in the [Makefile].
|
|
||||||
Regular markdown files used for the manual have a base path of their own and they can use relative paths instead of `@docroot@`.
|
|
||||||
|
|
||||||
## API documentation
|
|
||||||
|
|
||||||
Doxygen API documentation is [available
|
|
||||||
online](https://hydra.nixos.org/job/nix/master/internal-api-docs/latest/download-by-type/doc/internal-api-docs). You
|
|
||||||
can also build and view it yourself:
|
|
||||||
|
|
||||||
```console
|
|
||||||
# nix build .#hydraJobs.internal-api-docs
|
|
||||||
# xdg-open ./result/share/doc/nix/internal-api/html/index.html
|
|
||||||
```
|
|
||||||
|
|
||||||
or inside a `nix develop` shell by running:
|
|
||||||
|
|
||||||
```
|
|
||||||
# make internal-api-html
|
|
||||||
# xdg-open ./outputs/doc/share/doc/nix/internal-api/html/index.html
|
|
||||||
```
|
|
||||||
|
|
|
@ -16,17 +16,73 @@ You can build it yourself:
|
||||||
|
|
||||||
## Unit-tests
|
## Unit-tests
|
||||||
|
|
||||||
The unit-tests for each Nix library (`libexpr`, `libstore`, etc..) are defined
|
The unit tests are defined using the [googletest] and [rapidcheck] frameworks.
|
||||||
under `src/{library_name}/tests` using the
|
|
||||||
[googletest](https://google.github.io/googletest/) and
|
[googletest]: https://google.github.io/googletest/
|
||||||
[rapidcheck](https://github.com/emil-e/rapidcheck) frameworks.
|
[rapidcheck]: https://github.com/emil-e/rapidcheck
|
||||||
|
|
||||||
|
### Source and header layout
|
||||||
|
|
||||||
|
> An example of some files, demonstrating much of what is described below
|
||||||
|
>
|
||||||
|
> ```
|
||||||
|
> src
|
||||||
|
> ├── libexpr
|
||||||
|
> │ ├── value/context.hh
|
||||||
|
> │ ├── value/context.cc
|
||||||
|
> │ │
|
||||||
|
> │ …
|
||||||
|
> └── tests
|
||||||
|
> │ ├── value/context.hh
|
||||||
|
> │ ├── value/context.cc
|
||||||
|
> │ │
|
||||||
|
> │ …
|
||||||
|
> │
|
||||||
|
> ├── unit-test-data
|
||||||
|
> │ ├── libstore
|
||||||
|
> │ │ ├── worker-protocol/content-address.bin
|
||||||
|
> │ │ …
|
||||||
|
> │ …
|
||||||
|
> …
|
||||||
|
> ```
|
||||||
|
|
||||||
|
The unit tests for each Nix library (`libnixexpr`, `libnixstore`, etc..) live inside a directory `src/${library_shortname}/tests` within the directory for the library (`src/${library_shortname}`).
|
||||||
|
|
||||||
|
The data is in `unit-test-data`, with one subdir per library, with the same name as where the code goes.
|
||||||
|
For example, `libnixstore` code is in `src/libstore`, and its test data is in `unit-test-data/libstore`.
|
||||||
|
The path to the `unit-test-data` directory is passed to the unit test executable with the environment variable `_NIX_TEST_UNIT_DATA`.
|
||||||
|
|
||||||
|
> **Note**
|
||||||
|
> Due to the way googletest works, downstream unit test executables will actually include and re-run upstream library tests.
|
||||||
|
> Therefore it is important that the same value for `_NIX_TEST_UNIT_DATA` be used with the tests for each library.
|
||||||
|
> That is why we have the test data nested within a single `unit-test-data` directory.
|
||||||
|
|
||||||
|
### Running tests
|
||||||
|
|
||||||
You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`.
|
You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`.
|
||||||
Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option, or the `GTEST_FILTER` environment variable.
|
Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option, or the `GTEST_FILTER` environment variable.
|
||||||
|
|
||||||
|
### Characterisation testing { #characaterisation-testing-unit }
|
||||||
|
|
||||||
|
See [functional characterisation testing](#characterisation-testing-functional) for a broader discussion of characterisation testing.
|
||||||
|
|
||||||
|
Like with the functional characterisation, `_NIX_TEST_ACCEPT=1` is also used.
|
||||||
|
For example:
|
||||||
|
```shell-session
|
||||||
|
$ _NIX_TEST_ACCEPT=1 make libstore-tests-exe_RUN
|
||||||
|
...
|
||||||
|
[ SKIPPED ] WorkerProtoTest.string_read
|
||||||
|
[ SKIPPED ] WorkerProtoTest.string_write
|
||||||
|
[ SKIPPED ] WorkerProtoTest.storePath_read
|
||||||
|
[ SKIPPED ] WorkerProtoTest.storePath_write
|
||||||
|
...
|
||||||
|
```
|
||||||
|
will regenerate the "golden master" expected result for the `libnixstore` characterisation tests.
|
||||||
|
The characterisation tests will mark themselves "skipped" since they regenerated the expected result instead of actually testing anything.
|
||||||
|
|
||||||
## Functional tests
|
## Functional tests
|
||||||
|
|
||||||
The functional tests reside under the `tests` directory and are listed in `tests/local.mk`.
|
The functional tests reside under the `tests/functional` directory and are listed in `tests/functional/local.mk`.
|
||||||
Each test is a bash script.
|
Each test is a bash script.
|
||||||
|
|
||||||
### Running the whole test suite
|
### Running the whole test suite
|
||||||
|
@ -35,8 +91,8 @@ The whole test suite can be run with:
|
||||||
|
|
||||||
```shell-session
|
```shell-session
|
||||||
$ make install && make installcheck
|
$ make install && make installcheck
|
||||||
ran test tests/foo.sh... [PASS]
|
ran test tests/functional/foo.sh... [PASS]
|
||||||
ran test tests/bar.sh... [PASS]
|
ran test tests/functional/bar.sh... [PASS]
|
||||||
...
|
...
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -44,14 +100,14 @@ ran test tests/bar.sh... [PASS]
|
||||||
|
|
||||||
Sometimes it is useful to group related tests so they can be easily run together without running the entire test suite.
|
Sometimes it is useful to group related tests so they can be easily run together without running the entire test suite.
|
||||||
Each test group is in a subdirectory of `tests`.
|
Each test group is in a subdirectory of `tests`.
|
||||||
For example, `tests/ca/local.mk` defines a `ca` test group for content-addressed derivation outputs.
|
For example, `tests/functional/ca/local.mk` defines a `ca` test group for content-addressed derivation outputs.
|
||||||
|
|
||||||
That test group can be run like this:
|
That test group can be run like this:
|
||||||
|
|
||||||
```shell-session
|
```shell-session
|
||||||
$ make ca.test-group -j50
|
$ make ca.test-group -j50
|
||||||
ran test tests/ca/nix-run.sh... [PASS]
|
ran test tests/functional/ca/nix-run.sh... [PASS]
|
||||||
ran test tests/ca/import-derivation.sh... [PASS]
|
ran test tests/functional/ca/import-derivation.sh... [PASS]
|
||||||
...
|
...
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -70,21 +126,21 @@ install-tests-groups += $(test-group-name)
|
||||||
Individual tests can be run with `make`:
|
Individual tests can be run with `make`:
|
||||||
|
|
||||||
```shell-session
|
```shell-session
|
||||||
$ make tests/${testName}.sh.test
|
$ make tests/functional/${testName}.sh.test
|
||||||
ran test tests/${testName}.sh... [PASS]
|
ran test tests/functional/${testName}.sh... [PASS]
|
||||||
```
|
```
|
||||||
|
|
||||||
or without `make`:
|
or without `make`:
|
||||||
|
|
||||||
```shell-session
|
```shell-session
|
||||||
$ ./mk/run-test.sh tests/${testName}.sh
|
$ ./mk/run-test.sh tests/functional/${testName}.sh
|
||||||
ran test tests/${testName}.sh... [PASS]
|
ran test tests/functional/${testName}.sh... [PASS]
|
||||||
```
|
```
|
||||||
|
|
||||||
To see the complete output, one can also run:
|
To see the complete output, one can also run:
|
||||||
|
|
||||||
```shell-session
|
```shell-session
|
||||||
$ ./mk/debug-test.sh tests/${testName}.sh
|
$ ./mk/debug-test.sh tests/functional/${testName}.sh
|
||||||
+ foo
|
+ foo
|
||||||
output from foo
|
output from foo
|
||||||
+ bar
|
+ bar
|
||||||
|
@ -119,7 +175,7 @@ edit it like so:
|
||||||
Then, running the test with `./mk/debug-test.sh` will drop you into GDB once the script reaches that point:
|
Then, running the test with `./mk/debug-test.sh` will drop you into GDB once the script reaches that point:
|
||||||
|
|
||||||
```shell-session
|
```shell-session
|
||||||
$ ./mk/debug-test.sh tests/${testName}.sh
|
$ ./mk/debug-test.sh tests/functional/${testName}.sh
|
||||||
...
|
...
|
||||||
+ gdb blash blub
|
+ gdb blash blub
|
||||||
GNU gdb (GDB) 12.1
|
GNU gdb (GDB) 12.1
|
||||||
|
@ -130,17 +186,29 @@ GNU gdb (GDB) 12.1
|
||||||
One can debug the Nix invocation in all the usual ways.
|
One can debug the Nix invocation in all the usual ways.
|
||||||
For example, enter `run` to start the Nix invocation.
|
For example, enter `run` to start the Nix invocation.
|
||||||
|
|
||||||
### Characterization testing
|
### Troubleshooting
|
||||||
|
|
||||||
Occasionally, Nix utilizes a technique called [Characterization Testing](https://en.wikipedia.org/wiki/Characterization_test) as part of the functional tests.
|
Sometimes running tests in the development shell may leave artefacts in the local repository.
|
||||||
|
To remove any traces of that:
|
||||||
|
|
||||||
|
```console
|
||||||
|
git clean -x --force tests
|
||||||
|
```
|
||||||
|
|
||||||
|
### Characterisation testing { #characterisation-testing-functional }
|
||||||
|
|
||||||
|
Occasionally, Nix utilizes a technique called [Characterisation Testing](https://en.wikipedia.org/wiki/Characterization_test) as part of the functional tests.
|
||||||
This technique is to include the exact output/behavior of a former version of Nix in a test in order to check that Nix continues to produce the same behavior going forward.
|
This technique is to include the exact output/behavior of a former version of Nix in a test in order to check that Nix continues to produce the same behavior going forward.
|
||||||
|
|
||||||
For example, this technique is used for the language tests, to check both the printed final value if evaluation was successful, and any errors and warnings encountered.
|
For example, this technique is used for the language tests, to check both the printed final value if evaluation was successful, and any errors and warnings encountered.
|
||||||
|
|
||||||
It is frequently useful to regenerate the expected output.
|
It is frequently useful to regenerate the expected output.
|
||||||
To do that, rerun the failed test with `_NIX_TEST_ACCEPT=1`.
|
To do that, rerun the failed test(s) with `_NIX_TEST_ACCEPT=1`.
|
||||||
(At least, this is the convention we've used for `tests/lang.sh`.
|
For example:
|
||||||
If we add more characterization testing we should always strive to be consistent.)
|
```bash
|
||||||
|
_NIX_TEST_ACCEPT=1 make tests/functional/lang.sh.test
|
||||||
|
```
|
||||||
|
This convention is shared with the [characterisation unit tests](#characterisation-testing-unit) too.
|
||||||
|
|
||||||
An interesting situation to document is the case when these tests are "overfitted".
|
An interesting situation to document is the case when these tests are "overfitted".
|
||||||
The language tests are, again, an example of this.
|
The language tests are, again, an example of this.
|
||||||
|
@ -153,7 +221,7 @@ Diagnostic outputs are indeed not a stable interface, but they still are importa
|
||||||
By recording the expected output, the test suite guards against accidental changes, and ensure the *result* (not just the code that implements it) of the diagnostic code paths are under code review.
|
By recording the expected output, the test suite guards against accidental changes, and ensure the *result* (not just the code that implements it) of the diagnostic code paths are under code review.
|
||||||
Regressions are caught, and improvements always show up in code review.
|
Regressions are caught, and improvements always show up in code review.
|
||||||
|
|
||||||
To ensure that characterization testing doesn't make it harder to intentionally change these interfaces, there always must be an easy way to regenerate the expected output, as we do with `_NIX_TEST_ACCEPT=1`.
|
To ensure that characterisation testing doesn't make it harder to intentionally change these interfaces, there always must be an easy way to regenerate the expected output, as we do with `_NIX_TEST_ACCEPT=1`.
|
||||||
|
|
||||||
## Integration tests
|
## Integration tests
|
||||||
|
|
||||||
|
@ -167,7 +235,7 @@ You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-
|
||||||
|
|
||||||
After a one-time setup, the Nix repository's GitHub Actions continuous integration (CI) workflow can test the installer each time you push to a branch.
|
After a one-time setup, the Nix repository's GitHub Actions continuous integration (CI) workflow can test the installer each time you push to a branch.
|
||||||
|
|
||||||
Creating a Cachix cache for your installer tests and adding its authorization token to GitHub enables [two installer-specific jobs in the CI workflow](https://github.com/NixOS/nix/blob/88a45d6149c0e304f6eb2efcc2d7a4d0d569f8af/.github/workflows/ci.yml#L50-L91):
|
Creating a Cachix cache for your installer tests and adding its authorisation token to GitHub enables [two installer-specific jobs in the CI workflow](https://github.com/NixOS/nix/blob/88a45d6149c0e304f6eb2efcc2d7a4d0d569f8af/.github/workflows/ci.yml#L50-L91):
|
||||||
|
|
||||||
- The `installer` job generates installers for the platforms below and uploads them to your Cachix cache:
|
- The `installer` job generates installers for the platforms below and uploads them to your Cachix cache:
|
||||||
- `x86_64-linux`
|
- `x86_64-linux`
|
||||||
|
|
|
@ -33,11 +33,15 @@
|
||||||
|
|
||||||
Ensure a [store path] is [valid][validity].
|
Ensure a [store path] is [valid][validity].
|
||||||
|
|
||||||
This means either running the [`builder`](@docroot@/language/derivations.md#attr-builder) executable as specified in the corresponding [derivation], or fetching a pre-built [store object] from a [substituter], or delegating to a [remote builder](@docroot@/advanced-topics/distributed-builds.html) and retrieving the outputs. <!-- TODO: link [running] to build process page, #8888 -->
|
This can be achieved by:
|
||||||
|
- Fetching a pre-built [store object] from a [substituter]
|
||||||
|
- Running the [`builder`](@docroot@/language/derivations.md#attr-builder) executable as specified in the corresponding [derivation]
|
||||||
|
- Delegating to a [remote builder](@docroot@/advanced-topics/distributed-builds.html) and retrieving the outputs
|
||||||
|
<!-- TODO: link [running] to build process page, #8888 -->
|
||||||
|
|
||||||
See [`nix-build`](./command-ref/nix-build.md) and [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md).
|
See [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md) for a detailed description of the algorithm.
|
||||||
|
|
||||||
See [`nix build`](./command-ref/new-cli/nix3-build.md) (experimental).
|
See also [`nix-build`](./command-ref/nix-build.md) and [`nix build`](./command-ref/new-cli/nix3-build.md) (experimental).
|
||||||
|
|
||||||
[realise]: #gloss-realise
|
[realise]: #gloss-realise
|
||||||
|
|
||||||
|
@ -105,12 +109,15 @@
|
||||||
|
|
||||||
- [store object]{#gloss-store-object}
|
- [store object]{#gloss-store-object}
|
||||||
|
|
||||||
|
|
||||||
A store object consists of a [file system object], [reference]s to other store objects, and other metadata.
|
A store object consists of a [file system object], [reference]s to other store objects, and other metadata.
|
||||||
It can be referred to by a [store path].
|
It can be referred to by a [store path].
|
||||||
|
|
||||||
[store object]: #gloss-store-object
|
[store object]: #gloss-store-object
|
||||||
|
|
||||||
|
- [IFD]{#gloss-ifd}
|
||||||
|
|
||||||
|
[Import From Derivation](./language/import-from-derivation.md)
|
||||||
|
|
||||||
- [input-addressed store object]{#gloss-input-addressed-store-object}
|
- [input-addressed store object]{#gloss-input-addressed-store-object}
|
||||||
|
|
||||||
A store object produced by building a
|
A store object produced by building a
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
After cloning Nix's Git repository, issue the following commands:
|
After cloning Nix's Git repository, issue the following commands:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ ./bootstrap.sh
|
$ autoreconf -vfi
|
||||||
$ ./configure options...
|
$ ./configure options...
|
||||||
$ make
|
$ make
|
||||||
$ make install
|
$ make install
|
||||||
|
|
|
@ -112,6 +112,13 @@ Derivations can declare some infrequently used optional attributes.
|
||||||
> environmental variables come from the environment of the
|
> environmental variables come from the environment of the
|
||||||
> `nix-build`.
|
> `nix-build`.
|
||||||
|
|
||||||
|
If the [`configurable-impure-env` experimental
|
||||||
|
feature](@docroot@/contributing/experimental-features.md#xp-feature-configurable-impure-env)
|
||||||
|
is enabled, these environment variables can also be controlled
|
||||||
|
through the
|
||||||
|
[`impure-env`](@docroot@/command-ref/conf-file.md#conf-impure-env)
|
||||||
|
configuration setting.
|
||||||
|
|
||||||
- [`outputHash`]{#adv-attr-outputHash}; [`outputHashAlgo`]{#adv-attr-outputHashAlgo}; [`outputHashMode`]{#adv-attr-outputHashMode}\
|
- [`outputHash`]{#adv-attr-outputHash}; [`outputHashAlgo`]{#adv-attr-outputHashAlgo}; [`outputHashMode`]{#adv-attr-outputHashMode}\
|
||||||
These attributes declare that the derivation is a so-called
|
These attributes declare that the derivation is a so-called
|
||||||
*fixed-output derivation*, which means that a cryptographic hash of
|
*fixed-output derivation*, which means that a cryptographic hash of
|
||||||
|
@ -229,6 +236,8 @@ Derivations can declare some infrequently used optional attributes.
|
||||||
[`outputHashAlgo`](#adv-attr-outputHashAlgo)
|
[`outputHashAlgo`](#adv-attr-outputHashAlgo)
|
||||||
like for *fixed-output derivations* (see above).
|
like for *fixed-output derivations* (see above).
|
||||||
|
|
||||||
|
It also implicitly requires that the machine to build the derivation must have the `ca-derivations` [system feature](@docroot@/command-ref/conf-file.md#conf-system-features).
|
||||||
|
|
||||||
- [`passAsFile`]{#adv-attr-passAsFile}\
|
- [`passAsFile`]{#adv-attr-passAsFile}\
|
||||||
A list of names of attributes that should be passed via files rather
|
A list of names of attributes that should be passed via files rather
|
||||||
than environment variables. For example, if you have
|
than environment variables. For example, if you have
|
||||||
|
@ -271,18 +280,21 @@ Derivations can declare some infrequently used optional attributes.
|
||||||
|
|
||||||
- [`__structuredAttrs`]{#adv-attr-structuredAttrs}\
|
- [`__structuredAttrs`]{#adv-attr-structuredAttrs}\
|
||||||
If the special attribute `__structuredAttrs` is set to `true`, the other derivation
|
If the special attribute `__structuredAttrs` is set to `true`, the other derivation
|
||||||
attributes are serialised in JSON format and made available to the
|
attributes are serialised into a file in JSON format. The environment variable
|
||||||
builder via the file `.attrs.json` in the builder’s temporary
|
`NIX_ATTRS_JSON_FILE` points to the exact location of that file both in a build
|
||||||
directory. This obviates the need for [`passAsFile`](#adv-attr-passAsFile) since JSON files
|
and a [`nix-shell`](../command-ref/nix-shell.md). This obviates the need for
|
||||||
have no size restrictions, unlike process environments.
|
[`passAsFile`](#adv-attr-passAsFile) since JSON files have no size restrictions,
|
||||||
|
unlike process environments.
|
||||||
|
|
||||||
It also makes it possible to tweak derivation settings in a structured way; see
|
It also makes it possible to tweak derivation settings in a structured way; see
|
||||||
[`outputChecks`](#adv-attr-outputChecks) for example.
|
[`outputChecks`](#adv-attr-outputChecks) for example.
|
||||||
|
|
||||||
As a convenience to Bash builders,
|
As a convenience to Bash builders,
|
||||||
Nix writes a script named `.attrs.sh` to the builder’s directory
|
Nix writes a script that initialises shell variables
|
||||||
that initialises shell variables corresponding to all attributes
|
corresponding to all attributes that are representable in Bash. The
|
||||||
that are representable in Bash. This includes non-nested
|
environment variable `NIX_ATTRS_SH_FILE` points to the exact
|
||||||
|
location of the script, both in a build and a
|
||||||
|
[`nix-shell`](../command-ref/nix-shell.md). This includes non-nested
|
||||||
(associative) arrays. For example, the attribute `hardening.format = true`
|
(associative) arrays. For example, the attribute `hardening.format = true`
|
||||||
ends up as the Bash associative array element `${hardening[format]}`.
|
ends up as the Bash associative array element `${hardening[format]}`.
|
||||||
|
|
||||||
|
@ -335,3 +347,15 @@ Derivations can declare some infrequently used optional attributes.
|
||||||
This is useful, for example, when generating self-contained filesystem images with
|
This is useful, for example, when generating self-contained filesystem images with
|
||||||
their own embedded Nix store: hashes found inside such an image refer
|
their own embedded Nix store: hashes found inside such an image refer
|
||||||
to the embedded store and not to the host's Nix store.
|
to the embedded store and not to the host's Nix store.
|
||||||
|
|
||||||
|
- [`requiredSystemFeatures`]{#adv-attr-requiredSystemFeatures}\
|
||||||
|
|
||||||
|
If a derivation has the `requiredSystemFeatures` attribute, then Nix will only build it on a machine that has the corresponding features set in its [`system-features` configuration](@docroot@/command-ref/conf-file.md#conf-system-features).
|
||||||
|
|
||||||
|
For example, setting
|
||||||
|
|
||||||
|
```nix
|
||||||
|
requiredSystemFeatures = [ "kvm" ];
|
||||||
|
```
|
||||||
|
|
||||||
|
ensures that the derivation can only be built on a machine with the `kvm` feature.
|
||||||
|
|
27
doc/manual/src/language/constructs/lookup-path.md
Normal file
27
doc/manual/src/language/constructs/lookup-path.md
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
# Lookup path
|
||||||
|
|
||||||
|
> **Syntax**
|
||||||
|
>
|
||||||
|
> *lookup-path* = `<` *identifier* [ `/` *identifier* ]... `>`
|
||||||
|
|
||||||
|
A lookup path is an identifier with an optional path suffix that resolves to a [path value](@docroot@/language/values.md#type-path) if the identifier matches a search path entry.
|
||||||
|
|
||||||
|
The value of a lookup path is determined by [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath).
|
||||||
|
|
||||||
|
See [`builtins.findFile`](@docroot@/language/builtins.md#builtins-findFile) for details on lookup path resolution.
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```nix
|
||||||
|
> <nixpkgs>
|
||||||
|
>```
|
||||||
|
>
|
||||||
|
> /nix/var/nix/profiles/per-user/root/channels/nixpkgs
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```nix
|
||||||
|
> <nixpkgs/nixos>
|
||||||
|
>```
|
||||||
|
>
|
||||||
|
> /nix/var/nix/profiles/per-user/root/channels/nixpkgs/nixos
|
|
@ -1,161 +1,207 @@
|
||||||
# Derivations
|
# Derivations
|
||||||
|
|
||||||
The most important built-in function is `derivation`, which is used to
|
The most important built-in function is `derivation`, which is used to describe a single derivation:
|
||||||
describe a single derivation (a build task). It takes as input a set,
|
a specification for running an executable on precisely defined input files to repeatably produce output files at uniquely determined file system paths.
|
||||||
the attributes of which specify the inputs of the build.
|
|
||||||
|
|
||||||
- There must be an attribute named [`system`]{#attr-system} whose value must be a
|
It takes as input an attribute set, the attributes of which specify the inputs to the process.
|
||||||
string specifying a Nix system type, such as `"i686-linux"` or
|
It outputs an attribute set, and produces a [store derivation] as a side effect of evaluation.
|
||||||
`"x86_64-darwin"`. (To figure out your system type, run `nix -vv
|
|
||||||
--version`.) The build can only be performed on a machine and
|
|
||||||
operating system matching the system type. (Nix can automatically
|
|
||||||
[forward builds for other
|
|
||||||
platforms](../advanced-topics/distributed-builds.md) by forwarding
|
|
||||||
them to other machines.)
|
|
||||||
|
|
||||||
- There must be an attribute named `name` whose value must be a
|
[store derivation]: @docroot@/glossary.md#gloss-store-derivation
|
||||||
string. This is used as a symbolic name for the package by
|
|
||||||
`nix-env`, and it is appended to the output paths of the derivation.
|
|
||||||
|
|
||||||
- There must be an attribute named [`builder`]{#attr-builder} that identifies the
|
<!-- FIXME: add a section on output attributes -->
|
||||||
program that is executed to perform the build. It can be either a
|
|
||||||
derivation or a source (a local file reference, e.g.,
|
|
||||||
`./builder.sh`).
|
|
||||||
|
|
||||||
- Every attribute is passed as an environment variable to the builder.
|
## Input attributes
|
||||||
Attribute values are translated to environment variables as follows:
|
|
||||||
|
|
||||||
- Strings and numbers are just passed verbatim.
|
|
||||||
|
|
||||||
- A *path* (e.g., `../foo/sources.tar`) causes the referenced file
|
|
||||||
to be copied to the store; its location in the store is put in
|
|
||||||
the environment variable. The idea is that all sources should
|
|
||||||
reside in the Nix store, since all inputs to a derivation should
|
|
||||||
reside in the Nix store.
|
|
||||||
|
|
||||||
- A *derivation* causes that derivation to be built prior to the
|
|
||||||
present derivation; its default output path is put in the
|
|
||||||
environment variable.
|
|
||||||
|
|
||||||
- Lists of the previous types are also allowed. They are simply
|
|
||||||
concatenated, separated by spaces.
|
|
||||||
|
|
||||||
- `true` is passed as the string `1`, `false` and `null` are
|
|
||||||
passed as an empty string.
|
|
||||||
|
|
||||||
- The optional attribute `args` specifies command-line arguments to be
|
### Required
|
||||||
passed to the builder. It should be a list.
|
|
||||||
|
|
||||||
- The optional attribute `outputs` specifies a list of symbolic
|
- [`name`]{#attr-name} ([String](@docroot@/language/values.md#type-string))
|
||||||
outputs of the derivation. By default, a derivation produces a
|
|
||||||
single output path, denoted as `out`. However, derivations can
|
|
||||||
produce multiple output paths. This is useful because it allows
|
|
||||||
outputs to be downloaded or garbage-collected separately. For
|
|
||||||
instance, imagine a library package that provides a dynamic library,
|
|
||||||
header files, and documentation. A program that links against the
|
|
||||||
library doesn’t need the header files and documentation at runtime,
|
|
||||||
and it doesn’t need the documentation at build time. Thus, the
|
|
||||||
library package could specify:
|
|
||||||
|
|
||||||
```nix
|
|
||||||
outputs = [ "lib" "headers" "doc" ];
|
|
||||||
```
|
|
||||||
|
|
||||||
This will cause Nix to pass environment variables `lib`, `headers`
|
|
||||||
and `doc` to the builder containing the intended store paths of each
|
|
||||||
output. The builder would typically do something like
|
|
||||||
|
|
||||||
```bash
|
|
||||||
./configure \
|
|
||||||
--libdir=$lib/lib \
|
|
||||||
--includedir=$headers/include \
|
|
||||||
--docdir=$doc/share/doc
|
|
||||||
```
|
|
||||||
|
|
||||||
for an Autoconf-style package. You can refer to each output of a
|
|
||||||
derivation by selecting it as an attribute, e.g.
|
|
||||||
|
|
||||||
```nix
|
|
||||||
buildInputs = [ pkg.lib pkg.headers ];
|
|
||||||
```
|
|
||||||
|
|
||||||
The first element of `outputs` determines the *default output*.
|
|
||||||
Thus, you could also write
|
|
||||||
|
|
||||||
```nix
|
|
||||||
buildInputs = [ pkg pkg.headers ];
|
|
||||||
```
|
|
||||||
|
|
||||||
since `pkg` is equivalent to `pkg.lib`.
|
|
||||||
|
|
||||||
The function `mkDerivation` in the Nixpkgs standard environment is a
|
A symbolic name for the derivation.
|
||||||
wrapper around `derivation` that adds a default value for `system` and
|
It is added to the [store derivation]'s [path](@docroot@/glossary.md#gloss-store-path) and its [output paths][output path].
|
||||||
always uses Bash as the builder, to which the supplied builder is passed
|
|
||||||
as a command-line argument. See the Nixpkgs manual for details.
|
|
||||||
|
|
||||||
The builder is executed as follows:
|
Example: `name = "hello";`
|
||||||
|
|
||||||
- A temporary directory is created under the directory specified by
|
The store derivation's path will be `/nix/store/<hash>-hello.drv`, and the output paths will be of the form `/nix/store/<hash>-hello[-<output>]`
|
||||||
`TMPDIR` (default `/tmp`) where the build will take place. The
|
- [`system`]{#attr-system} ([String](@docroot@/language/values.md#type-string))
|
||||||
current directory is changed to this directory.
|
|
||||||
|
|
||||||
- The environment is cleared and set to the derivation attributes, as
|
The system type on which the [`builder`](#attr-builder) executable is meant to be run.
|
||||||
specified above.
|
|
||||||
|
|
||||||
- In addition, the following variables are set:
|
A necessary condition for Nix to build derivations locally is that the `system` attribute matches the current [`system` configuration option].
|
||||||
|
It can automatically [build on other platforms](../advanced-topics/distributed-builds.md) by forwarding build requests to other machines.
|
||||||
- `NIX_BUILD_TOP` contains the path of the temporary directory for
|
|
||||||
this build.
|
|
||||||
|
|
||||||
- Also, `TMPDIR`, `TEMPDIR`, `TMP`, `TEMP` are set to point to the
|
|
||||||
temporary directory. This is to prevent the builder from
|
|
||||||
accidentally writing temporary files anywhere else. Doing so
|
|
||||||
might cause interference by other processes.
|
|
||||||
|
|
||||||
- `PATH` is set to `/path-not-set` to prevent shells from
|
|
||||||
initialising it to their built-in default value.
|
|
||||||
|
|
||||||
- `HOME` is set to `/homeless-shelter` to prevent programs from
|
|
||||||
using `/etc/passwd` or the like to find the user's home
|
|
||||||
directory, which could cause impurity. Usually, when `HOME` is
|
|
||||||
set, it is used as the location of the home directory, even if
|
|
||||||
it points to a non-existent path.
|
|
||||||
|
|
||||||
- `NIX_STORE` is set to the path of the top-level Nix store
|
|
||||||
directory (typically, `/nix/store`).
|
|
||||||
|
|
||||||
- For each output declared in `outputs`, the corresponding
|
|
||||||
environment variable is set to point to the intended path in the
|
|
||||||
Nix store for that output. Each output path is a concatenation
|
|
||||||
of the cryptographic hash of all build inputs, the `name`
|
|
||||||
attribute and the output name. (The output name is omitted if
|
|
||||||
it’s `out`.)
|
|
||||||
|
|
||||||
- If an output path already exists, it is removed. Also, locks are
|
Examples:
|
||||||
acquired to prevent multiple Nix instances from performing the same
|
|
||||||
build at the same time.
|
|
||||||
|
|
||||||
- A log of the combined standard output and error is written to
|
`system = "x86_64-linux";`
|
||||||
`/nix/var/log/nix`.
|
|
||||||
|
|
||||||
- The builder is executed with the arguments specified by the
|
`system = builtins.currentSystem;`
|
||||||
attribute `args`. If it exits with exit code 0, it is considered to
|
|
||||||
have succeeded.
|
|
||||||
|
|
||||||
- The temporary directory is removed (unless the `-K` option was
|
[`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) has the value of the [`system` configuration option], and defaults to the system type of the current Nix installation.
|
||||||
specified).
|
|
||||||
|
|
||||||
- If the build was successful, Nix scans each output path for
|
[`system` configuration option]: @docroot@/command-ref/conf-file.md#conf-system
|
||||||
references to input paths by looking for the hash parts of the input
|
|
||||||
paths. Since these are potential runtime dependencies, Nix registers
|
|
||||||
them as dependencies of the output paths.
|
|
||||||
|
|
||||||
- After the build, Nix sets the last-modified timestamp on all files
|
- [`builder`]{#attr-builder} ([Path](@docroot@/language/values.md#type-path) | [String](@docroot@/language/values.md#type-string))
|
||||||
in the build result to 1 (00:00:01 1/1/1970 UTC), sets the group to
|
|
||||||
the default group, and sets the mode of the file to 0444 or 0555
|
Path to an executable that will perform the build.
|
||||||
(i.e., read-only, with execute permission enabled if the file was
|
|
||||||
originally executable). Note that possible `setuid` and `setgid`
|
Examples:
|
||||||
bits are cleared. Setuid and setgid programs are not currently
|
|
||||||
supported by Nix. This is because the Nix archives used in
|
`builder = "/bin/bash";`
|
||||||
deployment have no concept of ownership information, and because it
|
|
||||||
makes the build result dependent on the user performing the build.
|
`builder = ./builder.sh;`
|
||||||
|
|
||||||
|
`builder = "${pkgs.python}/bin/python";`
|
||||||
|
|
||||||
|
### Optional
|
||||||
|
|
||||||
|
- [`args`]{#attr-args} ([List](@docroot@/language/values.md#list) of [String](@docroot@/language/values.md#type-string)) Default: `[ ]`
|
||||||
|
|
||||||
|
Command-line arguments to be passed to the [`builder`](#attr-builder) executable.
|
||||||
|
|
||||||
|
Example: `args = [ "-c" "echo hello world > $out" ];`
|
||||||
|
|
||||||
|
- [`outputs`]{#attr-outputs} ([List](@docroot@/language/values.md#list) of [String](@docroot@/language/values.md#type-string)) Default: `[ "out" ]`
|
||||||
|
|
||||||
|
Symbolic outputs of the derivation.
|
||||||
|
Each output name is passed to the [`builder`](#attr-builder) executable as an environment variable with its value set to the corresponding [output path].
|
||||||
|
|
||||||
|
[output path]: @docroot@/glossary.md#gloss-output-path
|
||||||
|
|
||||||
|
By default, a derivation produces a single output path called `out`.
|
||||||
|
However, derivations can produce multiple output paths.
|
||||||
|
This allows the associated [store objects](@docroot@/glossary.md#gloss-store-object) and their [closures](@docroot@/glossary.md#gloss-closure) to be copied or garbage-collected separately.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
Imagine a library package that provides a dynamic library, header files, and documentation.
|
||||||
|
A program that links against such a library doesn’t need the header files and documentation at runtime, and it doesn’t need the documentation at build time.
|
||||||
|
Thus, the library package could specify:
|
||||||
|
|
||||||
|
```nix
|
||||||
|
derivation {
|
||||||
|
# ...
|
||||||
|
outputs = [ "lib" "dev" "doc" ];
|
||||||
|
# ...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This will cause Nix to pass environment variables `lib`, `dev`, and `doc` to the builder containing the intended store paths of each output.
|
||||||
|
The builder would typically do something like
|
||||||
|
|
||||||
|
```bash
|
||||||
|
./configure \
|
||||||
|
--libdir=$lib/lib \
|
||||||
|
--includedir=$dev/include \
|
||||||
|
--docdir=$doc/share/doc
|
||||||
|
```
|
||||||
|
|
||||||
|
for an Autoconf-style package.
|
||||||
|
|
||||||
|
You can refer to each output of a derivation by selecting it as an attribute, e.g. `myPackage.lib` or `myPackage.doc`.
|
||||||
|
|
||||||
|
The first element of `outputs` determines the *default output*.
|
||||||
|
Therefore, in the given example, `myPackage` is equivalent to `myPackage.lib`.
|
||||||
|
|
||||||
|
<!-- FIXME: refer to the output attributes when we have one -->
|
||||||
|
|
||||||
|
- See [Advanced Attributes](./advanced-attributes.md) for more, infrequently used, optional attributes.
|
||||||
|
|
||||||
|
<!-- FIXME: This should be moved here -->
|
||||||
|
|
||||||
|
- Every other attribute is passed as an environment variable to the builder.
|
||||||
|
Attribute values are translated to environment variables as follows:
|
||||||
|
|
||||||
|
- Strings are passed unchanged.
|
||||||
|
|
||||||
|
- Integral numbers are converted to decimal notation.
|
||||||
|
|
||||||
|
- Floating point numbers are converted to simple decimal or scientific notation with a preset precision.
|
||||||
|
|
||||||
|
- A *path* (e.g., `../foo/sources.tar`) causes the referenced file
|
||||||
|
to be copied to the store; its location in the store is put in
|
||||||
|
the environment variable. The idea is that all sources should
|
||||||
|
reside in the Nix store, since all inputs to a derivation should
|
||||||
|
reside in the Nix store.
|
||||||
|
|
||||||
|
- A *derivation* causes that derivation to be built prior to the
|
||||||
|
present derivation; its default output path is put in the
|
||||||
|
environment variable.
|
||||||
|
|
||||||
|
- Lists of the previous types are also allowed. They are simply
|
||||||
|
concatenated, separated by spaces.
|
||||||
|
|
||||||
|
- `true` is passed as the string `1`, `false` and `null` are
|
||||||
|
passed as an empty string.
|
||||||
|
|
||||||
|
## Builder execution
|
||||||
|
|
||||||
|
The [`builder`](#attr-builder) is executed as follows:
|
||||||
|
|
||||||
|
- A temporary directory is created under the directory specified by
|
||||||
|
`TMPDIR` (default `/tmp`) where the build will take place. The
|
||||||
|
current directory is changed to this directory.
|
||||||
|
|
||||||
|
- The environment is cleared and set to the derivation attributes, as
|
||||||
|
specified above.
|
||||||
|
|
||||||
|
- In addition, the following variables are set:
|
||||||
|
|
||||||
|
- `NIX_BUILD_TOP` contains the path of the temporary directory for
|
||||||
|
this build.
|
||||||
|
|
||||||
|
- Also, `TMPDIR`, `TEMPDIR`, `TMP`, `TEMP` are set to point to the
|
||||||
|
temporary directory. This is to prevent the builder from
|
||||||
|
accidentally writing temporary files anywhere else. Doing so
|
||||||
|
might cause interference by other processes.
|
||||||
|
|
||||||
|
- `PATH` is set to `/path-not-set` to prevent shells from
|
||||||
|
initialising it to their built-in default value.
|
||||||
|
|
||||||
|
- `HOME` is set to `/homeless-shelter` to prevent programs from
|
||||||
|
using `/etc/passwd` or the like to find the user's home
|
||||||
|
directory, which could cause impurity. Usually, when `HOME` is
|
||||||
|
set, it is used as the location of the home directory, even if
|
||||||
|
it points to a non-existent path.
|
||||||
|
|
||||||
|
- `NIX_STORE` is set to the path of the top-level Nix store
|
||||||
|
directory (typically, `/nix/store`).
|
||||||
|
|
||||||
|
- `NIX_ATTRS_JSON_FILE` & `NIX_ATTRS_SH_FILE` if `__structuredAttrs`
|
||||||
|
is set to `true` for the derivation. A detailed explanation of this
|
||||||
|
behavior can be found in the
|
||||||
|
[section about structured attrs](./advanced-attributes.md#adv-attr-structuredAttrs).
|
||||||
|
|
||||||
|
- For each output declared in `outputs`, the corresponding
|
||||||
|
environment variable is set to point to the intended path in the
|
||||||
|
Nix store for that output. Each output path is a concatenation
|
||||||
|
of the cryptographic hash of all build inputs, the `name`
|
||||||
|
attribute and the output name. (The output name is omitted if
|
||||||
|
it’s `out`.)
|
||||||
|
|
||||||
|
- If an output path already exists, it is removed. Also, locks are
|
||||||
|
acquired to prevent multiple Nix instances from performing the same
|
||||||
|
build at the same time.
|
||||||
|
|
||||||
|
- A log of the combined standard output and error is written to
|
||||||
|
`/nix/var/log/nix`.
|
||||||
|
|
||||||
|
- The builder is executed with the arguments specified by the
|
||||||
|
attribute `args`. If it exits with exit code 0, it is considered to
|
||||||
|
have succeeded.
|
||||||
|
|
||||||
|
- The temporary directory is removed (unless the `-K` option was
|
||||||
|
specified).
|
||||||
|
|
||||||
|
- If the build was successful, Nix scans each output path for
|
||||||
|
references to input paths by looking for the hash parts of the input
|
||||||
|
paths. Since these are potential runtime dependencies, Nix registers
|
||||||
|
them as dependencies of the output paths.
|
||||||
|
|
||||||
|
- After the build, Nix sets the last-modified timestamp on all files
|
||||||
|
in the build result to 1 (00:00:01 1/1/1970 UTC), sets the group to
|
||||||
|
the default group, and sets the mode of the file to 0444 or 0555
|
||||||
|
(i.e., read-only, with execute permission enabled if the file was
|
||||||
|
originally executable). Note that possible `setuid` and `setgid`
|
||||||
|
bits are cleared. Setuid and setgid programs are not currently
|
||||||
|
supported by Nix. This is because the Nix archives used in
|
||||||
|
deployment have no concept of ownership information, and because it
|
||||||
|
makes the build result dependent on the user performing the build.
|
||||||
|
|
139
doc/manual/src/language/import-from-derivation.md
Normal file
139
doc/manual/src/language/import-from-derivation.md
Normal file
|
@ -0,0 +1,139 @@
|
||||||
|
# Import From Derivation
|
||||||
|
|
||||||
|
The value of a Nix expression can depend on the contents of a [store object](@docroot@/glossary.md#gloss-store-object).
|
||||||
|
|
||||||
|
Passing an expression `expr` that evaluates to a [store path](@docroot@/glossary.md#gloss-store-path) to any built-in function which reads from the filesystem constitutes Import From Derivation (IFD):
|
||||||
|
|
||||||
|
- [`import`](./builtins.md#builtins-import)` expr`
|
||||||
|
- [`builtins.readFile`](./builtins.md#builtins-readFile)` expr`
|
||||||
|
- [`builtins.readFileType`](./builtins.md#builtins-readFileType)` expr`
|
||||||
|
- [`builtins.readDir`](./builtins.md#builtins-readDir)` expr`
|
||||||
|
- [`builtins.pathExists`](./builtins.md#builtins-pathExists)` expr`
|
||||||
|
- [`builtins.filterSource`](./builtins.md#builtins-filterSource)` f expr`
|
||||||
|
- [`builtins.path`](./builtins.md#builtins-path)` { path = expr; }`
|
||||||
|
- [`builtins.hashFile`](./builtins.md#builtins-hashFile)` t expr`
|
||||||
|
- `builtins.scopedImport x drv`
|
||||||
|
|
||||||
|
When the store path needs to be accessed, evaluation will be paused, the corresponding store object [realised], and then evaluation resumed.
|
||||||
|
|
||||||
|
[realised]: @docroot@/glossary.md#gloss-realise
|
||||||
|
|
||||||
|
This has performance implications:
|
||||||
|
Evaluation can only finish when all required store objects are realised.
|
||||||
|
Since the Nix language evaluator is sequential, it only finds store paths to read from one at a time.
|
||||||
|
While realisation is always parallel, in this case it cannot be done for all required store paths at once, and is therefore much slower than otherwise.
|
||||||
|
|
||||||
|
Realising store objects during evaluation can be disabled by setting [`allow-import-from-derivation`](../command-ref/conf-file.md#conf-allow-import-from-derivation) to `false`.
|
||||||
|
Without IFD it is ensured that evaluation is complete and Nix can produce a build plan before starting any realisation.
|
||||||
|
|
||||||
|
## Example
|
||||||
|
|
||||||
|
In the following Nix expression, the inner derivation `drv` produces a file with contents `hello`.
|
||||||
|
|
||||||
|
```nix
|
||||||
|
# IFD.nix
|
||||||
|
let
|
||||||
|
drv = derivation {
|
||||||
|
name = "hello";
|
||||||
|
builder = "/bin/sh";
|
||||||
|
args = [ "-c" "echo -n hello > $out" ];
|
||||||
|
system = builtins.currentSystem;
|
||||||
|
};
|
||||||
|
in "${builtins.readFile drv} world"
|
||||||
|
```
|
||||||
|
|
||||||
|
```shellSession
|
||||||
|
nix-instantiate IFD.nix --eval --read-write-mode
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
building '/nix/store/348q1cal6sdgfxs8zqi9v8llrsn4kqkq-hello.drv'...
|
||||||
|
"hello world"
|
||||||
|
```
|
||||||
|
|
||||||
|
The contents of the derivation's output have to be [realised] before they can be read with [`readFile`](./builtins.md#builtins-readFile).
|
||||||
|
Only then can evaluation continue to produce the final result.
|
||||||
|
|
||||||
|
## Illustration
|
||||||
|
|
||||||
|
As a first approximation, the following data flow graph shows how evaluation and building are interleaved, if the value of a Nix expression depends on realising a [store object].
|
||||||
|
Boxes are data structures, arrow labels are transformations.
|
||||||
|
|
||||||
|
```
|
||||||
|
+----------------------+ +------------------------+
|
||||||
|
| Nix evaluator | | Nix store |
|
||||||
|
| .----------------. | | |
|
||||||
|
| | Nix expression | | | |
|
||||||
|
| '----------------' | | |
|
||||||
|
| | | | |
|
||||||
|
| evaluate | | |
|
||||||
|
| | | | |
|
||||||
|
| V | | |
|
||||||
|
| .------------. | | .------------------. |
|
||||||
|
| | derivation |----|-instantiate-|->| store derivation | |
|
||||||
|
| '------------' | | '------------------' |
|
||||||
|
| | | | |
|
||||||
|
| | | realise |
|
||||||
|
| | | | |
|
||||||
|
| | | V |
|
||||||
|
| .----------------. | | .--------------. |
|
||||||
|
| | Nix expression |<-|----read-----|----| store object | |
|
||||||
|
| '----------------' | | '--------------' |
|
||||||
|
| | | | |
|
||||||
|
| evaluate | | |
|
||||||
|
| | | | |
|
||||||
|
| V | | |
|
||||||
|
| .------------. | | |
|
||||||
|
| | value | | | |
|
||||||
|
| '------------' | | |
|
||||||
|
+----------------------+ +------------------------+
|
||||||
|
```
|
||||||
|
|
||||||
|
In more detail, the following sequence diagram shows how the expression is evaluated step by step, and where evaluation is blocked to wait for the build output to appear.
|
||||||
|
|
||||||
|
```
|
||||||
|
.-------. .-------------. .---------.
|
||||||
|
|Nix CLI| |Nix evaluator| |Nix store|
|
||||||
|
'-------' '-------------' '---------'
|
||||||
|
| | |
|
||||||
|
|evaluate IFD.nix| |
|
||||||
|
|--------------->| |
|
||||||
|
| | |
|
||||||
|
| evaluate `"${readFile drv} world"` |
|
||||||
|
| | |
|
||||||
|
| evaluate `readFile drv` |
|
||||||
|
| | |
|
||||||
|
| evaluate `drv` as string |
|
||||||
|
| | |
|
||||||
|
| |instantiate /nix/store/...-hello.drv|
|
||||||
|
| |----------------------------------->|
|
||||||
|
| : |
|
||||||
|
| : realise /nix/store/...-hello.drv |
|
||||||
|
| :----------------------------------->|
|
||||||
|
| : |
|
||||||
|
| |--------.
|
||||||
|
| : | |
|
||||||
|
| (evaluation blocked) | echo hello > $out
|
||||||
|
| : | |
|
||||||
|
| |<-------'
|
||||||
|
| : /nix/store/...-hello |
|
||||||
|
| |<-----------------------------------|
|
||||||
|
| | |
|
||||||
|
| resume `readFile /nix/store/...-hello` |
|
||||||
|
| | |
|
||||||
|
| | readFile /nix/store/...-hello |
|
||||||
|
| |----------------------------------->|
|
||||||
|
| | |
|
||||||
|
| | hello |
|
||||||
|
| |<-----------------------------------|
|
||||||
|
| | |
|
||||||
|
| resume `"${"hello"} world"` |
|
||||||
|
| | |
|
||||||
|
| resume `"hello world"` |
|
||||||
|
| | |
|
||||||
|
| "hello world" | |
|
||||||
|
|<---------------| |
|
||||||
|
.-------. .-------------. .---------.
|
||||||
|
|Nix CLI| |Nix evaluator| |Nix store|
|
||||||
|
'-------' '-------------' '---------'
|
||||||
|
```
|
|
@ -35,6 +35,8 @@
|
||||||
|
|
||||||
## Attribute selection
|
## Attribute selection
|
||||||
|
|
||||||
|
> **Syntax**
|
||||||
|
>
|
||||||
> *attrset* `.` *attrpath* \[ `or` *expr* \]
|
> *attrset* `.` *attrpath* \[ `or` *expr* \]
|
||||||
|
|
||||||
Select the attribute denoted by attribute path *attrpath* from [attribute set] *attrset*.
|
Select the attribute denoted by attribute path *attrpath* from [attribute set] *attrset*.
|
||||||
|
@ -42,12 +44,16 @@ If the attribute doesn’t exist, return the *expr* after `or` if provided, othe
|
||||||
|
|
||||||
An attribute path is a dot-separated list of [attribute names](./values.md#attribute-set).
|
An attribute path is a dot-separated list of [attribute names](./values.md#attribute-set).
|
||||||
|
|
||||||
|
> **Syntax**
|
||||||
|
>
|
||||||
> *attrpath* = *name* [ `.` *name* ]...
|
> *attrpath* = *name* [ `.` *name* ]...
|
||||||
|
|
||||||
[Attribute selection]: #attribute-selection
|
[Attribute selection]: #attribute-selection
|
||||||
|
|
||||||
## Has attribute
|
## Has attribute
|
||||||
|
|
||||||
|
> **Syntax**
|
||||||
|
>
|
||||||
> *attrset* `?` *attrpath*
|
> *attrset* `?` *attrpath*
|
||||||
|
|
||||||
Test whether [attribute set] *attrset* contains the attribute denoted by *attrpath*.
|
Test whether [attribute set] *attrset* contains the attribute denoted by *attrpath*.
|
||||||
|
@ -70,6 +76,8 @@ The `+` operator is overloaded to also work on strings and paths.
|
||||||
|
|
||||||
## String concatenation
|
## String concatenation
|
||||||
|
|
||||||
|
> **Syntax**
|
||||||
|
>
|
||||||
> *string* `+` *string*
|
> *string* `+` *string*
|
||||||
|
|
||||||
Concatenate two [string]s and merge their string contexts.
|
Concatenate two [string]s and merge their string contexts.
|
||||||
|
@ -78,6 +86,8 @@ Concatenate two [string]s and merge their string contexts.
|
||||||
|
|
||||||
## Path concatenation
|
## Path concatenation
|
||||||
|
|
||||||
|
> **Syntax**
|
||||||
|
>
|
||||||
> *path* `+` *path*
|
> *path* `+` *path*
|
||||||
|
|
||||||
Concatenate two [path]s.
|
Concatenate two [path]s.
|
||||||
|
@ -87,6 +97,8 @@ The result is a path.
|
||||||
|
|
||||||
## Path and string concatenation
|
## Path and string concatenation
|
||||||
|
|
||||||
|
> **Syntax**
|
||||||
|
>
|
||||||
> *path* + *string*
|
> *path* + *string*
|
||||||
|
|
||||||
Concatenate *[path]* with *[string]*.
|
Concatenate *[path]* with *[string]*.
|
||||||
|
@ -100,6 +112,8 @@ The result is a path.
|
||||||
|
|
||||||
## String and path concatenation
|
## String and path concatenation
|
||||||
|
|
||||||
|
> **Syntax**
|
||||||
|
>
|
||||||
> *string* + *path*
|
> *string* + *path*
|
||||||
|
|
||||||
Concatenate *[string]* with *[path]*.
|
Concatenate *[string]* with *[path]*.
|
||||||
|
@ -117,6 +131,8 @@ The result is a string.
|
||||||
|
|
||||||
## Update
|
## Update
|
||||||
|
|
||||||
|
> **Syntax**
|
||||||
|
>
|
||||||
> *attrset1* // *attrset2*
|
> *attrset1* // *attrset2*
|
||||||
|
|
||||||
Update [attribute set] *attrset1* with names and values from *attrset2*.
|
Update [attribute set] *attrset1* with names and values from *attrset2*.
|
||||||
|
|
|
@ -1,19 +1,12 @@
|
||||||
# String interpolation
|
# String interpolation
|
||||||
|
|
||||||
String interpolation is a language feature where a [string], [path], or [attribute name] can contain expressions enclosed in `${ }` (dollar-sign with curly brackets).
|
String interpolation is a language feature where a [string], [path], or [attribute name][attribute set] can contain expressions enclosed in `${ }` (dollar-sign with curly brackets).
|
||||||
|
|
||||||
Such a string is an *interpolated string*, and an expression inside is an *interpolated expression*.
|
Such a construct is called *interpolated string*, and the expression inside is an [interpolated expression](#interpolated-expression).
|
||||||
|
|
||||||
Interpolated expressions must evaluate to one of the following:
|
|
||||||
|
|
||||||
- a [string]
|
|
||||||
- a [path]
|
|
||||||
- a [derivation]
|
|
||||||
|
|
||||||
[string]: ./values.md#type-string
|
[string]: ./values.md#type-string
|
||||||
[path]: ./values.md#type-path
|
[path]: ./values.md#type-path
|
||||||
[attribute name]: ./values.md#attribute-set
|
[attribute set]: ./values.md#attribute-set
|
||||||
[derivation]: ../glossary.md#gloss-derivation
|
|
||||||
|
|
||||||
## Examples
|
## Examples
|
||||||
|
|
||||||
|
@ -70,13 +63,136 @@ you can instead write
|
||||||
|
|
||||||
### Attribute name
|
### Attribute name
|
||||||
|
|
||||||
Attribute names can be created dynamically with string interpolation:
|
<!--
|
||||||
|
FIXME: these examples are redundant with the main page on attribute sets.
|
||||||
|
figure out what to do about that
|
||||||
|
-->
|
||||||
|
|
||||||
```nix
|
Attribute names can be interpolated strings.
|
||||||
let name = "foo"; in
|
|
||||||
{
|
|
||||||
${name} = "bar";
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
{ foo = "bar"; }
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```nix
|
||||||
|
> let name = "foo"; in
|
||||||
|
> { ${name} = 123; }
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> { foo = 123; }
|
||||||
|
|
||||||
|
Attributes can be selected with interpolated strings.
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```nix
|
||||||
|
> let name = "foo"; in
|
||||||
|
> { foo = 123; }.${name}
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> 123
|
||||||
|
|
||||||
|
# Interpolated expression
|
||||||
|
|
||||||
|
An expression that is interpolated must evaluate to one of the following:
|
||||||
|
|
||||||
|
- a [string]
|
||||||
|
- a [path]
|
||||||
|
- an [attribute set] that has a `__toString` attribute or an `outPath` attribute
|
||||||
|
|
||||||
|
- `__toString` must be a function that takes the attribute set itself and returns a string
|
||||||
|
- `outPath` must be a string
|
||||||
|
|
||||||
|
This includes [derivations](./derivations.md) or [flake inputs](@docroot@/command-ref/new-cli/nix3-flake.md#flake-inputs) (experimental).
|
||||||
|
|
||||||
|
A string interpolates to itself.
|
||||||
|
|
||||||
|
A path in an interpolated expression is first copied into the Nix store, and the resulting string is the [store path] of the newly created [store object](../glossary.md#gloss-store-object).
|
||||||
|
|
||||||
|
[store path]: ../glossary.md#gloss-store-path
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```console
|
||||||
|
> $ mkdir foo
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> Reference the empty directory in an interpolated expression:
|
||||||
|
>
|
||||||
|
> ```nix
|
||||||
|
> "${./foo}"
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> "/nix/store/2hhl2nz5v0khbn06ys82nrk99aa1xxdw-foo"
|
||||||
|
|
||||||
|
A derivation interpolates to the [store path] of its first [output](./derivations.md#attr-outputs).
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```nix
|
||||||
|
> let
|
||||||
|
> pkgs = import <nixpkgs> {};
|
||||||
|
> in
|
||||||
|
> "${pkgs.hello}"
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> "/nix/store/4xpfqf29z4m8vbhrqcz064wfmb46w5r7-hello-2.12.1"
|
||||||
|
|
||||||
|
An attribute set interpolates to the return value of the function in its `__toString` attribute, applied to the attribute set itself.
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```nix
|
||||||
|
> let
|
||||||
|
> a = {
|
||||||
|
> value = 1;
|
||||||
|
> __toString = self: toString (self.value + 1);
|
||||||
|
> };
|
||||||
|
> in
|
||||||
|
> "${a}"
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> "2"
|
||||||
|
|
||||||
|
An attribute set also interpolates to the value of its `outPath` attribute.
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```nix
|
||||||
|
> let
|
||||||
|
> a = { outPath = "foo"; };
|
||||||
|
> in
|
||||||
|
> "${a}"
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> "foo"
|
||||||
|
|
||||||
|
If both `__toString` and `outPath` are present in an attribute set, `__toString` takes precedence.
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```nix
|
||||||
|
> let
|
||||||
|
> a = { __toString = _: "yes"; outPath = throw "no"; };
|
||||||
|
> in
|
||||||
|
> "${a}"
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> "yes"
|
||||||
|
|
||||||
|
If neither is present, an error is thrown.
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```nix
|
||||||
|
> let
|
||||||
|
> a = {};
|
||||||
|
> in
|
||||||
|
> "${a}"
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> error: cannot coerce a set to a string
|
||||||
|
>
|
||||||
|
> at «string»:4:2:
|
||||||
|
>
|
||||||
|
> 3| in
|
||||||
|
> 4| "${a}"
|
||||||
|
> | ^
|
||||||
|
|
|
@ -107,29 +107,24 @@
|
||||||
e.g. `~/foo` would be equivalent to `/home/edolstra/foo` for a user
|
e.g. `~/foo` would be equivalent to `/home/edolstra/foo` for a user
|
||||||
whose home directory is `/home/edolstra`.
|
whose home directory is `/home/edolstra`.
|
||||||
|
|
||||||
Paths can also be specified between angle brackets, e.g.
|
|
||||||
`<nixpkgs>`. This means that the directories listed in the
|
|
||||||
environment variable `NIX_PATH` will be searched for the given file
|
|
||||||
or directory name.
|
|
||||||
|
|
||||||
When an [interpolated string][string interpolation] evaluates to a path, the path is first copied into the Nix store and the resulting string is the [store path] of the newly created [store object].
|
|
||||||
|
|
||||||
[store path]: ../glossary.md#gloss-store-path
|
|
||||||
[store object]: ../glossary.md#gloss-store-object
|
|
||||||
|
|
||||||
For instance, evaluating `"${./foo.txt}"` will cause `foo.txt` in the current directory to be copied into the Nix store and result in the string `"/nix/store/<hash>-foo.txt"`.
|
For instance, evaluating `"${./foo.txt}"` will cause `foo.txt` in the current directory to be copied into the Nix store and result in the string `"/nix/store/<hash>-foo.txt"`.
|
||||||
|
|
||||||
Note that the Nix language assumes that all input files will remain _unchanged_ while evaluating a Nix expression.
|
Note that the Nix language assumes that all input files will remain _unchanged_ while evaluating a Nix expression.
|
||||||
For example, assume you used a file path in an interpolated string during a `nix repl` session.
|
For example, assume you used a file path in an interpolated string during a `nix repl` session.
|
||||||
Later in the same session, after having changed the file contents, evaluating the interpolated string with the file path again might not return a new store path, since Nix might not re-read the file contents.
|
Later in the same session, after having changed the file contents, evaluating the interpolated string with the file path again might not return a new [store path], since Nix might not re-read the file contents.
|
||||||
|
|
||||||
Paths themselves, except those in angle brackets (`< >`), support [string interpolation].
|
[store path]: ../glossary.md#gloss-store-path
|
||||||
|
|
||||||
|
Paths can include [string interpolation] and can themselves be [interpolated in other expressions].
|
||||||
|
[interpolated in other expressions]: ./string-interpolation.md#interpolated-expressions
|
||||||
|
|
||||||
At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path.
|
At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path.
|
||||||
|
|
||||||
`a.${foo}/b.${bar}` is a syntactically valid division operation.
|
`a.${foo}/b.${bar}` is a syntactically valid division operation.
|
||||||
`./a.${foo}/b.${bar}` is a path.
|
`./a.${foo}/b.${bar}` is a path.
|
||||||
|
|
||||||
|
[Lookup paths](./constructs/lookup-path.md) such as `<nixpkgs>` resolve to path values.
|
||||||
|
|
||||||
- <a id="type-boolean" href="#type-boolean">Boolean</a>
|
- <a id="type-boolean" href="#type-boolean">Boolean</a>
|
||||||
|
|
||||||
*Booleans* with values `true` and `false`.
|
*Booleans* with values `true` and `false`.
|
||||||
|
@ -167,13 +162,17 @@ An attribute set is a collection of name-value-pairs (called *attributes*) enclo
|
||||||
An attribute name can be an identifier or a [string](#string).
|
An attribute name can be an identifier or a [string](#string).
|
||||||
An identifier must start with a letter (`a-z`, `A-Z`) or underscore (`_`), and can otherwise contain letters (`a-z`, `A-Z`), numbers (`0-9`), underscores (`_`), apostrophes (`'`), or dashes (`-`).
|
An identifier must start with a letter (`a-z`, `A-Z`) or underscore (`_`), and can otherwise contain letters (`a-z`, `A-Z`), numbers (`0-9`), underscores (`_`), apostrophes (`'`), or dashes (`-`).
|
||||||
|
|
||||||
|
> **Syntax**
|
||||||
|
>
|
||||||
> *name* = *identifier* | *string* \
|
> *name* = *identifier* | *string* \
|
||||||
> *identifier* ~ `[a-zA-Z_][a-zA-Z0-9_'-]*`
|
> *identifier* ~ `[a-zA-Z_][a-zA-Z0-9_'-]*`
|
||||||
|
|
||||||
Names and values are separated by an equal sign (`=`).
|
Names and values are separated by an equal sign (`=`).
|
||||||
Each value is an arbitrary expression terminated by a semicolon (`;`).
|
Each value is an arbitrary expression terminated by a semicolon (`;`).
|
||||||
|
|
||||||
> *attrset* = `{` [ *name* `=` *expr* `;` `]`... `}`
|
> **Syntax**
|
||||||
|
>
|
||||||
|
> *attrset* = `{` [ *name* `=` *expr* `;` ]... `}`
|
||||||
|
|
||||||
Attributes can appear in any order.
|
Attributes can appear in any order.
|
||||||
An attribute name may only occur once.
|
An attribute name may only occur once.
|
||||||
|
|
|
@ -1 +1,12 @@
|
||||||
# Nix Release Notes
|
# Nix Release Notes
|
||||||
|
|
||||||
|
Nix has a release cycle of roughly 6 weeks.
|
||||||
|
Notable changes and additions are announced in the release notes for each version.
|
||||||
|
|
||||||
|
Bugfixes can be backported on request to previous Nix releases.
|
||||||
|
We typically backport only as far back as the Nix version used in the latest NixOS release, which is announced in the [NixOS release notes](https://nixos.org/manual/nixos/stable/release-notes.html#ch-release-notes).
|
||||||
|
|
||||||
|
Backports never skip releases.
|
||||||
|
If a feature is backported to version `x.y`, it must also be available in version `x.(y+1)`.
|
||||||
|
This ensures that upgrading from an older version with backports is still safe and no backported functionality will go missing.
|
||||||
|
|
||||||
|
|
|
@ -2,7 +2,6 @@
|
||||||
|
|
||||||
* On Linux, Nix can now run builds in a user namespace where they run
|
* On Linux, Nix can now run builds in a user namespace where they run
|
||||||
as root (UID 0) and have 65,536 UIDs available.
|
as root (UID 0) and have 65,536 UIDs available.
|
||||||
<!-- FIXME: move this to its own section about system features -->
|
|
||||||
This is primarily useful for running containers such as `systemd-nspawn`
|
This is primarily useful for running containers such as `systemd-nspawn`
|
||||||
inside a Nix build. For an example, see [`tests/systemd-nspawn/nix`][nspawn].
|
inside a Nix build. For an example, see [`tests/systemd-nspawn/nix`][nspawn].
|
||||||
|
|
||||||
|
|
|
@ -1 +1,9 @@
|
||||||
# Release X.Y (202?-??-??)
|
# Release X.Y (202?-??-??)
|
||||||
|
|
||||||
|
- [URL flake references](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references) now support [percent-encoded](https://datatracker.ietf.org/doc/html/rfc3986#section-2.1) characters.
|
||||||
|
|
||||||
|
- [Path-like flake references](@docroot@/command-ref/new-cli/nix3-flake.md#path-like-syntax) now accept arbitrary unicode characters (except `#` and `?`).
|
||||||
|
|
||||||
|
- The experimental feature `repl-flake` is no longer needed, as its functionality is now part of the `flakes` experimental feature. To get the previous behavior, use the `--file/--expr` flags accordingly.
|
||||||
|
|
||||||
|
- Introduce new flake installable syntax `flakeref#.attrPath` where the "." prefix denotes no searching of default attribute prefixes like `packages.<SYSTEM>` or `legacyPackages.<SYSTEM>`.
|
|
@ -44,63 +44,6 @@ rec {
|
||||||
|
|
||||||
optionalString = cond: string: if cond then string else "";
|
optionalString = cond: string: if cond then string else "";
|
||||||
|
|
||||||
showSetting = { useAnchors }: name: { description, documentDefault, defaultValue, aliases, value, experimentalFeature }:
|
|
||||||
let
|
|
||||||
result = squash ''
|
|
||||||
- ${if useAnchors
|
|
||||||
then ''<span id="conf-${name}">[`${name}`](#conf-${name})</span>''
|
|
||||||
else ''`${name}`''}
|
|
||||||
|
|
||||||
${indent " " body}
|
|
||||||
'';
|
|
||||||
|
|
||||||
experimentalFeatureNote = optionalString (experimentalFeature != null) ''
|
|
||||||
> **Warning**
|
|
||||||
> This setting is part of an
|
|
||||||
> [experimental feature](@docroot@/contributing/experimental-features.md).
|
|
||||||
|
|
||||||
To change this setting, you need to make sure the corresponding experimental feature,
|
|
||||||
[`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}),
|
|
||||||
is enabled.
|
|
||||||
For example, include the following in [`nix.conf`](#):
|
|
||||||
|
|
||||||
```
|
|
||||||
extra-experimental-features = ${experimentalFeature}
|
|
||||||
${name} = ...
|
|
||||||
```
|
|
||||||
'';
|
|
||||||
|
|
||||||
# separate body to cleanly handle indentation
|
|
||||||
body = ''
|
|
||||||
${description}
|
|
||||||
|
|
||||||
${experimentalFeatureNote}
|
|
||||||
|
|
||||||
**Default:** ${showDefault documentDefault defaultValue}
|
|
||||||
|
|
||||||
${showAliases aliases}
|
|
||||||
'';
|
|
||||||
|
|
||||||
showDefault = documentDefault: defaultValue:
|
|
||||||
if documentDefault then
|
|
||||||
# a StringMap value type is specified as a string, but
|
|
||||||
# this shows the value type. The empty stringmap is `null` in
|
|
||||||
# JSON, but that converts to `{ }` here.
|
|
||||||
if defaultValue == "" || defaultValue == [] || isAttrs defaultValue
|
|
||||||
then "*empty*"
|
|
||||||
else if isBool defaultValue then
|
|
||||||
if defaultValue then "`true`" else "`false`"
|
|
||||||
else "`${toString defaultValue}`"
|
|
||||||
else "*machine-specific*";
|
|
||||||
|
|
||||||
showAliases = aliases:
|
|
||||||
optionalString (aliases != [])
|
|
||||||
"**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";
|
|
||||||
|
|
||||||
in result;
|
|
||||||
|
|
||||||
indent = prefix: s:
|
indent = prefix: s:
|
||||||
concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
|
concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
|
||||||
|
|
||||||
showSettings = args: settingsInfo: concatStrings (attrValues (mapAttrs (showSetting args) settingsInfo));
|
|
||||||
}
|
}
|
||||||
|
|
12
flake.lock
12
flake.lock
|
@ -34,16 +34,16 @@
|
||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1695124524,
|
"lastModified": 1695283060,
|
||||||
"narHash": "sha256-trXDytVCqf3KryQQQrHOZKUabu1/lB8/ndOAuZKQrOE=",
|
"narHash": "sha256-CJz71xhCLlRkdFUSQEL0pIAAfcnWFXMzd9vXhPrnrEg=",
|
||||||
"owner": "edolstra",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "a3d30b525535e3158221abc1a957ce798ab159fe",
|
"rev": "31ed632c692e6a36cfc18083b88ece892f863ed4",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "edolstra",
|
"owner": "NixOS",
|
||||||
"ref": "fix-aws-sdk-cpp",
|
"ref": "nixos-23.05-small",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
|
|
197
flake.nix
197
flake.nix
|
@ -1,8 +1,7 @@
|
||||||
{
|
{
|
||||||
description = "The purely functional package manager - but super!";
|
description = "The purely functional package manager - but super!";
|
||||||
|
|
||||||
#inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small";
|
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small";
|
||||||
inputs.nixpkgs.url = "github:edolstra/nixpkgs/fix-aws-sdk-cpp";
|
|
||||||
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
||||||
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
|
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
|
||||||
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
|
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
|
||||||
|
@ -25,8 +24,11 @@
|
||||||
linuxSystems = linux32BitSystems ++ linux64BitSystems;
|
linuxSystems = linux32BitSystems ++ linux64BitSystems;
|
||||||
darwinSystems = [ "x86_64-darwin" "aarch64-darwin" ];
|
darwinSystems = [ "x86_64-darwin" "aarch64-darwin" ];
|
||||||
systems = linuxSystems ++ darwinSystems;
|
systems = linuxSystems ++ darwinSystems;
|
||||||
|
|
||||||
crossSystems = [ "armv6l-linux" "armv7l-linux" ];
|
crossSystems = [
|
||||||
|
"armv6l-linux" "armv7l-linux"
|
||||||
|
"x86_64-freebsd13" "x86_64-netbsd"
|
||||||
|
];
|
||||||
|
|
||||||
stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ];
|
stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ];
|
||||||
|
|
||||||
|
@ -57,44 +59,55 @@
|
||||||
# that would interfere with repo semantics.
|
# that would interfere with repo semantics.
|
||||||
fileset.fileFilter (f: f.name != ".gitignore") ./.;
|
fileset.fileFilter (f: f.name != ".gitignore") ./.;
|
||||||
|
|
||||||
|
configureFiles = fileset.unions [
|
||||||
|
./.version
|
||||||
|
./configure.ac
|
||||||
|
./m4
|
||||||
|
# TODO: do we really need README.md? It doesn't seem used in the build.
|
||||||
|
./README.md
|
||||||
|
];
|
||||||
|
|
||||||
|
topLevelBuildFiles = fileset.unions [
|
||||||
|
./local.mk
|
||||||
|
./Makefile
|
||||||
|
./Makefile.config.in
|
||||||
|
./mk
|
||||||
|
];
|
||||||
|
|
||||||
|
functionalTestFiles = fileset.unions [
|
||||||
|
./tests/functional
|
||||||
|
(fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
|
||||||
|
];
|
||||||
|
|
||||||
nixSrc = fileset.toSource {
|
nixSrc = fileset.toSource {
|
||||||
root = ./.;
|
root = ./.;
|
||||||
fileset = fileset.intersect baseFiles (
|
fileset = fileset.intersect baseFiles (fileset.unions [
|
||||||
fileset.difference
|
configureFiles
|
||||||
(fileset.unions [
|
topLevelBuildFiles
|
||||||
./.version
|
./boehmgc-coroutine-sp-fallback.diff
|
||||||
./boehmgc-coroutine-sp-fallback.diff
|
./doc
|
||||||
./bootstrap.sh
|
./misc
|
||||||
./configure.ac
|
./precompiled-headers.h
|
||||||
./doc
|
./src
|
||||||
./local.mk
|
./unit-test-data
|
||||||
./m4
|
./COPYING
|
||||||
./Makefile
|
./scripts/local.mk
|
||||||
./Makefile.config.in
|
functionalTestFiles
|
||||||
./misc
|
]);
|
||||||
./mk
|
|
||||||
./precompiled-headers.h
|
|
||||||
./src
|
|
||||||
./tests
|
|
||||||
./COPYING
|
|
||||||
./scripts/local.mk
|
|
||||||
(fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
|
|
||||||
# TODO: do we really need README.md? It doesn't seem used in the build.
|
|
||||||
./README.md
|
|
||||||
])
|
|
||||||
(fileset.unions [
|
|
||||||
# Removed file sets
|
|
||||||
./tests/nixos
|
|
||||||
./tests/installer
|
|
||||||
])
|
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
# Memoize nixpkgs for different platforms for efficiency.
|
# Memoize nixpkgs for different platforms for efficiency.
|
||||||
nixpkgsFor = forAllSystems
|
nixpkgsFor = forAllSystems
|
||||||
(system: let
|
(system: let
|
||||||
make-pkgs = crossSystem: stdenv: import nixpkgs {
|
make-pkgs = crossSystem: stdenv: import nixpkgs {
|
||||||
inherit system crossSystem;
|
localSystem = {
|
||||||
|
inherit system;
|
||||||
|
};
|
||||||
|
crossSystem = if crossSystem == null then null else {
|
||||||
|
system = crossSystem;
|
||||||
|
} // lib.optionalAttrs (crossSystem == "x86_64-freebsd13") {
|
||||||
|
useLLVM = true;
|
||||||
|
};
|
||||||
overlays = [
|
overlays = [
|
||||||
(overlayFor (p: p.${stdenv}))
|
(overlayFor (p: p.${stdenv}))
|
||||||
];
|
];
|
||||||
|
@ -180,9 +193,9 @@
|
||||||
libarchive
|
libarchive
|
||||||
boost
|
boost
|
||||||
lowdown-nix
|
lowdown-nix
|
||||||
|
libsodium
|
||||||
]
|
]
|
||||||
++ lib.optionals stdenv.isLinux [libseccomp]
|
++ lib.optionals stdenv.isLinux [libseccomp]
|
||||||
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
|
|
||||||
++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
|
++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
|
||||||
|
|
||||||
checkDeps = [
|
checkDeps = [
|
||||||
|
@ -258,7 +271,14 @@
|
||||||
"-${client.version}-against-${daemon.version}";
|
"-${client.version}-against-${daemon.version}";
|
||||||
inherit version;
|
inherit version;
|
||||||
|
|
||||||
src = nixSrc;
|
src = fileset.toSource {
|
||||||
|
root = ./.;
|
||||||
|
fileset = fileset.intersect baseFiles (fileset.unions [
|
||||||
|
configureFiles
|
||||||
|
topLevelBuildFiles
|
||||||
|
functionalTestFiles
|
||||||
|
]);
|
||||||
|
};
|
||||||
|
|
||||||
VERSION_SUFFIX = versionSuffix;
|
VERSION_SUFFIX = versionSuffix;
|
||||||
|
|
||||||
|
@ -268,7 +288,9 @@
|
||||||
|
|
||||||
enableParallelBuilding = true;
|
enableParallelBuilding = true;
|
||||||
|
|
||||||
configureFlags = testConfigureFlags; # otherwise configure fails
|
configureFlags =
|
||||||
|
testConfigureFlags # otherwise configure fails
|
||||||
|
++ [ "--disable-build" ];
|
||||||
dontBuild = true;
|
dontBuild = true;
|
||||||
doInstallCheck = true;
|
doInstallCheck = true;
|
||||||
|
|
||||||
|
@ -276,7 +298,10 @@
|
||||||
mkdir -p $out
|
mkdir -p $out
|
||||||
'';
|
'';
|
||||||
|
|
||||||
installCheckPhase = "make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES";
|
installCheckPhase = ''
|
||||||
|
mkdir -p src/nix-channel
|
||||||
|
make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES
|
||||||
|
'';
|
||||||
};
|
};
|
||||||
|
|
||||||
binaryTarball = nix: pkgs:
|
binaryTarball = nix: pkgs:
|
||||||
|
@ -452,7 +477,15 @@
|
||||||
passthru.perl-bindings = with final; perl.pkgs.toPerlModule (currentStdenv.mkDerivation {
|
passthru.perl-bindings = with final; perl.pkgs.toPerlModule (currentStdenv.mkDerivation {
|
||||||
name = "nix-super-perl-${version}";
|
name = "nix-super-perl-${version}";
|
||||||
|
|
||||||
src = self;
|
src = fileset.toSource {
|
||||||
|
root = ./.;
|
||||||
|
fileset = fileset.intersect baseFiles (fileset.unions [
|
||||||
|
./perl
|
||||||
|
./.version
|
||||||
|
./m4
|
||||||
|
./mk
|
||||||
|
]);
|
||||||
|
};
|
||||||
|
|
||||||
nativeBuildInputs =
|
nativeBuildInputs =
|
||||||
[ buildPackages.autoconf-archive
|
[ buildPackages.autoconf-archive
|
||||||
|
@ -502,18 +535,6 @@
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
nixos-lib = import (nixpkgs + "/nixos/lib") { };
|
|
||||||
|
|
||||||
# https://nixos.org/manual/nixos/unstable/index.html#sec-calling-nixos-tests
|
|
||||||
runNixOSTestFor = system: test: nixos-lib.runTest {
|
|
||||||
imports = [ test ];
|
|
||||||
hostPkgs = nixpkgsFor.${system}.native;
|
|
||||||
defaults = {
|
|
||||||
nixpkgs.pkgs = nixpkgsFor.${system}.native;
|
|
||||||
};
|
|
||||||
_module.args.nixpkgs = nixpkgs;
|
|
||||||
};
|
|
||||||
|
|
||||||
in {
|
in {
|
||||||
# A Nixpkgs overlay that overrides the 'nix' and
|
# A Nixpkgs overlay that overrides the 'nix' and
|
||||||
# 'nix.perl-bindings' packages.
|
# 'nix.perl-bindings' packages.
|
||||||
|
@ -620,49 +641,29 @@
|
||||||
};
|
};
|
||||||
|
|
||||||
# System tests.
|
# System tests.
|
||||||
tests.authorization = runNixOSTestFor "x86_64-linux" ./tests/nixos/authorization.nix;
|
tests = import ./tests/nixos { inherit lib nixpkgs nixpkgsFor; } // {
|
||||||
|
|
||||||
tests.remoteBuilds = runNixOSTestFor "x86_64-linux" ./tests/nixos/remote-builds.nix;
|
# Make sure that nix-env still produces the exact same result
|
||||||
|
# on a particular version of Nixpkgs.
|
||||||
|
evalNixpkgs =
|
||||||
|
with nixpkgsFor.x86_64-linux.native;
|
||||||
|
runCommand "eval-nixos" { buildInputs = [ nix ]; }
|
||||||
|
''
|
||||||
|
type -p nix-env
|
||||||
|
# Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
|
||||||
|
time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
|
||||||
|
[[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]]
|
||||||
|
mkdir $out
|
||||||
|
'';
|
||||||
|
|
||||||
tests.nix-copy-closure = runNixOSTestFor "x86_64-linux" ./tests/nixos/nix-copy-closure.nix;
|
nixpkgsLibTests =
|
||||||
|
forAllSystems (system:
|
||||||
tests.nix-copy = runNixOSTestFor "x86_64-linux" ./tests/nixos/nix-copy.nix;
|
import (nixpkgs + "/lib/tests/release.nix")
|
||||||
|
{ pkgs = nixpkgsFor.${system}.native;
|
||||||
tests.nssPreload = runNixOSTestFor "x86_64-linux" ./tests/nixos/nss-preload.nix;
|
nixVersions = [ self.packages.${system}.nix ];
|
||||||
|
}
|
||||||
tests.githubFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/github-flakes.nix;
|
);
|
||||||
|
};
|
||||||
tests.sourcehutFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/sourcehut-flakes.nix;
|
|
||||||
|
|
||||||
tests.tarballFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/tarball-flakes.nix;
|
|
||||||
|
|
||||||
tests.containers = runNixOSTestFor "x86_64-linux" ./tests/nixos/containers/containers.nix;
|
|
||||||
|
|
||||||
tests.setuid = lib.genAttrs
|
|
||||||
["i686-linux" "x86_64-linux"]
|
|
||||||
(system: runNixOSTestFor system ./tests/nixos/setuid.nix);
|
|
||||||
|
|
||||||
|
|
||||||
# Make sure that nix-env still produces the exact same result
|
|
||||||
# on a particular version of Nixpkgs.
|
|
||||||
tests.evalNixpkgs =
|
|
||||||
with nixpkgsFor.x86_64-linux.native;
|
|
||||||
runCommand "eval-nixos" { buildInputs = [ nix ]; }
|
|
||||||
''
|
|
||||||
type -p nix-env
|
|
||||||
# Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
|
|
||||||
time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
|
|
||||||
[[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]]
|
|
||||||
mkdir $out
|
|
||||||
'';
|
|
||||||
|
|
||||||
tests.nixpkgsLibTests =
|
|
||||||
forAllSystems (system:
|
|
||||||
import (nixpkgs + "/lib/tests/release.nix")
|
|
||||||
{ pkgs = nixpkgsFor.${system}.native;
|
|
||||||
nixVersions = [ self.packages.${system}.nix ];
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" {
|
metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" {
|
||||||
pkgs = nixpkgsFor.x86_64-linux.native;
|
pkgs = nixpkgsFor.x86_64-linux.native;
|
||||||
|
@ -733,6 +734,9 @@
|
||||||
|
|
||||||
devShells = let
|
devShells = let
|
||||||
makeShell = pkgs: stdenv:
|
makeShell = pkgs: stdenv:
|
||||||
|
let
|
||||||
|
canRunInstalled = stdenv.buildPlatform.canExecute stdenv.hostPlatform;
|
||||||
|
in
|
||||||
with commonDeps { inherit pkgs; };
|
with commonDeps { inherit pkgs; };
|
||||||
stdenv.mkDerivation {
|
stdenv.mkDerivation {
|
||||||
name = "nix-super";
|
name = "nix-super";
|
||||||
|
@ -740,13 +744,18 @@
|
||||||
outputs = [ "out" "dev" "doc" ];
|
outputs = [ "out" "dev" "doc" ];
|
||||||
|
|
||||||
nativeBuildInputs = nativeBuildDeps
|
nativeBuildInputs = nativeBuildDeps
|
||||||
++ (lib.optionals stdenv.cc.isClang [ pkgs.bear pkgs.clang-tools ]);
|
++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear
|
||||||
|
++ lib.optional
|
||||||
|
(stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform)
|
||||||
|
pkgs.buildPackages.clang-tools
|
||||||
|
;
|
||||||
|
|
||||||
buildInputs = buildDeps ++ propagatedDeps
|
buildInputs = buildDeps ++ propagatedDeps
|
||||||
++ awsDeps ++ checkDeps ++ internalApiDocsDeps;
|
++ awsDeps ++ checkDeps ++ internalApiDocsDeps;
|
||||||
|
|
||||||
configureFlags = configureFlags
|
configureFlags = configureFlags
|
||||||
++ testConfigureFlags ++ internalApiDocsConfigureFlags;
|
++ testConfigureFlags ++ internalApiDocsConfigureFlags
|
||||||
|
++ lib.optional (!canRunInstalled) "--disable-doc-gen";
|
||||||
|
|
||||||
enableParallelBuilding = true;
|
enableParallelBuilding = true;
|
||||||
|
|
||||||
|
|
2
local.mk
2
local.mk
|
@ -1,5 +1,3 @@
|
||||||
clean-files += Makefile.config
|
|
||||||
|
|
||||||
GLOBAL_CXXFLAGS += -Wno-deprecated-declarations -Werror=switch
|
GLOBAL_CXXFLAGS += -Wno-deprecated-declarations -Werror=switch
|
||||||
# Allow switch-enum to be overridden for files that do not support it, usually because of dependency headers.
|
# Allow switch-enum to be overridden for files that do not support it, usually because of dependency headers.
|
||||||
ERROR_SWITCH_ENUM = -Werror=switch-enum
|
ERROR_SWITCH_ENUM = -Werror=switch-enum
|
||||||
|
|
|
@ -96,8 +96,10 @@ What constitutes a trivial pull request is up to maintainers' judgement.
|
||||||
Pull requests and issues that are deemed important and controversial are discussed by the team during discussion meetings.
|
Pull requests and issues that are deemed important and controversial are discussed by the team during discussion meetings.
|
||||||
|
|
||||||
This may be where the merit of the change itself or the implementation strategy is contested by a team member.
|
This may be where the merit of the change itself or the implementation strategy is contested by a team member.
|
||||||
|
Whenever the discussion opens up questions about the process or this team's goals, this may indicate that the change is too large in scope.
|
||||||
|
In that case it is taken off the board to be reconsidered by the author or broken down into smaller pieces that are less far-reaching and can be reviewed independently.
|
||||||
|
|
||||||
As a general guideline, the order of items is determined as follows:
|
As a general guideline, the order of items to discuss is determined as follows:
|
||||||
|
|
||||||
- Prioritise pull requests over issues
|
- Prioritise pull requests over issues
|
||||||
|
|
||||||
|
|
|
@ -1,11 +1,15 @@
|
||||||
|
test_dir=tests/functional
|
||||||
|
|
||||||
|
test=$(echo -n "$test" | sed -e "s|^$test_dir/||")
|
||||||
|
|
||||||
TESTS_ENVIRONMENT=("TEST_NAME=${test%.*}" 'NIX_REMOTE=')
|
TESTS_ENVIRONMENT=("TEST_NAME=${test%.*}" 'NIX_REMOTE=')
|
||||||
|
|
||||||
: ${BASH:=/usr/bin/env bash}
|
: ${BASH:=/usr/bin/env bash}
|
||||||
|
|
||||||
init_test () {
|
init_test () {
|
||||||
cd tests && env "${TESTS_ENVIRONMENT[@]}" $BASH -e init.sh 2>/dev/null > /dev/null
|
cd "$test_dir" && env "${TESTS_ENVIRONMENT[@]}" $BASH -e init.sh 2>/dev/null > /dev/null
|
||||||
}
|
}
|
||||||
|
|
||||||
run_test_proper () {
|
run_test_proper () {
|
||||||
cd $(dirname $test) && env "${TESTS_ENVIRONMENT[@]}" $BASH -e $(basename $test)
|
cd "$test_dir/$(dirname $test)" && env "${TESTS_ENVIRONMENT[@]}" $BASH -e $(basename $test)
|
||||||
}
|
}
|
||||||
|
|
|
@ -87,6 +87,6 @@ define build-program
|
||||||
# Phony target to run this program (typically as a dependency of 'check').
|
# Phony target to run this program (typically as a dependency of 'check').
|
||||||
.PHONY: $(1)_RUN
|
.PHONY: $(1)_RUN
|
||||||
$(1)_RUN: $$($(1)_PATH)
|
$(1)_RUN: $$($(1)_PATH)
|
||||||
$(trace-test) $$($(1)_PATH)
|
$(trace-test) $$(UNIT_TEST_ENV) $$($(1)_PATH)
|
||||||
|
|
||||||
endef
|
endef
|
||||||
|
|
|
@ -1,6 +1,12 @@
|
||||||
makefiles = local.mk
|
makefiles = local.mk
|
||||||
|
|
||||||
GLOBAL_CXXFLAGS += -g -Wall -std=c++2a -I ../src
|
GLOBAL_CXXFLAGS += -g -Wall -std=c++2a
|
||||||
|
|
||||||
|
# A convenience for concurrent development of Nix and its Perl bindings.
|
||||||
|
# Not needed in a standalone build of the Perl bindings.
|
||||||
|
ifneq ("$(wildcard ../src)", "")
|
||||||
|
GLOBAL_CXXFLAGS += -I ../src
|
||||||
|
endif
|
||||||
|
|
||||||
-include Makefile.config
|
-include Makefile.config
|
||||||
|
|
||||||
|
|
|
@ -452,6 +452,14 @@ EOF
|
||||||
# a row for different files.
|
# a row for different files.
|
||||||
if [ -e "$profile_target$PROFILE_BACKUP_SUFFIX" ]; then
|
if [ -e "$profile_target$PROFILE_BACKUP_SUFFIX" ]; then
|
||||||
# this backup process first released in Nix 2.1
|
# this backup process first released in Nix 2.1
|
||||||
|
|
||||||
|
if diff -q "$profile_target$PROFILE_BACKUP_SUFFIX" "$profile_target" > /dev/null; then
|
||||||
|
# a backup file for the rc-file exist, but they are identical,
|
||||||
|
# so we can safely ignore it and overwrite it with the same
|
||||||
|
# content later
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
failure <<EOF
|
failure <<EOF
|
||||||
I back up shell profile/rc scripts before I add Nix to them.
|
I back up shell profile/rc scripts before I add Nix to them.
|
||||||
I need to back up $profile_target to $profile_target$PROFILE_BACKUP_SUFFIX,
|
I need to back up $profile_target to $profile_target$PROFILE_BACKUP_SUFFIX,
|
||||||
|
|
|
@ -98,7 +98,7 @@ EvalCommand::EvalCommand()
|
||||||
EvalCommand::~EvalCommand()
|
EvalCommand::~EvalCommand()
|
||||||
{
|
{
|
||||||
if (evalState)
|
if (evalState)
|
||||||
evalState->printStats();
|
evalState->maybePrintStats();
|
||||||
}
|
}
|
||||||
|
|
||||||
ref<Store> EvalCommand::getEvalStore()
|
ref<Store> EvalCommand::getEvalStore()
|
||||||
|
|
|
@ -28,6 +28,11 @@ namespace nix {
|
||||||
std::vector<std::string> InstallableFlake::getActualAttrPaths()
|
std::vector<std::string> InstallableFlake::getActualAttrPaths()
|
||||||
{
|
{
|
||||||
std::vector<std::string> res;
|
std::vector<std::string> res;
|
||||||
|
if (attrPaths.size() == 1 && attrPaths.front().starts_with(".")){
|
||||||
|
attrPaths.front().erase(0,1);
|
||||||
|
res.push_back(attrPaths.front());
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
for (auto & prefix : prefixes)
|
for (auto & prefix : prefixes)
|
||||||
res.push_back(prefix + *attrPaths.begin());
|
res.push_back(prefix + *attrPaths.begin());
|
||||||
|
|
|
@ -383,11 +383,16 @@ void completeFlakeRefWithFragment(
|
||||||
? prefix
|
? prefix
|
||||||
: prefix.substr(hash + 1);
|
: prefix.substr(hash + 1);
|
||||||
|
|
||||||
|
std::string prefixRoot = "";
|
||||||
|
if (fragment.starts_with(".")){
|
||||||
|
fragment = fragment.substr(1);
|
||||||
|
prefixRoot = ".";
|
||||||
|
}
|
||||||
|
|
||||||
auto flakeRefS =
|
auto flakeRefS =
|
||||||
isAttrPath
|
isAttrPath
|
||||||
? std::string("flake:default")
|
? std::string("flake:default")
|
||||||
: std::string(prefix.substr(0, hash));
|
: std::string(prefix.substr(0, hash));
|
||||||
|
|
||||||
auto flakeRef = parseFlakeRef(expandTilde(flakeRefS), absPath("."));
|
auto flakeRef = parseFlakeRef(expandTilde(flakeRefS), absPath("."));
|
||||||
|
|
||||||
auto evalCache = openEvalCache(*evalState,
|
auto evalCache = openEvalCache(*evalState,
|
||||||
|
@ -395,6 +400,9 @@ void completeFlakeRefWithFragment(
|
||||||
|
|
||||||
auto root = evalCache->getRoot();
|
auto root = evalCache->getRoot();
|
||||||
|
|
||||||
|
if (prefixRoot == "."){
|
||||||
|
attrPathPrefixes.clear();
|
||||||
|
}
|
||||||
/* Complete 'fragment' relative to all the
|
/* Complete 'fragment' relative to all the
|
||||||
attrpath prefixes as well as the root of the
|
attrpath prefixes as well as the root of the
|
||||||
flake. */
|
flake. */
|
||||||
|
@ -422,7 +430,7 @@ void completeFlakeRefWithFragment(
|
||||||
if (isAttrPath)
|
if (isAttrPath)
|
||||||
completions->add(concatStringsSep(".", evalState->symbols.resolve(attrPath2)));
|
completions->add(concatStringsSep(".", evalState->symbols.resolve(attrPath2)));
|
||||||
else
|
else
|
||||||
completions->add(flakeRefS + "#" + concatStringsSep(".", evalState->symbols.resolve(attrPath2)));
|
completions->add(flakeRefS + "#" + prefixRoot + concatStringsSep(".", evalState->symbols.resolve(attrPath2)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -433,7 +441,7 @@ void completeFlakeRefWithFragment(
|
||||||
for (auto & attrPath : defaultFlakeAttrPaths) {
|
for (auto & attrPath : defaultFlakeAttrPaths) {
|
||||||
auto attr = root->findAlongAttrPath(parseAttrPath(*evalState, attrPath));
|
auto attr = root->findAlongAttrPath(parseAttrPath(*evalState, attrPath));
|
||||||
if (!attr) continue;
|
if (!attr) continue;
|
||||||
completions->add(flakeRefS + "#");
|
completions->add(flakeRefS + "#" + prefixRoot);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -922,7 +922,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
|
||||||
|
|
||||||
case nString:
|
case nString:
|
||||||
str << ANSI_WARNING;
|
str << ANSI_WARNING;
|
||||||
printLiteralString(str, v.string.s);
|
printLiteralString(str, v.string_view());
|
||||||
str << ANSI_NORMAL;
|
str << ANSI_NORMAL;
|
||||||
break;
|
break;
|
||||||
|
|
||||||
|
|
|
@ -440,8 +440,8 @@ Value & AttrCursor::forceValue()
|
||||||
|
|
||||||
if (root->db && (!cachedValue || std::get_if<placeholder_t>(&cachedValue->second))) {
|
if (root->db && (!cachedValue || std::get_if<placeholder_t>(&cachedValue->second))) {
|
||||||
if (v.type() == nString)
|
if (v.type() == nString)
|
||||||
cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context),
|
cachedValue = {root->db->setString(getKey(), v.c_str(), v.context()),
|
||||||
string_t{v.string.s, {}}};
|
string_t{v.c_str(), {}}};
|
||||||
else if (v.type() == nPath) {
|
else if (v.type() == nPath) {
|
||||||
auto path = v.path().path;
|
auto path = v.path().path;
|
||||||
cachedValue = {root->db->setString(getKey(), path.abs()), string_t{path.abs(), {}}};
|
cachedValue = {root->db->setString(getKey(), path.abs()), string_t{path.abs(), {}}};
|
||||||
|
@ -582,7 +582,7 @@ std::string AttrCursor::getString()
|
||||||
if (v.type() != nString && v.type() != nPath)
|
if (v.type() != nString && v.type() != nPath)
|
||||||
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
|
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
|
||||||
|
|
||||||
return v.type() == nString ? v.string.s : v.path().to_string();
|
return v.type() == nString ? v.c_str() : v.path().to_string();
|
||||||
}
|
}
|
||||||
|
|
||||||
string_t AttrCursor::getStringWithContext()
|
string_t AttrCursor::getStringWithContext()
|
||||||
|
@ -624,7 +624,7 @@ string_t AttrCursor::getStringWithContext()
|
||||||
if (v.type() == nString) {
|
if (v.type() == nString) {
|
||||||
NixStringContext context;
|
NixStringContext context;
|
||||||
copyContext(v, context);
|
copyContext(v, context);
|
||||||
return {v.string.s, std::move(context)};
|
return {v.c_str(), std::move(context)};
|
||||||
}
|
}
|
||||||
else if (v.type() == nPath)
|
else if (v.type() == nPath)
|
||||||
return {v.path().to_string(), {}};
|
return {v.path().to_string(), {}};
|
||||||
|
|
|
@ -29,10 +29,12 @@ struct EvalSettings : Config
|
||||||
this, false, "restrict-eval",
|
this, false, "restrict-eval",
|
||||||
R"(
|
R"(
|
||||||
If set to `true`, the Nix evaluator will not allow access to any
|
If set to `true`, the Nix evaluator will not allow access to any
|
||||||
files outside of the Nix search path (as set via the `NIX_PATH`
|
files outside of
|
||||||
environment variable or the `-I` option), or to URIs outside of
|
[`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath),
|
||||||
[`allowed-uris`](../command-ref/conf-file.md#conf-allowed-uris).
|
or to URIs outside of
|
||||||
The default is `false`.
|
[`allowed-uris`](@docroot@/command-ref/conf-file.md#conf-allowed-uris).
|
||||||
|
|
||||||
|
Also the default value for [`nix-path`](#conf-nix-path) is ignored, such that only explicitly set search path entries are taken into account.
|
||||||
)"};
|
)"};
|
||||||
|
|
||||||
Setting<bool> pureEval{this, false, "pure-eval",
|
Setting<bool> pureEval{this, false, "pure-eval",
|
||||||
|
@ -40,18 +42,22 @@ struct EvalSettings : Config
|
||||||
Pure evaluation mode ensures that the result of Nix expressions is fully determined by explicitly declared inputs, and not influenced by external state:
|
Pure evaluation mode ensures that the result of Nix expressions is fully determined by explicitly declared inputs, and not influenced by external state:
|
||||||
|
|
||||||
- Restrict file system and network access to files specified by cryptographic hash
|
- Restrict file system and network access to files specified by cryptographic hash
|
||||||
- Disable [`bultins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) and [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime)
|
- Disable impure constants:
|
||||||
|
- [`bultins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem)
|
||||||
|
- [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime)
|
||||||
|
- [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath)
|
||||||
)"
|
)"
|
||||||
};
|
};
|
||||||
|
|
||||||
Setting<bool> enableImportFromDerivation{
|
Setting<bool> enableImportFromDerivation{
|
||||||
this, true, "allow-import-from-derivation",
|
this, true, "allow-import-from-derivation",
|
||||||
R"(
|
R"(
|
||||||
By default, Nix allows you to `import` from a derivation, allowing
|
By default, Nix allows [Import from Derivation](@docroot@/language/import-from-derivation.md).
|
||||||
building at evaluation time. With this option set to false, Nix will
|
|
||||||
throw an error when evaluating an expression that uses this feature,
|
With this option set to `false`, Nix will throw an error when evaluating an expression that uses this feature,
|
||||||
allowing users to ensure their evaluation will not require any
|
even when the required store object is readily available.
|
||||||
builds to take place.
|
This ensures that evaluation will not require any builds to take place,
|
||||||
|
regardless of the state of the store.
|
||||||
)"};
|
)"};
|
||||||
|
|
||||||
Setting<Strings> allowedUris{this, {}, "allowed-uris",
|
Setting<Strings> allowedUris{this, {}, "allowed-uris",
|
||||||
|
|
|
@ -114,7 +114,7 @@ void Value::print(const SymbolTable &symbols, std::ostream &str,
|
||||||
printLiteralBool(str, boolean);
|
printLiteralBool(str, boolean);
|
||||||
break;
|
break;
|
||||||
case tString:
|
case tString:
|
||||||
printLiteralString(str, string.s);
|
printLiteralString(str, string_view());
|
||||||
break;
|
break;
|
||||||
case tPath:
|
case tPath:
|
||||||
str << path().to_string(); // !!! escaping?
|
str << path().to_string(); // !!! escaping?
|
||||||
|
@ -339,7 +339,7 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env)
|
||||||
Value nameValue;
|
Value nameValue;
|
||||||
name.expr->eval(state, env, nameValue);
|
name.expr->eval(state, env, nameValue);
|
||||||
state.forceStringNoCtx(nameValue, noPos, "while evaluating an attribute name");
|
state.forceStringNoCtx(nameValue, noPos, "while evaluating an attribute name");
|
||||||
return state.symbols.create(nameValue.string.s);
|
return state.symbols.create(nameValue.string_view());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1343,7 +1343,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
|
||||||
if (nameVal.type() == nNull)
|
if (nameVal.type() == nNull)
|
||||||
continue;
|
continue;
|
||||||
state.forceStringNoCtx(nameVal, i.pos, "while evaluating the name of a dynamic attribute");
|
state.forceStringNoCtx(nameVal, i.pos, "while evaluating the name of a dynamic attribute");
|
||||||
auto nameSym = state.symbols.create(nameVal.string.s);
|
auto nameSym = state.symbols.create(nameVal.string_view());
|
||||||
Bindings::iterator j = v.attrs->find(nameSym);
|
Bindings::iterator j = v.attrs->find(nameSym);
|
||||||
if (j != v.attrs->end())
|
if (j != v.attrs->end())
|
||||||
state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow<EvalError>();
|
state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow<EvalError>();
|
||||||
|
@ -2155,7 +2155,7 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string
|
||||||
forceValue(v, pos);
|
forceValue(v, pos);
|
||||||
if (v.type() != nString)
|
if (v.type() != nString)
|
||||||
error("value is %1% while a string was expected", showType(v)).debugThrow<TypeError>();
|
error("value is %1% while a string was expected", showType(v)).debugThrow<TypeError>();
|
||||||
return v.string.s;
|
return v.string_view();
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
e.addTrace(positions[pos], errorCtx);
|
e.addTrace(positions[pos], errorCtx);
|
||||||
throw;
|
throw;
|
||||||
|
@ -2182,8 +2182,8 @@ std::string_view EvalState::forceString(Value & v, NixStringContext & context, c
|
||||||
std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx)
|
std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx)
|
||||||
{
|
{
|
||||||
auto s = forceString(v, pos, errorCtx);
|
auto s = forceString(v, pos, errorCtx);
|
||||||
if (v.string.context) {
|
if (v.context()) {
|
||||||
error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string.s, v.string.context[0]).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||||
}
|
}
|
||||||
return s;
|
return s;
|
||||||
}
|
}
|
||||||
|
@ -2196,7 +2196,7 @@ bool EvalState::isDerivation(Value & v)
|
||||||
if (i == v.attrs->end()) return false;
|
if (i == v.attrs->end()) return false;
|
||||||
forceValue(*i->value, i->pos);
|
forceValue(*i->value, i->pos);
|
||||||
if (i->value->type() != nString) return false;
|
if (i->value->type() != nString) return false;
|
||||||
return strcmp(i->value->string.s, "derivation") == 0;
|
return i->value->string_view().compare("derivation") == 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -2228,7 +2228,7 @@ BackedStringView EvalState::coerceToString(
|
||||||
|
|
||||||
if (v.type() == nString) {
|
if (v.type() == nString) {
|
||||||
copyContext(v, context);
|
copyContext(v, context);
|
||||||
return std::string_view(v.string.s);
|
return v.string_view();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (v.type() == nPath) {
|
if (v.type() == nPath) {
|
||||||
|
@ -2426,7 +2426,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
|
||||||
return v1.boolean == v2.boolean;
|
return v1.boolean == v2.boolean;
|
||||||
|
|
||||||
case nString:
|
case nString:
|
||||||
return strcmp(v1.string.s, v2.string.s) == 0;
|
return v1.string_view().compare(v2.string_view()) == 0;
|
||||||
|
|
||||||
case nPath:
|
case nPath:
|
||||||
return strcmp(v1._path, v2._path) == 0;
|
return strcmp(v1._path, v2._path) == 0;
|
||||||
|
@ -2477,10 +2477,37 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void EvalState::printStats()
|
bool EvalState::fullGC() {
|
||||||
|
#if HAVE_BOEHMGC
|
||||||
|
GC_gcollect();
|
||||||
|
// Check that it ran. We might replace this with a version that uses more
|
||||||
|
// of the boehm API to get this reliably, at a maintenance cost.
|
||||||
|
// We use a 1K margin because technically this has a race condtion, but we
|
||||||
|
// probably won't encounter it in practice, because the CLI isn't concurrent
|
||||||
|
// like that.
|
||||||
|
return GC_get_bytes_since_gc() < 1024;
|
||||||
|
#else
|
||||||
|
return false;
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
|
void EvalState::maybePrintStats()
|
||||||
{
|
{
|
||||||
bool showStats = getEnv("NIX_SHOW_STATS").value_or("0") != "0";
|
bool showStats = getEnv("NIX_SHOW_STATS").value_or("0") != "0";
|
||||||
|
|
||||||
|
if (showStats) {
|
||||||
|
// Make the final heap size more deterministic.
|
||||||
|
#if HAVE_BOEHMGC
|
||||||
|
if (!fullGC()) {
|
||||||
|
warn("failed to perform a full GC before reporting stats");
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
printStatistics();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void EvalState::printStatistics()
|
||||||
|
{
|
||||||
struct rusage buf;
|
struct rusage buf;
|
||||||
getrusage(RUSAGE_SELF, &buf);
|
getrusage(RUSAGE_SELF, &buf);
|
||||||
float cpuTime = buf.ru_utime.tv_sec + ((float) buf.ru_utime.tv_usec / 1000000);
|
float cpuTime = buf.ru_utime.tv_sec + ((float) buf.ru_utime.tv_usec / 1000000);
|
||||||
|
@ -2494,105 +2521,105 @@ void EvalState::printStats()
|
||||||
GC_word heapSize, totalBytes;
|
GC_word heapSize, totalBytes;
|
||||||
GC_get_heap_usage_safe(&heapSize, 0, 0, 0, &totalBytes);
|
GC_get_heap_usage_safe(&heapSize, 0, 0, 0, &totalBytes);
|
||||||
#endif
|
#endif
|
||||||
if (showStats) {
|
|
||||||
auto outPath = getEnv("NIX_SHOW_STATS_PATH").value_or("-");
|
auto outPath = getEnv("NIX_SHOW_STATS_PATH").value_or("-");
|
||||||
std::fstream fs;
|
std::fstream fs;
|
||||||
if (outPath != "-")
|
if (outPath != "-")
|
||||||
fs.open(outPath, std::fstream::out);
|
fs.open(outPath, std::fstream::out);
|
||||||
json topObj = json::object();
|
json topObj = json::object();
|
||||||
topObj["cpuTime"] = cpuTime;
|
topObj["cpuTime"] = cpuTime;
|
||||||
topObj["envs"] = {
|
topObj["envs"] = {
|
||||||
{"number", nrEnvs},
|
{"number", nrEnvs},
|
||||||
{"elements", nrValuesInEnvs},
|
{"elements", nrValuesInEnvs},
|
||||||
{"bytes", bEnvs},
|
{"bytes", bEnvs},
|
||||||
};
|
};
|
||||||
topObj["list"] = {
|
topObj["nrExprs"] = Expr::nrExprs;
|
||||||
{"elements", nrListElems},
|
topObj["list"] = {
|
||||||
{"bytes", bLists},
|
{"elements", nrListElems},
|
||||||
{"concats", nrListConcats},
|
{"bytes", bLists},
|
||||||
};
|
{"concats", nrListConcats},
|
||||||
topObj["values"] = {
|
};
|
||||||
{"number", nrValues},
|
topObj["values"] = {
|
||||||
{"bytes", bValues},
|
{"number", nrValues},
|
||||||
};
|
{"bytes", bValues},
|
||||||
topObj["symbols"] = {
|
};
|
||||||
{"number", symbols.size()},
|
topObj["symbols"] = {
|
||||||
{"bytes", symbols.totalSize()},
|
{"number", symbols.size()},
|
||||||
};
|
{"bytes", symbols.totalSize()},
|
||||||
topObj["sets"] = {
|
};
|
||||||
{"number", nrAttrsets},
|
topObj["sets"] = {
|
||||||
{"bytes", bAttrsets},
|
{"number", nrAttrsets},
|
||||||
{"elements", nrAttrsInAttrsets},
|
{"bytes", bAttrsets},
|
||||||
};
|
{"elements", nrAttrsInAttrsets},
|
||||||
topObj["sizes"] = {
|
};
|
||||||
{"Env", sizeof(Env)},
|
topObj["sizes"] = {
|
||||||
{"Value", sizeof(Value)},
|
{"Env", sizeof(Env)},
|
||||||
{"Bindings", sizeof(Bindings)},
|
{"Value", sizeof(Value)},
|
||||||
{"Attr", sizeof(Attr)},
|
{"Bindings", sizeof(Bindings)},
|
||||||
};
|
{"Attr", sizeof(Attr)},
|
||||||
topObj["nrOpUpdates"] = nrOpUpdates;
|
};
|
||||||
topObj["nrOpUpdateValuesCopied"] = nrOpUpdateValuesCopied;
|
topObj["nrOpUpdates"] = nrOpUpdates;
|
||||||
topObj["nrThunks"] = nrThunks;
|
topObj["nrOpUpdateValuesCopied"] = nrOpUpdateValuesCopied;
|
||||||
topObj["nrAvoided"] = nrAvoided;
|
topObj["nrThunks"] = nrThunks;
|
||||||
topObj["nrLookups"] = nrLookups;
|
topObj["nrAvoided"] = nrAvoided;
|
||||||
topObj["nrPrimOpCalls"] = nrPrimOpCalls;
|
topObj["nrLookups"] = nrLookups;
|
||||||
topObj["nrFunctionCalls"] = nrFunctionCalls;
|
topObj["nrPrimOpCalls"] = nrPrimOpCalls;
|
||||||
|
topObj["nrFunctionCalls"] = nrFunctionCalls;
|
||||||
#if HAVE_BOEHMGC
|
#if HAVE_BOEHMGC
|
||||||
topObj["gc"] = {
|
topObj["gc"] = {
|
||||||
{"heapSize", heapSize},
|
{"heapSize", heapSize},
|
||||||
{"totalBytes", totalBytes},
|
{"totalBytes", totalBytes},
|
||||||
};
|
};
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
if (countCalls) {
|
if (countCalls) {
|
||||||
topObj["primops"] = primOpCalls;
|
topObj["primops"] = primOpCalls;
|
||||||
{
|
{
|
||||||
auto& list = topObj["functions"];
|
auto& list = topObj["functions"];
|
||||||
list = json::array();
|
list = json::array();
|
||||||
for (auto & [fun, count] : functionCalls) {
|
for (auto & [fun, count] : functionCalls) {
|
||||||
json obj = json::object();
|
json obj = json::object();
|
||||||
if (fun->name)
|
if (fun->name)
|
||||||
obj["name"] = (std::string_view) symbols[fun->name];
|
obj["name"] = (std::string_view) symbols[fun->name];
|
||||||
else
|
else
|
||||||
obj["name"] = nullptr;
|
obj["name"] = nullptr;
|
||||||
if (auto pos = positions[fun->pos]) {
|
if (auto pos = positions[fun->pos]) {
|
||||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||||
obj["file"] = path->to_string();
|
obj["file"] = path->to_string();
|
||||||
obj["line"] = pos.line;
|
obj["line"] = pos.line;
|
||||||
obj["column"] = pos.column;
|
obj["column"] = pos.column;
|
||||||
}
|
|
||||||
obj["count"] = count;
|
|
||||||
list.push_back(obj);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
{
|
|
||||||
auto list = topObj["attributes"];
|
|
||||||
list = json::array();
|
|
||||||
for (auto & i : attrSelects) {
|
|
||||||
json obj = json::object();
|
|
||||||
if (auto pos = positions[i.first]) {
|
|
||||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
|
||||||
obj["file"] = path->to_string();
|
|
||||||
obj["line"] = pos.line;
|
|
||||||
obj["column"] = pos.column;
|
|
||||||
}
|
|
||||||
obj["count"] = i.second;
|
|
||||||
list.push_back(obj);
|
|
||||||
}
|
}
|
||||||
|
obj["count"] = count;
|
||||||
|
list.push_back(obj);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
{
|
||||||
|
auto list = topObj["attributes"];
|
||||||
|
list = json::array();
|
||||||
|
for (auto & i : attrSelects) {
|
||||||
|
json obj = json::object();
|
||||||
|
if (auto pos = positions[i.first]) {
|
||||||
|
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||||
|
obj["file"] = path->to_string();
|
||||||
|
obj["line"] = pos.line;
|
||||||
|
obj["column"] = pos.column;
|
||||||
|
}
|
||||||
|
obj["count"] = i.second;
|
||||||
|
list.push_back(obj);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if (getEnv("NIX_SHOW_SYMBOLS").value_or("0") != "0") {
|
if (getEnv("NIX_SHOW_SYMBOLS").value_or("0") != "0") {
|
||||||
// XXX: overrides earlier assignment
|
// XXX: overrides earlier assignment
|
||||||
topObj["symbols"] = json::array();
|
topObj["symbols"] = json::array();
|
||||||
auto &list = topObj["symbols"];
|
auto &list = topObj["symbols"];
|
||||||
symbols.dump([&](const std::string & s) { list.emplace_back(s); });
|
symbols.dump([&](const std::string & s) { list.emplace_back(s); });
|
||||||
}
|
}
|
||||||
if (outPath == "-") {
|
if (outPath == "-") {
|
||||||
std::cerr << topObj.dump(2) << std::endl;
|
std::cerr << topObj.dump(2) << std::endl;
|
||||||
} else {
|
} else {
|
||||||
fs << topObj.dump(2) << std::endl;
|
fs << topObj.dump(2) << std::endl;
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -709,9 +709,25 @@ public:
|
||||||
void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos, std::string_view errorCtx);
|
void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos, std::string_view errorCtx);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Print statistics.
|
* Print statistics, if enabled.
|
||||||
|
*
|
||||||
|
* Performs a full memory GC before printing the statistics, so that the
|
||||||
|
* GC statistics are more accurate.
|
||||||
*/
|
*/
|
||||||
void printStats();
|
void maybePrintStats();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print statistics, unconditionally, cheaply, without performing a GC first.
|
||||||
|
*/
|
||||||
|
void printStatistics();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Perform a full memory garbage collection - not incremental.
|
||||||
|
*
|
||||||
|
* @return true if Nix was built with GC and a GC was performed, false if not.
|
||||||
|
* The return value is currently not thread safe - just the return value.
|
||||||
|
*/
|
||||||
|
bool fullGC();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Realise the given context, and return a mapping from the placeholders
|
* Realise the given context, and return a mapping from the placeholders
|
||||||
|
|
|
@ -113,7 +113,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
||||||
try {
|
try {
|
||||||
if (attr.name == sUrl) {
|
if (attr.name == sUrl) {
|
||||||
expectType(state, nString, *attr.value, attr.pos);
|
expectType(state, nString, *attr.value, attr.pos);
|
||||||
url = attr.value->string.s;
|
url = attr.value->string_view();
|
||||||
attrs.emplace("url", *url);
|
attrs.emplace("url", *url);
|
||||||
} else if (attr.name == sFlake) {
|
} else if (attr.name == sFlake) {
|
||||||
expectType(state, nBool, *attr.value, attr.pos);
|
expectType(state, nBool, *attr.value, attr.pos);
|
||||||
|
@ -122,7 +122,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
||||||
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, baseDir, lockRootPath);
|
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, baseDir, lockRootPath);
|
||||||
} else if (attr.name == sFollows) {
|
} else if (attr.name == sFollows) {
|
||||||
expectType(state, nString, *attr.value, attr.pos);
|
expectType(state, nString, *attr.value, attr.pos);
|
||||||
auto follows(parseInputPath(attr.value->string.s));
|
auto follows(parseInputPath(attr.value->c_str()));
|
||||||
follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end());
|
follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end());
|
||||||
input.follows = follows;
|
input.follows = follows;
|
||||||
} else {
|
} else {
|
||||||
|
@ -131,7 +131,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
||||||
#pragma GCC diagnostic ignored "-Wswitch-enum"
|
#pragma GCC diagnostic ignored "-Wswitch-enum"
|
||||||
switch (attr.value->type()) {
|
switch (attr.value->type()) {
|
||||||
case nString:
|
case nString:
|
||||||
attrs.emplace(state.symbols[attr.name], attr.value->string.s);
|
attrs.emplace(state.symbols[attr.name], attr.value->c_str());
|
||||||
break;
|
break;
|
||||||
case nBool:
|
case nBool:
|
||||||
attrs.emplace(state.symbols[attr.name], Explicit<bool> { attr.value->boolean });
|
attrs.emplace(state.symbols[attr.name], Explicit<bool> { attr.value->boolean });
|
||||||
|
@ -229,7 +229,7 @@ static Flake getFlake(
|
||||||
|
|
||||||
if (auto description = vInfo.attrs->get(state.sDescription)) {
|
if (auto description = vInfo.attrs->get(state.sDescription)) {
|
||||||
expectType(state, nString, *description->value, description->pos);
|
expectType(state, nString, *description->value, description->pos);
|
||||||
flake.description = description->value->string.s;
|
flake.description = description->value->c_str();
|
||||||
}
|
}
|
||||||
|
|
||||||
auto sInputs = state.symbols.create("inputs");
|
auto sInputs = state.symbols.create("inputs");
|
||||||
|
@ -850,7 +850,7 @@ static void prim_flakeRefToString(
|
||||||
Explicit<bool> { attr.value->boolean });
|
Explicit<bool> { attr.value->boolean });
|
||||||
} else if (t == nString) {
|
} else if (t == nString) {
|
||||||
attrs.emplace(state.symbols[attr.name],
|
attrs.emplace(state.symbols[attr.name],
|
||||||
std::string(attr.value->str()));
|
std::string(attr.value->string_view()));
|
||||||
} else {
|
} else {
|
||||||
state.error(
|
state.error(
|
||||||
"flake reference attribute sets may only contain integers, Booleans, "
|
"flake reference attribute sets may only contain integers, Booleans, "
|
||||||
|
|
|
@ -69,32 +69,130 @@ std::optional<FlakeRef> maybeParseFlakeRef(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
|
||||||
const std::string & url,
|
const std::string & url,
|
||||||
const std::optional<Path> & baseDir,
|
const std::optional<Path> & baseDir,
|
||||||
bool allowMissing,
|
bool allowMissing,
|
||||||
bool isFlake)
|
bool isFlake)
|
||||||
{
|
{
|
||||||
using namespace fetchers;
|
std::string path = url;
|
||||||
|
std::string fragment = "";
|
||||||
|
std::map<std::string, std::string> query;
|
||||||
|
auto pathEnd = url.find_first_of("#?");
|
||||||
|
auto fragmentStart = pathEnd;
|
||||||
|
if (pathEnd != std::string::npos && url[pathEnd] == '?') {
|
||||||
|
fragmentStart = url.find("#");
|
||||||
|
}
|
||||||
|
if (pathEnd != std::string::npos) {
|
||||||
|
path = url.substr(0, pathEnd);
|
||||||
|
}
|
||||||
|
if (fragmentStart != std::string::npos) {
|
||||||
|
fragment = percentDecode(url.substr(fragmentStart+1));
|
||||||
|
}
|
||||||
|
if (pathEnd != std::string::npos && fragmentStart != std::string::npos) {
|
||||||
|
query = decodeQuery(url.substr(pathEnd+1, fragmentStart));
|
||||||
|
}
|
||||||
|
|
||||||
static std::string fnRegex = "[0-9a-zA-Z-._~!$&'\"()*+,;=]+";
|
if (baseDir) {
|
||||||
|
/* Check if 'url' is a path (either absolute or relative
|
||||||
|
to 'baseDir'). If so, search upward to the root of the
|
||||||
|
repo (i.e. the directory containing .git). */
|
||||||
|
|
||||||
static std::regex pathUrlRegex(
|
path = absPath(path, baseDir);
|
||||||
"(/?" + fnRegex + "(?:/" + fnRegex + ")*/?)"
|
|
||||||
+ "(?:\\?(" + queryRegex + "))?"
|
if (isFlake) {
|
||||||
+ "(?:#(" + queryRegex + "))?",
|
|
||||||
std::regex::ECMAScript);
|
if (!allowMissing && !pathExists(path + "/flake.nix")){
|
||||||
|
notice("path '%s' does not contain a 'flake.nix', searching up",path);
|
||||||
|
|
||||||
|
// Save device to detect filesystem boundary
|
||||||
|
dev_t device = lstat(path).st_dev;
|
||||||
|
bool found = false;
|
||||||
|
while (path != "/") {
|
||||||
|
if (pathExists(path + "/flake.nix")) {
|
||||||
|
found = true;
|
||||||
|
break;
|
||||||
|
} else if (pathExists(path + "/.git"))
|
||||||
|
throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path);
|
||||||
|
else {
|
||||||
|
if (lstat(path).st_dev != device)
|
||||||
|
throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path);
|
||||||
|
}
|
||||||
|
path = dirOf(path);
|
||||||
|
}
|
||||||
|
if (!found)
|
||||||
|
throw BadURL("could not find a flake.nix file");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!S_ISDIR(lstat(path).st_mode))
|
||||||
|
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
|
||||||
|
|
||||||
|
if (!allowMissing && !pathExists(path + "/flake.nix"))
|
||||||
|
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
|
||||||
|
|
||||||
|
auto flakeRoot = path;
|
||||||
|
std::string subdir;
|
||||||
|
|
||||||
|
while (flakeRoot != "/") {
|
||||||
|
if (pathExists(flakeRoot + "/.git")) {
|
||||||
|
auto base = std::string("git+file://") + flakeRoot;
|
||||||
|
|
||||||
|
auto parsedURL = ParsedURL{
|
||||||
|
.url = base, // FIXME
|
||||||
|
.base = base,
|
||||||
|
.scheme = "git+file",
|
||||||
|
.authority = "",
|
||||||
|
.path = flakeRoot,
|
||||||
|
.query = query,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (subdir != "") {
|
||||||
|
if (parsedURL.query.count("dir"))
|
||||||
|
throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
|
||||||
|
parsedURL.query.insert_or_assign("dir", subdir);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (pathExists(flakeRoot + "/.git/shallow"))
|
||||||
|
parsedURL.query.insert_or_assign("shallow", "1");
|
||||||
|
|
||||||
|
return std::make_pair(
|
||||||
|
FlakeRef(fetchers::Input::fromURL(parsedURL), getOr(parsedURL.query, "dir", "")),
|
||||||
|
fragment);
|
||||||
|
}
|
||||||
|
|
||||||
|
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
|
||||||
|
flakeRoot = dirOf(flakeRoot);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
} else {
|
||||||
|
if (!hasPrefix(path, "/"))
|
||||||
|
throw BadURL("flake reference '%s' is not an absolute path", url);
|
||||||
|
path = canonPath(path + "/" + getOr(query, "dir", ""));
|
||||||
|
}
|
||||||
|
|
||||||
|
fetchers::Attrs attrs;
|
||||||
|
attrs.insert_or_assign("type", "path");
|
||||||
|
attrs.insert_or_assign("path", path);
|
||||||
|
|
||||||
|
return std::make_pair(FlakeRef(fetchers::Input::fromAttrs(std::move(attrs)), ""), fragment);
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/* Check if 'url' is a flake ID. This is an abbreviated syntax for
|
||||||
|
'flake:<flake-id>?ref=<ref>&rev=<rev>'. */
|
||||||
|
std::optional<std::pair<FlakeRef, std::string>> parseFlakeIdRef(
|
||||||
|
const std::string & url,
|
||||||
|
bool isFlake
|
||||||
|
)
|
||||||
|
{
|
||||||
|
std::smatch match;
|
||||||
|
|
||||||
static std::regex flakeRegex(
|
static std::regex flakeRegex(
|
||||||
"((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)"
|
"((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)"
|
||||||
+ "(?:#(" + queryRegex + "))?",
|
+ "(?:#(" + queryRegex + "))?",
|
||||||
std::regex::ECMAScript);
|
std::regex::ECMAScript);
|
||||||
|
|
||||||
std::smatch match;
|
|
||||||
|
|
||||||
/* Check if 'url' is a flake ID. This is an abbreviated syntax for
|
|
||||||
'flake:<flake-id>?ref=<ref>&rev=<rev>'. */
|
|
||||||
|
|
||||||
if (std::regex_match(url, match, flakeRegex)) {
|
if (std::regex_match(url, match, flakeRegex)) {
|
||||||
auto parsedURL = ParsedURL{
|
auto parsedURL = ParsedURL{
|
||||||
.url = url,
|
.url = url,
|
||||||
|
@ -105,111 +203,53 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
||||||
};
|
};
|
||||||
|
|
||||||
return std::make_pair(
|
return std::make_pair(
|
||||||
FlakeRef(Input::fromURL(parsedURL, isFlake), ""),
|
FlakeRef(fetchers::Input::fromURL(parsedURL, isFlake), ""),
|
||||||
percentDecode(match.str(6)));
|
percentDecode(match.str(6)));
|
||||||
}
|
}
|
||||||
|
|
||||||
else if (std::regex_match(url, match, pathUrlRegex)) {
|
return {};
|
||||||
std::string path = match[1];
|
}
|
||||||
std::string fragment = percentDecode(match.str(3));
|
|
||||||
|
|
||||||
if (baseDir) {
|
std::optional<std::pair<FlakeRef, std::string>> parseURLFlakeRef(
|
||||||
/* Check if 'url' is a path (either absolute or relative
|
const std::string & url,
|
||||||
to 'baseDir'). If so, search upward to the root of the
|
const std::optional<Path> & baseDir,
|
||||||
repo (i.e. the directory containing .git). */
|
bool isFlake
|
||||||
|
)
|
||||||
path = absPath(path, baseDir);
|
{
|
||||||
|
ParsedURL parsedURL;
|
||||||
if (isFlake) {
|
try {
|
||||||
|
parsedURL = parseURL(url);
|
||||||
if (!allowMissing && !pathExists(path + "/flake.nix")){
|
} catch (BadURL &) {
|
||||||
notice("path '%s' does not contain a 'flake.nix', searching up",path);
|
return std::nullopt;
|
||||||
|
|
||||||
// Save device to detect filesystem boundary
|
|
||||||
dev_t device = lstat(path).st_dev;
|
|
||||||
bool found = false;
|
|
||||||
while (path != "/") {
|
|
||||||
if (pathExists(path + "/flake.nix")) {
|
|
||||||
found = true;
|
|
||||||
break;
|
|
||||||
} else if (pathExists(path + "/.git"))
|
|
||||||
throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path);
|
|
||||||
else {
|
|
||||||
if (lstat(path).st_dev != device)
|
|
||||||
throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path);
|
|
||||||
}
|
|
||||||
path = dirOf(path);
|
|
||||||
}
|
|
||||||
if (!found)
|
|
||||||
throw BadURL("could not find a flake.nix file");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!S_ISDIR(lstat(path).st_mode))
|
|
||||||
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
|
|
||||||
|
|
||||||
if (!allowMissing && !pathExists(path + "/flake.nix"))
|
|
||||||
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
|
|
||||||
|
|
||||||
auto flakeRoot = path;
|
|
||||||
std::string subdir;
|
|
||||||
|
|
||||||
while (flakeRoot != "/") {
|
|
||||||
if (pathExists(flakeRoot + "/.git")) {
|
|
||||||
auto base = std::string("git+file://") + flakeRoot;
|
|
||||||
|
|
||||||
auto parsedURL = ParsedURL{
|
|
||||||
.url = base, // FIXME
|
|
||||||
.base = base,
|
|
||||||
.scheme = "git+file",
|
|
||||||
.authority = "",
|
|
||||||
.path = flakeRoot,
|
|
||||||
.query = decodeQuery(match[2]),
|
|
||||||
};
|
|
||||||
|
|
||||||
if (subdir != "") {
|
|
||||||
if (parsedURL.query.count("dir"))
|
|
||||||
throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
|
|
||||||
parsedURL.query.insert_or_assign("dir", subdir);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (pathExists(flakeRoot + "/.git/shallow"))
|
|
||||||
parsedURL.query.insert_or_assign("shallow", "1");
|
|
||||||
|
|
||||||
return std::make_pair(
|
|
||||||
FlakeRef(Input::fromURL(parsedURL, isFlake), getOr(parsedURL.query, "dir", "")),
|
|
||||||
fragment);
|
|
||||||
}
|
|
||||||
|
|
||||||
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
|
|
||||||
flakeRoot = dirOf(flakeRoot);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
} else {
|
|
||||||
if (!hasPrefix(path, "/"))
|
|
||||||
throw BadURL("flake reference '%s' is not an absolute path", url);
|
|
||||||
auto query = decodeQuery(match[2]);
|
|
||||||
path = canonPath(path + "/" + getOr(query, "dir", ""));
|
|
||||||
}
|
|
||||||
|
|
||||||
fetchers::Attrs attrs;
|
|
||||||
attrs.insert_or_assign("type", "path");
|
|
||||||
attrs.insert_or_assign("path", path);
|
|
||||||
|
|
||||||
return std::make_pair(FlakeRef(Input::fromAttrs(std::move(attrs)), ""), fragment);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
else {
|
std::string fragment;
|
||||||
auto parsedURL = parseURL(url);
|
std::swap(fragment, parsedURL.fragment);
|
||||||
std::string fragment;
|
|
||||||
std::swap(fragment, parsedURL.fragment);
|
|
||||||
|
|
||||||
auto input = Input::fromURL(parsedURL, isFlake);
|
auto input = fetchers::Input::fromURL(parsedURL, isFlake);
|
||||||
input.parent = baseDir;
|
input.parent = baseDir;
|
||||||
|
|
||||||
return std::make_pair(
|
return std::make_pair(
|
||||||
FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")),
|
FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")),
|
||||||
fragment);
|
fragment);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
||||||
|
const std::string & url,
|
||||||
|
const std::optional<Path> & baseDir,
|
||||||
|
bool allowMissing,
|
||||||
|
bool isFlake)
|
||||||
|
{
|
||||||
|
using namespace fetchers;
|
||||||
|
|
||||||
|
std::smatch match;
|
||||||
|
|
||||||
|
if (auto res = parseFlakeIdRef(url, isFlake)) {
|
||||||
|
return *res;
|
||||||
|
} else if (auto res = parseURLFlakeRef(url, baseDir, isFlake)) {
|
||||||
|
return *res;
|
||||||
|
} else {
|
||||||
|
return parsePathFlakeRefWithFragment(url, baseDir, allowMissing, isFlake);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -249,4 +289,6 @@ std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragment
|
||||||
return {std::move(flakeRef), fragment, std::move(extendedOutputsSpec)};
|
return {std::move(flakeRef), fragment, std::move(extendedOutputsSpec)};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -6,6 +6,7 @@
|
||||||
#include "fetchers.hh"
|
#include "fetchers.hh"
|
||||||
#include "outputs-spec.hh"
|
#include "outputs-spec.hh"
|
||||||
|
|
||||||
|
#include <regex>
|
||||||
#include <variant>
|
#include <variant>
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
@ -91,5 +92,7 @@ std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragment
|
||||||
bool allowMissing = false,
|
bool allowMissing = false,
|
||||||
bool isFlake = true);
|
bool isFlake = true);
|
||||||
|
|
||||||
|
const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
|
||||||
|
extern std::regex flakeIdRegex;
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -156,7 +156,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
|
||||||
Outputs result;
|
Outputs result;
|
||||||
for (auto elem : outTI->listItems()) {
|
for (auto elem : outTI->listItems()) {
|
||||||
if (elem->type() != nString) throw errMsg;
|
if (elem->type() != nString) throw errMsg;
|
||||||
auto out = outputs.find(elem->string.s);
|
auto out = outputs.find(elem->c_str());
|
||||||
if (out == outputs.end()) throw errMsg;
|
if (out == outputs.end()) throw errMsg;
|
||||||
result.insert(*out);
|
result.insert(*out);
|
||||||
}
|
}
|
||||||
|
@ -230,7 +230,7 @@ std::string DrvInfo::queryMetaString(const std::string & name)
|
||||||
{
|
{
|
||||||
Value * v = queryMeta(name);
|
Value * v = queryMeta(name);
|
||||||
if (!v || v->type() != nString) return "";
|
if (!v || v->type() != nString) return "";
|
||||||
return v->string.s;
|
return v->c_str();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -242,7 +242,7 @@ NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def)
|
||||||
if (v->type() == nString) {
|
if (v->type() == nString) {
|
||||||
/* Backwards compatibility with before we had support for
|
/* Backwards compatibility with before we had support for
|
||||||
integer meta fields. */
|
integer meta fields. */
|
||||||
if (auto n = string2Int<NixInt>(v->string.s))
|
if (auto n = string2Int<NixInt>(v->c_str()))
|
||||||
return *n;
|
return *n;
|
||||||
}
|
}
|
||||||
return def;
|
return def;
|
||||||
|
@ -256,7 +256,7 @@ NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def)
|
||||||
if (v->type() == nString) {
|
if (v->type() == nString) {
|
||||||
/* Backwards compatibility with before we had support for
|
/* Backwards compatibility with before we had support for
|
||||||
float meta fields. */
|
float meta fields. */
|
||||||
if (auto n = string2Float<NixFloat>(v->string.s))
|
if (auto n = string2Float<NixFloat>(v->c_str()))
|
||||||
return *n;
|
return *n;
|
||||||
}
|
}
|
||||||
return def;
|
return def;
|
||||||
|
@ -271,8 +271,8 @@ bool DrvInfo::queryMetaBool(const std::string & name, bool def)
|
||||||
if (v->type() == nString) {
|
if (v->type() == nString) {
|
||||||
/* Backwards compatibility with before we had support for
|
/* Backwards compatibility with before we had support for
|
||||||
Boolean meta fields. */
|
Boolean meta fields. */
|
||||||
if (strcmp(v->string.s, "true") == 0) return true;
|
if (v->string_view() == "true") return true;
|
||||||
if (strcmp(v->string.s, "false") == 0) return false;
|
if (v->string_view() == "false") return false;
|
||||||
}
|
}
|
||||||
return def;
|
return def;
|
||||||
}
|
}
|
||||||
|
|
|
@ -76,12 +76,12 @@ void Expr::show(const SymbolTable & symbols, std::ostream & str) const
|
||||||
|
|
||||||
void ExprInt::show(const SymbolTable & symbols, std::ostream & str) const
|
void ExprInt::show(const SymbolTable & symbols, std::ostream & str) const
|
||||||
{
|
{
|
||||||
str << n;
|
str << v.integer;
|
||||||
}
|
}
|
||||||
|
|
||||||
void ExprFloat::show(const SymbolTable & symbols, std::ostream & str) const
|
void ExprFloat::show(const SymbolTable & symbols, std::ostream & str) const
|
||||||
{
|
{
|
||||||
str << nf;
|
str << v.fpoint;
|
||||||
}
|
}
|
||||||
|
|
||||||
void ExprString::show(const SymbolTable & symbols, std::ostream & str) const
|
void ExprString::show(const SymbolTable & symbols, std::ostream & str) const
|
||||||
|
|
|
@ -155,6 +155,10 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
|
||||||
|
|
||||||
struct Expr
|
struct Expr
|
||||||
{
|
{
|
||||||
|
static unsigned long nrExprs;
|
||||||
|
Expr() {
|
||||||
|
nrExprs++;
|
||||||
|
}
|
||||||
virtual ~Expr() { };
|
virtual ~Expr() { };
|
||||||
virtual void show(const SymbolTable & symbols, std::ostream & str) const;
|
virtual void show(const SymbolTable & symbols, std::ostream & str) const;
|
||||||
virtual void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env);
|
virtual void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env);
|
||||||
|
@ -171,18 +175,16 @@ struct Expr
|
||||||
|
|
||||||
struct ExprInt : Expr
|
struct ExprInt : Expr
|
||||||
{
|
{
|
||||||
NixInt n;
|
|
||||||
Value v;
|
Value v;
|
||||||
ExprInt(NixInt n) : n(n) { v.mkInt(n); };
|
ExprInt(NixInt n) { v.mkInt(n); };
|
||||||
Value * maybeThunk(EvalState & state, Env & env) override;
|
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||||
COMMON_METHODS
|
COMMON_METHODS
|
||||||
};
|
};
|
||||||
|
|
||||||
struct ExprFloat : Expr
|
struct ExprFloat : Expr
|
||||||
{
|
{
|
||||||
NixFloat nf;
|
|
||||||
Value v;
|
Value v;
|
||||||
ExprFloat(NixFloat nf) : nf(nf) { v.mkFloat(nf); };
|
ExprFloat(NixFloat nf) { v.mkFloat(nf); };
|
||||||
Value * maybeThunk(EvalState & state, Env & env) override;
|
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||||
COMMON_METHODS
|
COMMON_METHODS
|
||||||
};
|
};
|
||||||
|
@ -238,7 +240,7 @@ struct ExprSelect : Expr
|
||||||
PosIdx pos;
|
PosIdx pos;
|
||||||
Expr * e, * def;
|
Expr * e, * def;
|
||||||
AttrPath attrPath;
|
AttrPath attrPath;
|
||||||
ExprSelect(const PosIdx & pos, Expr * e, const AttrPath && attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(std::move(attrPath)) { };
|
ExprSelect(const PosIdx & pos, Expr * e, AttrPath attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(std::move(attrPath)) { };
|
||||||
ExprSelect(const PosIdx & pos, Expr * e, Symbol name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); };
|
ExprSelect(const PosIdx & pos, Expr * e, Symbol name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); };
|
||||||
PosIdx getPos() const override { return pos; }
|
PosIdx getPos() const override { return pos; }
|
||||||
COMMON_METHODS
|
COMMON_METHODS
|
||||||
|
@ -248,7 +250,7 @@ struct ExprOpHasAttr : Expr
|
||||||
{
|
{
|
||||||
Expr * e;
|
Expr * e;
|
||||||
AttrPath attrPath;
|
AttrPath attrPath;
|
||||||
ExprOpHasAttr(Expr * e, const AttrPath && attrPath) : e(e), attrPath(std::move(attrPath)) { };
|
ExprOpHasAttr(Expr * e, AttrPath attrPath) : e(e), attrPath(std::move(attrPath)) { };
|
||||||
PosIdx getPos() const override { return e->getPos(); }
|
PosIdx getPos() const override { return e->getPos(); }
|
||||||
COMMON_METHODS
|
COMMON_METHODS
|
||||||
};
|
};
|
||||||
|
|
|
@ -522,7 +522,7 @@ path_start
|
||||||
/* add back in the trailing '/' to the first segment */
|
/* add back in the trailing '/' to the first segment */
|
||||||
if ($1.p[$1.l-1] == '/' && $1.l > 1)
|
if ($1.p[$1.l-1] == '/' && $1.l > 1)
|
||||||
path += "/";
|
path += "/";
|
||||||
$$ = new ExprPath(path);
|
$$ = new ExprPath(std::move(path));
|
||||||
}
|
}
|
||||||
| HPATH {
|
| HPATH {
|
||||||
if (evalSettings.pureEval) {
|
if (evalSettings.pureEval) {
|
||||||
|
@ -532,7 +532,7 @@ path_start
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
|
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
|
||||||
$$ = new ExprPath(path);
|
$$ = new ExprPath(std::move(path));
|
||||||
}
|
}
|
||||||
;
|
;
|
||||||
|
|
||||||
|
@ -655,6 +655,7 @@ formal
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
unsigned long Expr::nrExprs = 0;
|
||||||
|
|
||||||
Expr * EvalState::parse(
|
Expr * EvalState::parse(
|
||||||
char * text,
|
char * text,
|
||||||
|
|
|
@ -258,64 +258,71 @@ static RegisterPrimOp primop_import({
|
||||||
.args = {"path"},
|
.args = {"path"},
|
||||||
// TODO turn "normal path values" into link below
|
// TODO turn "normal path values" into link below
|
||||||
.doc = R"(
|
.doc = R"(
|
||||||
Load, parse and return the Nix expression in the file *path*.
|
Load, parse, and return the Nix expression in the file *path*.
|
||||||
|
|
||||||
The value *path* can be a path, a string, or an attribute set with an
|
|
||||||
`__toString` attribute or a `outPath` attribute (as derivations or flake
|
|
||||||
inputs typically have).
|
|
||||||
|
|
||||||
If *path* is a directory, the file `default.nix` in that directory
|
|
||||||
is loaded.
|
|
||||||
|
|
||||||
Evaluation aborts if the file doesn’t exist or contains
|
|
||||||
an incorrect Nix expression. `import` implements Nix’s module
|
|
||||||
system: you can put any Nix expression (such as a set or a
|
|
||||||
function) in a separate file, and use it from Nix expressions in
|
|
||||||
other files.
|
|
||||||
|
|
||||||
> **Note**
|
> **Note**
|
||||||
>
|
>
|
||||||
> Unlike some languages, `import` is a regular function in Nix.
|
> Unlike some languages, `import` is a regular function in Nix.
|
||||||
> Paths using the angle bracket syntax (e.g., `import` *\<foo\>*)
|
|
||||||
> are normal [path values](@docroot@/language/values.md#type-path).
|
|
||||||
|
|
||||||
A Nix expression loaded by `import` must not contain any *free
|
The *path* argument must meet the same criteria as an [interpolated expression](@docroot@/language/string-interpolation.md#interpolated-expression).
|
||||||
variables* (identifiers that are not defined in the Nix expression
|
|
||||||
itself and are not built-in). Therefore, it cannot refer to
|
|
||||||
variables that are in scope at the call site. For instance, if you
|
|
||||||
have a calling expression
|
|
||||||
|
|
||||||
```nix
|
If *path* is a directory, the file `default.nix` in that directory is used if it exists.
|
||||||
rec {
|
|
||||||
x = 123;
|
|
||||||
y = import ./foo.nix;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
then the following `foo.nix` will give an error:
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```console
|
||||||
|
> $ echo 123 > default.nix
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> Import `default.nix` from the current directory.
|
||||||
|
>
|
||||||
|
> ```nix
|
||||||
|
> import ./.
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> 123
|
||||||
|
|
||||||
```nix
|
Evaluation aborts if the file doesn’t exist or contains an invalid Nix expression.
|
||||||
x + 456
|
|
||||||
```
|
|
||||||
|
|
||||||
since `x` is not in scope in `foo.nix`. If you want `x` to be
|
A Nix expression loaded by `import` must not contain any *free variables*, that is, identifiers that are not defined in the Nix expression itself and are not built-in.
|
||||||
available in `foo.nix`, you should pass it as a function argument:
|
Therefore, it cannot refer to variables that are in scope at the call site.
|
||||||
|
|
||||||
```nix
|
> **Example**
|
||||||
rec {
|
>
|
||||||
x = 123;
|
> If you have a calling expression
|
||||||
y = import ./foo.nix x;
|
>
|
||||||
}
|
> ```nix
|
||||||
```
|
> rec {
|
||||||
|
> x = 123;
|
||||||
and
|
> y = import ./foo.nix;
|
||||||
|
> }
|
||||||
```nix
|
> ```
|
||||||
x: x + 456
|
>
|
||||||
```
|
> then the following `foo.nix` will give an error:
|
||||||
|
>
|
||||||
(The function argument doesn’t have to be called `x` in `foo.nix`;
|
> ```nix
|
||||||
any name would work.)
|
> # foo.nix
|
||||||
|
> x + 456
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> since `x` is not in scope in `foo.nix`.
|
||||||
|
> If you want `x` to be available in `foo.nix`, pass it as a function argument:
|
||||||
|
>
|
||||||
|
> ```nix
|
||||||
|
> rec {
|
||||||
|
> x = 123;
|
||||||
|
> y = import ./foo.nix x;
|
||||||
|
> }
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> and
|
||||||
|
>
|
||||||
|
> ```nix
|
||||||
|
> # foo.nix
|
||||||
|
> x: x + 456
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> The function argument doesn’t have to be called `x` in `foo.nix`; any name would work.
|
||||||
)",
|
)",
|
||||||
.fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
.fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||||
{
|
{
|
||||||
|
@ -590,7 +597,7 @@ struct CompareValues
|
||||||
case nFloat:
|
case nFloat:
|
||||||
return v1->fpoint < v2->fpoint;
|
return v1->fpoint < v2->fpoint;
|
||||||
case nString:
|
case nString:
|
||||||
return strcmp(v1->string.s, v2->string.s) < 0;
|
return v1->string_view().compare(v2->string_view()) < 0;
|
||||||
case nPath:
|
case nPath:
|
||||||
return strcmp(v1->_path, v2->_path) < 0;
|
return strcmp(v1->_path, v2->_path) < 0;
|
||||||
case nList:
|
case nList:
|
||||||
|
@ -982,7 +989,7 @@ static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Valu
|
||||||
{
|
{
|
||||||
state.forceValue(*args[0], pos);
|
state.forceValue(*args[0], pos);
|
||||||
if (args[0]->type() == nString)
|
if (args[0]->type() == nString)
|
||||||
printError("trace: %1%", args[0]->string.s);
|
printError("trace: %1%", args[0]->string_view());
|
||||||
else
|
else
|
||||||
printError("trace: %1%", printValue(state, *args[0]));
|
printError("trace: %1%", printValue(state, *args[0]));
|
||||||
state.forceValue(*args[1], pos);
|
state.forceValue(*args[1], pos);
|
||||||
|
@ -1528,7 +1535,9 @@ static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args,
|
||||||
auto path = realisePath(state, pos, arg, { .checkForPureEval = false });
|
auto path = realisePath(state, pos, arg, { .checkForPureEval = false });
|
||||||
|
|
||||||
/* SourcePath doesn't know about trailing slash. */
|
/* SourcePath doesn't know about trailing slash. */
|
||||||
auto mustBeDir = arg.type() == nString && arg.str().ends_with("/");
|
auto mustBeDir = arg.type() == nString
|
||||||
|
&& (arg.string_view().ends_with("/")
|
||||||
|
|| arg.string_view().ends_with("/."));
|
||||||
|
|
||||||
try {
|
try {
|
||||||
auto checked = state.checkSourcePath(path);
|
auto checked = state.checkSourcePath(path);
|
||||||
|
@ -1689,13 +1698,14 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
||||||
|
|
||||||
static RegisterPrimOp primop_findFile(PrimOp {
|
static RegisterPrimOp primop_findFile(PrimOp {
|
||||||
.name = "__findFile",
|
.name = "__findFile",
|
||||||
.args = {"search path", "lookup path"},
|
.args = {"search-path", "lookup-path"},
|
||||||
.doc = R"(
|
.doc = R"(
|
||||||
Look up the given path with the given search path.
|
Find *lookup-path* in *search-path*.
|
||||||
|
|
||||||
A search path is represented list of [attribute sets](./values.md#attribute-set) with two attributes, `prefix`, and `path`.
|
A search path is represented list of [attribute sets](./values.md#attribute-set) with two attributes:
|
||||||
`prefix` is a relative path.
|
- `prefix` is a relative path.
|
||||||
`path` denotes a file system location; the exact syntax depends on the command line interface.
|
- `path` denotes a file system location
|
||||||
|
The exact syntax depends on the command line interface.
|
||||||
|
|
||||||
Examples of search path attribute sets:
|
Examples of search path attribute sets:
|
||||||
|
|
||||||
|
@ -1713,15 +1723,14 @@ static RegisterPrimOp primop_findFile(PrimOp {
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
The lookup algorithm checks each entry until a match is found, returning a [path value](@docroot@/language/values.html#type-path) of the match.
|
The lookup algorithm checks each entry until a match is found, returning a [path value](@docroot@/language/values.html#type-path) of the match:
|
||||||
|
|
||||||
This is the process for each entry:
|
- If *lookup-path* matches `prefix`, then the remainder of *lookup-path* (the "suffix") is searched for within the directory denoted by `path`.
|
||||||
If the lookup path matches `prefix`, then the remainder of the lookup path (the "suffix") is searched for within the directory denoted by `patch`.
|
Note that the `path` may need to be downloaded at this point to look inside.
|
||||||
Note that the `path` may need to be downloaded at this point to look inside.
|
- If the suffix is found inside that directory, then the entry is a match.
|
||||||
If the suffix is found inside that directory, then the entry is a match;
|
The combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
|
||||||
the combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
|
|
||||||
|
|
||||||
The syntax
|
[Lookup path](@docroot@/language/constructs/lookup-path.md) expressions can be [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath):
|
||||||
|
|
||||||
```nix
|
```nix
|
||||||
<nixpkgs>
|
<nixpkgs>
|
||||||
|
@ -2400,7 +2409,7 @@ static void prim_attrNames(EvalState & state, const PosIdx pos, Value * * args,
|
||||||
(v.listElems()[n++] = state.allocValue())->mkString(state.symbols[i.name]);
|
(v.listElems()[n++] = state.allocValue())->mkString(state.symbols[i.name]);
|
||||||
|
|
||||||
std::sort(v.listElems(), v.listElems() + n,
|
std::sort(v.listElems(), v.listElems() + n,
|
||||||
[](Value * v1, Value * v2) { return strcmp(v1->string.s, v2->string.s) < 0; });
|
[](Value * v1, Value * v2) { return v1->string_view().compare(v2->string_view()) < 0; });
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterPrimOp primop_attrNames({
|
static RegisterPrimOp primop_attrNames({
|
||||||
|
@ -2541,7 +2550,7 @@ static void prim_removeAttrs(EvalState & state, const PosIdx pos, Value * * args
|
||||||
names.reserve(args[1]->listSize());
|
names.reserve(args[1]->listSize());
|
||||||
for (auto elem : args[1]->listItems()) {
|
for (auto elem : args[1]->listItems()) {
|
||||||
state.forceStringNoCtx(*elem, pos, "while evaluating the values of the second argument passed to builtins.removeAttrs");
|
state.forceStringNoCtx(*elem, pos, "while evaluating the values of the second argument passed to builtins.removeAttrs");
|
||||||
names.emplace_back(state.symbols.create(elem->string.s), nullptr);
|
names.emplace_back(state.symbols.create(elem->string_view()), nullptr);
|
||||||
}
|
}
|
||||||
std::sort(names.begin(), names.end());
|
std::sort(names.begin(), names.end());
|
||||||
|
|
||||||
|
@ -2991,7 +3000,7 @@ static RegisterPrimOp primop_tail({
|
||||||
.name = "__tail",
|
.name = "__tail",
|
||||||
.args = {"list"},
|
.args = {"list"},
|
||||||
.doc = R"(
|
.doc = R"(
|
||||||
Return the second to last elements of a list; abort evaluation if
|
Return the list without its first item; abort evaluation if
|
||||||
the argument isn’t a list or is an empty list.
|
the argument isn’t a list or is an empty list.
|
||||||
|
|
||||||
> **Warning**
|
> **Warning**
|
||||||
|
@ -4395,9 +4404,9 @@ void EvalState::createBaseEnv()
|
||||||
addConstant("__nixPath", v, {
|
addConstant("__nixPath", v, {
|
||||||
.type = nList,
|
.type = nList,
|
||||||
.doc = R"(
|
.doc = R"(
|
||||||
The search path used to resolve angle bracket path lookups.
|
List of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md).
|
||||||
|
|
||||||
Angle bracket expressions can be
|
Lookup path expressions can be
|
||||||
[desugared](https://en.wikipedia.org/wiki/Syntactic_sugar)
|
[desugared](https://en.wikipedia.org/wiki/Syntactic_sugar)
|
||||||
using this and
|
using this and
|
||||||
[`builtins.findFile`](./builtins.html#builtins-findFile):
|
[`builtins.findFile`](./builtins.html#builtins-findFile):
|
||||||
|
|
|
@ -133,7 +133,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
|
||||||
|
|
||||||
else if (attrName == "toPath") {
|
else if (attrName == "toPath") {
|
||||||
state.forceValue(*attr.value, attr.pos);
|
state.forceValue(*attr.value, attr.pos);
|
||||||
bool isEmptyString = attr.value->type() == nString && attr.value->string.s == std::string("");
|
bool isEmptyString = attr.value->type() == nString && attr.value->string_view() == "";
|
||||||
if (isEmptyString) {
|
if (isEmptyString) {
|
||||||
toPath = StorePathOrGap {};
|
toPath = StorePathOrGap {};
|
||||||
}
|
}
|
||||||
|
|
|
@ -195,15 +195,50 @@ static void fetchTree(
|
||||||
|
|
||||||
static void prim_fetchTree(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
static void prim_fetchTree(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||||
{
|
{
|
||||||
experimentalFeatureSettings.require(Xp::Flakes);
|
|
||||||
fetchTree(state, pos, args, v, std::nullopt, FetchTreeParams { .allowNameArgument = false });
|
fetchTree(state, pos, args, v, std::nullopt, FetchTreeParams { .allowNameArgument = false });
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME: document
|
|
||||||
static RegisterPrimOp primop_fetchTree({
|
static RegisterPrimOp primop_fetchTree({
|
||||||
.name = "fetchTree",
|
.name = "fetchTree",
|
||||||
.arity = 1,
|
.args = {"input"},
|
||||||
.fun = prim_fetchTree
|
.doc = R"(
|
||||||
|
Fetch a source tree or a plain file using one of the supported backends.
|
||||||
|
*input* can be an attribute set representation of [flake reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references) or a URL.
|
||||||
|
The input should be "locked", that is, it should contain a commit hash or content hash unless impure evaluation (`--impure`) is allowed.
|
||||||
|
|
||||||
|
Here are some examples of how to use `fetchTree`:
|
||||||
|
|
||||||
|
- Fetch a GitHub repository:
|
||||||
|
|
||||||
|
```nix
|
||||||
|
builtins.fetchTree {
|
||||||
|
type = "github";
|
||||||
|
owner = "NixOS";
|
||||||
|
repo = "nixpkgs";
|
||||||
|
rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This evaluates to attribute set:
|
||||||
|
|
||||||
|
```
|
||||||
|
{
|
||||||
|
lastModified = 1686503798;
|
||||||
|
lastModifiedDate = "20230611171638";
|
||||||
|
narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc=";
|
||||||
|
outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source";
|
||||||
|
rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
|
||||||
|
shortRev = "ae2e6b3";
|
||||||
|
}
|
||||||
|
```
|
||||||
|
- Fetch a single file from a URL:
|
||||||
|
|
||||||
|
```nix
|
||||||
|
builtins.fetchTree "https://example.com/"
|
||||||
|
```
|
||||||
|
)",
|
||||||
|
.fun = prim_fetchTree,
|
||||||
|
.experimentalFeature = Xp::Flakes,
|
||||||
});
|
});
|
||||||
|
|
||||||
static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v,
|
static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v,
|
||||||
|
|
|
@ -71,7 +71,7 @@ namespace nix {
|
||||||
if (arg.type() != nString) {
|
if (arg.type() != nString) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
return std::string_view(arg.string.s) == s;
|
return std::string_view(arg.c_str()) == s;
|
||||||
}
|
}
|
||||||
|
|
||||||
MATCHER_P(IsIntEq, v, fmt("The string is equal to \"%1%\"", v)) {
|
MATCHER_P(IsIntEq, v, fmt("The string is equal to \"%1%\"", v)) {
|
||||||
|
@ -106,8 +106,8 @@ namespace nix {
|
||||||
if (arg.type() != nPath) {
|
if (arg.type() != nPath) {
|
||||||
*result_listener << "Expected a path got " << arg.type();
|
*result_listener << "Expected a path got " << arg.type();
|
||||||
return false;
|
return false;
|
||||||
} else if (std::string_view(arg.string.s) != p) {
|
} else if (std::string_view(arg._path) != p) {
|
||||||
*result_listener << "Expected a path that equals \"" << p << "\" but got: " << arg.string.s;
|
*result_listener << "Expected a path that equals \"" << p << "\" but got: " << arg.c_str();
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
|
|
|
@ -711,14 +711,14 @@ namespace nix {
|
||||||
// FIXME: add a test that verifies the string context is as expected
|
// FIXME: add a test that verifies the string context is as expected
|
||||||
auto v = eval("builtins.replaceStrings [\"oo\" \"a\"] [\"a\" \"i\"] \"foobar\"");
|
auto v = eval("builtins.replaceStrings [\"oo\" \"a\"] [\"a\" \"i\"] \"foobar\"");
|
||||||
ASSERT_EQ(v.type(), nString);
|
ASSERT_EQ(v.type(), nString);
|
||||||
ASSERT_EQ(v.string.s, std::string_view("fabir"));
|
ASSERT_EQ(v.string_view(), "fabir");
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_F(PrimOpTest, concatStringsSep) {
|
TEST_F(PrimOpTest, concatStringsSep) {
|
||||||
// FIXME: add a test that verifies the string context is as expected
|
// FIXME: add a test that verifies the string context is as expected
|
||||||
auto v = eval("builtins.concatStringsSep \"%\" [\"foo\" \"bar\" \"baz\"]");
|
auto v = eval("builtins.concatStringsSep \"%\" [\"foo\" \"bar\" \"baz\"]");
|
||||||
ASSERT_EQ(v.type(), nString);
|
ASSERT_EQ(v.type(), nString);
|
||||||
ASSERT_EQ(std::string_view(v.string.s), "foo%bar%baz");
|
ASSERT_EQ(v.string_view(), "foo%bar%baz");
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_F(PrimOpTest, split1) {
|
TEST_F(PrimOpTest, split1) {
|
||||||
|
|
|
@ -31,7 +31,7 @@ json printValueAsJSON(EvalState & state, bool strict,
|
||||||
|
|
||||||
case nString:
|
case nString:
|
||||||
copyContext(v, context);
|
copyContext(v, context);
|
||||||
out = v.string.s;
|
out = v.c_str();
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case nPath:
|
case nPath:
|
||||||
|
|
|
@ -74,7 +74,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||||
case nString:
|
case nString:
|
||||||
/* !!! show the context? */
|
/* !!! show the context? */
|
||||||
copyContext(v, context);
|
copyContext(v, context);
|
||||||
doc.writeEmptyElement("string", singletonAttrs("value", v.string.s));
|
doc.writeEmptyElement("string", singletonAttrs("value", v.c_str()));
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case nPath:
|
case nPath:
|
||||||
|
@ -96,14 +96,14 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||||
if (a != v.attrs->end()) {
|
if (a != v.attrs->end()) {
|
||||||
if (strict) state.forceValue(*a->value, a->pos);
|
if (strict) state.forceValue(*a->value, a->pos);
|
||||||
if (a->value->type() == nString)
|
if (a->value->type() == nString)
|
||||||
xmlAttrs["drvPath"] = drvPath = a->value->string.s;
|
xmlAttrs["drvPath"] = drvPath = a->value->c_str();
|
||||||
}
|
}
|
||||||
|
|
||||||
a = v.attrs->find(state.sOutPath);
|
a = v.attrs->find(state.sOutPath);
|
||||||
if (a != v.attrs->end()) {
|
if (a != v.attrs->end()) {
|
||||||
if (strict) state.forceValue(*a->value, a->pos);
|
if (strict) state.forceValue(*a->value, a->pos);
|
||||||
if (a->value->type() == nString)
|
if (a->value->type() == nString)
|
||||||
xmlAttrs["outPath"] = a->value->string.s;
|
xmlAttrs["outPath"] = a->value->c_str();
|
||||||
}
|
}
|
||||||
|
|
||||||
XMLOpenElement _(doc, "derivation", xmlAttrs);
|
XMLOpenElement _(doc, "derivation", xmlAttrs);
|
||||||
|
|
|
@ -186,10 +186,9 @@ public:
|
||||||
* For canonicity, the store paths should be in sorted order.
|
* For canonicity, the store paths should be in sorted order.
|
||||||
*/
|
*/
|
||||||
struct {
|
struct {
|
||||||
const char * s;
|
const char * c_str;
|
||||||
const char * * context; // must be in sorted order
|
const char * * context; // must be in sorted order
|
||||||
} string;
|
} string;
|
||||||
|
|
||||||
const char * _path;
|
const char * _path;
|
||||||
Bindings * attrs;
|
Bindings * attrs;
|
||||||
struct {
|
struct {
|
||||||
|
@ -270,7 +269,7 @@ public:
|
||||||
inline void mkString(const char * s, const char * * context = 0)
|
inline void mkString(const char * s, const char * * context = 0)
|
||||||
{
|
{
|
||||||
internalType = tString;
|
internalType = tString;
|
||||||
string.s = s;
|
string.c_str = s;
|
||||||
string.context = context;
|
string.context = context;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -441,10 +440,21 @@ public:
|
||||||
return SourcePath{CanonPath(_path)};
|
return SourcePath{CanonPath(_path)};
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string_view str() const
|
std::string_view string_view() const
|
||||||
{
|
{
|
||||||
assert(internalType == tString);
|
assert(internalType == tString);
|
||||||
return std::string_view(string.s);
|
return std::string_view(string.c_str);
|
||||||
|
}
|
||||||
|
|
||||||
|
const char * const c_str() const
|
||||||
|
{
|
||||||
|
assert(internalType == tString);
|
||||||
|
return string.c_str;
|
||||||
|
}
|
||||||
|
|
||||||
|
const char * * context() const
|
||||||
|
{
|
||||||
|
return string.context;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -13,6 +13,12 @@
|
||||||
namespace nix::fetchers {
|
namespace nix::fetchers {
|
||||||
|
|
||||||
typedef std::variant<std::string, uint64_t, Explicit<bool>> Attr;
|
typedef std::variant<std::string, uint64_t, Explicit<bool>> Attr;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An `Attrs` can be thought of a JSON object restricted or simplified
|
||||||
|
* to be "flat", not containing any subcontainers (arrays or objects)
|
||||||
|
* and also not containing any `null`s.
|
||||||
|
*/
|
||||||
typedef std::map<std::string, Attr> Attrs;
|
typedef std::map<std::string, Attr> Attrs;
|
||||||
|
|
||||||
Attrs jsonToAttrs(const nlohmann::json & json);
|
Attrs jsonToAttrs(const nlohmann::json & json);
|
||||||
|
|
|
@ -36,6 +36,7 @@ Input Input::fromURL(const ParsedURL & url, bool requireTree)
|
||||||
for (auto & inputScheme : *inputSchemes) {
|
for (auto & inputScheme : *inputSchemes) {
|
||||||
auto res = inputScheme->inputFromURL(url, requireTree);
|
auto res = inputScheme->inputFromURL(url, requireTree);
|
||||||
if (res) {
|
if (res) {
|
||||||
|
experimentalFeatureSettings.require(inputScheme->experimentalFeature());
|
||||||
res->scheme = inputScheme;
|
res->scheme = inputScheme;
|
||||||
fixupInput(*res);
|
fixupInput(*res);
|
||||||
return std::move(*res);
|
return std::move(*res);
|
||||||
|
@ -50,6 +51,7 @@ Input Input::fromAttrs(Attrs && attrs)
|
||||||
for (auto & inputScheme : *inputSchemes) {
|
for (auto & inputScheme : *inputSchemes) {
|
||||||
auto res = inputScheme->inputFromAttrs(attrs);
|
auto res = inputScheme->inputFromAttrs(attrs);
|
||||||
if (res) {
|
if (res) {
|
||||||
|
experimentalFeatureSettings.require(inputScheme->experimentalFeature());
|
||||||
res->scheme = inputScheme;
|
res->scheme = inputScheme;
|
||||||
fixupInput(*res);
|
fixupInput(*res);
|
||||||
return std::move(*res);
|
return std::move(*res);
|
||||||
|
@ -254,7 +256,8 @@ std::optional<Hash> Input::getRev() const
|
||||||
try {
|
try {
|
||||||
hash = Hash::parseAnyPrefixed(*s);
|
hash = Hash::parseAnyPrefixed(*s);
|
||||||
} catch (BadHash &e) {
|
} catch (BadHash &e) {
|
||||||
// Default to sha1 for backwards compatibility with existing flakes
|
// Default to sha1 for backwards compatibility with existing
|
||||||
|
// usages (e.g. `builtins.fetchTree` calls or flake inputs).
|
||||||
hash = Hash::parseAny(*s, htSHA1);
|
hash = Hash::parseAny(*s, htSHA1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -308,4 +311,9 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
|
||||||
throw Error("do not know how to clone input '%s'", input.to_string());
|
throw Error("do not know how to clone input '%s'", input.to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
std::optional<ExperimentalFeature> InputScheme::experimentalFeature()
|
||||||
|
{
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -22,12 +22,11 @@ struct Tree
|
||||||
struct InputScheme;
|
struct InputScheme;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The Input object is generated by a specific fetcher, based on the
|
* The `Input` object is generated by a specific fetcher, based on
|
||||||
* user-supplied input attribute in the flake.nix file, and contains
|
* user-supplied information, and contains
|
||||||
* the information that the specific fetcher needs to perform the
|
* the information that the specific fetcher needs to perform the
|
||||||
* actual fetch. The Input object is most commonly created via the
|
* actual fetch. The Input object is most commonly created via the
|
||||||
* "fromURL()" or "fromAttrs()" static functions which are provided
|
* `fromURL()` or `fromAttrs()` static functions.
|
||||||
* the url or attrset specified in the flake file.
|
|
||||||
*/
|
*/
|
||||||
struct Input
|
struct Input
|
||||||
{
|
{
|
||||||
|
@ -44,10 +43,20 @@ struct Input
|
||||||
std::optional<Path> parent;
|
std::optional<Path> parent;
|
||||||
|
|
||||||
public:
|
public:
|
||||||
|
/**
|
||||||
|
* Create an `Input` from a URL.
|
||||||
|
*
|
||||||
|
* The URL indicate which sort of fetcher, and provides information to that fetcher.
|
||||||
|
*/
|
||||||
static Input fromURL(const std::string & url, bool requireTree = true);
|
static Input fromURL(const std::string & url, bool requireTree = true);
|
||||||
|
|
||||||
static Input fromURL(const ParsedURL & url, bool requireTree = true);
|
static Input fromURL(const ParsedURL & url, bool requireTree = true);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create an `Input` from a an `Attrs`.
|
||||||
|
*
|
||||||
|
* The URL indicate which sort of fetcher, and provides information to that fetcher.
|
||||||
|
*/
|
||||||
static Input fromAttrs(Attrs && attrs);
|
static Input fromAttrs(Attrs && attrs);
|
||||||
|
|
||||||
ParsedURL toURL() const;
|
ParsedURL toURL() const;
|
||||||
|
@ -116,13 +125,13 @@ public:
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The InputScheme represents a type of fetcher. Each fetcher
|
* The `InputScheme` represents a type of fetcher. Each fetcher
|
||||||
* registers with nix at startup time. When processing an input for a
|
* registers with nix at startup time. When processing an `Input`,
|
||||||
* flake, each scheme is given an opportunity to "recognize" that
|
* each scheme is given an opportunity to "recognize" that
|
||||||
* input from the url or attributes in the flake file's specification
|
* input from the user-provided url or attributes
|
||||||
* and return an Input object to represent the input if it is
|
* and return an `Input` object to represent the input if it is
|
||||||
* recognized. The Input object contains the information the fetcher
|
* recognized. The `Input` object contains the information the fetcher
|
||||||
* needs to actually perform the "fetch()" when called.
|
* needs to actually perform the `fetch()` when called.
|
||||||
*/
|
*/
|
||||||
struct InputScheme
|
struct InputScheme
|
||||||
{
|
{
|
||||||
|
@ -149,6 +158,11 @@ struct InputScheme
|
||||||
virtual void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg);
|
virtual void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg);
|
||||||
|
|
||||||
virtual std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) = 0;
|
virtual std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) = 0;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Is this `InputScheme` part of an experimental feature?
|
||||||
|
*/
|
||||||
|
virtual std::optional<ExperimentalFeature> experimentalFeature();
|
||||||
};
|
};
|
||||||
|
|
||||||
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
|
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
|
||||||
|
|
|
@ -98,6 +98,11 @@ struct IndirectInputScheme : InputScheme
|
||||||
{
|
{
|
||||||
throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
|
throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
std::optional<ExperimentalFeature> experimentalFeature() override
|
||||||
|
{
|
||||||
|
return Xp::Flakes;
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
static auto rIndirectInputScheme = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); });
|
static auto rIndirectInputScheme = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); });
|
||||||
|
|
|
@ -382,9 +382,9 @@ RunPager::RunPager()
|
||||||
});
|
});
|
||||||
|
|
||||||
pid.setKillSignal(SIGINT);
|
pid.setKillSignal(SIGINT);
|
||||||
stdout = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0);
|
std_out = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0);
|
||||||
if (dup2(toPager.writeSide.get(), STDOUT_FILENO) == -1)
|
if (dup2(toPager.writeSide.get(), STDOUT_FILENO) == -1)
|
||||||
throw SysError("dupping stdout");
|
throw SysError("dupping standard output");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -393,7 +393,7 @@ RunPager::~RunPager()
|
||||||
try {
|
try {
|
||||||
if (pid != -1) {
|
if (pid != -1) {
|
||||||
std::cout.flush();
|
std::cout.flush();
|
||||||
dup2(stdout, STDOUT_FILENO);
|
dup2(std_out, STDOUT_FILENO);
|
||||||
pid.wait();
|
pid.wait();
|
||||||
}
|
}
|
||||||
} catch (...) {
|
} catch (...) {
|
||||||
|
|
|
@ -85,8 +85,9 @@ struct LegacyArgs : public MixCommonArgs
|
||||||
void showManPage(const std::string & name);
|
void showManPage(const std::string & name);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The constructor of this class starts a pager if stdout is a
|
* The constructor of this class starts a pager if standard output is a
|
||||||
* terminal and $PAGER is set. Stdout is redirected to the pager.
|
* terminal and $PAGER is set. Standard output is redirected to the
|
||||||
|
* pager.
|
||||||
*/
|
*/
|
||||||
class RunPager
|
class RunPager
|
||||||
{
|
{
|
||||||
|
@ -96,7 +97,7 @@ public:
|
||||||
|
|
||||||
private:
|
private:
|
||||||
Pid pid;
|
Pid pid;
|
||||||
int stdout;
|
int std_out;
|
||||||
};
|
};
|
||||||
|
|
||||||
extern volatile ::sig_atomic_t blockInt;
|
extern volatile ::sig_atomic_t blockInt;
|
||||||
|
|
18
src/libstore/build-result.cc
Normal file
18
src/libstore/build-result.cc
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
#include "build-result.hh"
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
GENERATE_CMP_EXT(
|
||||||
|
,
|
||||||
|
BuildResult,
|
||||||
|
me->status,
|
||||||
|
me->errorMsg,
|
||||||
|
me->timesBuilt,
|
||||||
|
me->isNonDeterministic,
|
||||||
|
me->builtOutputs,
|
||||||
|
me->startTime,
|
||||||
|
me->stopTime,
|
||||||
|
me->cpuUser,
|
||||||
|
me->cpuSystem);
|
||||||
|
|
||||||
|
}
|
|
@ -3,6 +3,7 @@
|
||||||
|
|
||||||
#include "realisation.hh"
|
#include "realisation.hh"
|
||||||
#include "derived-path.hh"
|
#include "derived-path.hh"
|
||||||
|
#include "comparator.hh"
|
||||||
|
|
||||||
#include <string>
|
#include <string>
|
||||||
#include <chrono>
|
#include <chrono>
|
||||||
|
@ -100,6 +101,8 @@ struct BuildResult
|
||||||
*/
|
*/
|
||||||
std::optional<std::chrono::microseconds> cpuUser, cpuSystem;
|
std::optional<std::chrono::microseconds> cpuUser, cpuSystem;
|
||||||
|
|
||||||
|
DECLARE_CMP(BuildResult);
|
||||||
|
|
||||||
bool success()
|
bool success()
|
||||||
{
|
{
|
||||||
return status == Built || status == Substituted || status == AlreadyValid || status == ResolvesToAlreadyValid;
|
return status == Built || status == Substituted || status == AlreadyValid || status == ResolvesToAlreadyValid;
|
||||||
|
|
|
@ -1,157 +0,0 @@
|
||||||
#include "create-derivation-and-realise-goal.hh"
|
|
||||||
#include "worker.hh"
|
|
||||||
|
|
||||||
namespace nix {
|
|
||||||
|
|
||||||
CreateDerivationAndRealiseGoal::CreateDerivationAndRealiseGoal(ref<SingleDerivedPath> drvReq,
|
|
||||||
const OutputsSpec & wantedOutputs, Worker & worker, BuildMode buildMode)
|
|
||||||
: Goal(worker, DerivedPath::Built { .drvPath = drvReq, .outputs = wantedOutputs })
|
|
||||||
, drvReq(drvReq)
|
|
||||||
, wantedOutputs(wantedOutputs)
|
|
||||||
, buildMode(buildMode)
|
|
||||||
{
|
|
||||||
state = &CreateDerivationAndRealiseGoal::getDerivation;
|
|
||||||
name = fmt(
|
|
||||||
"outer obtaining drv from '%s' and then building outputs %s",
|
|
||||||
drvReq->to_string(worker.store),
|
|
||||||
std::visit(overloaded {
|
|
||||||
[&](const OutputsSpec::All) -> std::string {
|
|
||||||
return "* (all of them)";
|
|
||||||
},
|
|
||||||
[&](const OutputsSpec::Names os) {
|
|
||||||
return concatStringsSep(", ", quoteStrings(os));
|
|
||||||
},
|
|
||||||
}, wantedOutputs.raw));
|
|
||||||
trace("created outer");
|
|
||||||
|
|
||||||
worker.updateProgress();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
CreateDerivationAndRealiseGoal::~CreateDerivationAndRealiseGoal()
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
static StorePath pathPartOfReq(const SingleDerivedPath & req)
|
|
||||||
{
|
|
||||||
return std::visit(overloaded {
|
|
||||||
[&](const SingleDerivedPath::Opaque & bo) {
|
|
||||||
return bo.path;
|
|
||||||
},
|
|
||||||
[&](const SingleDerivedPath::Built & bfd) {
|
|
||||||
return pathPartOfReq(*bfd.drvPath);
|
|
||||||
},
|
|
||||||
}, req.raw());
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
std::string CreateDerivationAndRealiseGoal::key()
|
|
||||||
{
|
|
||||||
/* Ensure that derivations get built in order of their name,
|
|
||||||
i.e. a derivation named "aardvark" always comes before "baboon". And
|
|
||||||
substitution goals and inner derivation goals always happen before
|
|
||||||
derivation goals (due to "b$"). */
|
|
||||||
return "c$" + std::string(pathPartOfReq(*drvReq).name()) + "$" + drvReq->to_string(worker.store);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void CreateDerivationAndRealiseGoal::timedOut(Error && ex)
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void CreateDerivationAndRealiseGoal::work()
|
|
||||||
{
|
|
||||||
(this->*state)();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void CreateDerivationAndRealiseGoal::addWantedOutputs(const OutputsSpec & outputs)
|
|
||||||
{
|
|
||||||
/* If we already want all outputs, there is nothing to do. */
|
|
||||||
auto newWanted = wantedOutputs.union_(outputs);
|
|
||||||
bool needRestart = !newWanted.isSubsetOf(wantedOutputs);
|
|
||||||
wantedOutputs = newWanted;
|
|
||||||
|
|
||||||
if (!needRestart) return;
|
|
||||||
|
|
||||||
if (!optDrvPath)
|
|
||||||
// haven't started steps where the outputs matter yet
|
|
||||||
return;
|
|
||||||
worker.makeDerivationGoal(*optDrvPath, outputs, buildMode);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void CreateDerivationAndRealiseGoal::getDerivation()
|
|
||||||
{
|
|
||||||
trace("outer init");
|
|
||||||
|
|
||||||
/* The first thing to do is to make sure that the derivation
|
|
||||||
exists. If it doesn't, it may be created through a
|
|
||||||
substitute. */
|
|
||||||
if (auto optDrvPath = [this]() -> std::optional<StorePath> {
|
|
||||||
if (buildMode != bmNormal) return std::nullopt;
|
|
||||||
|
|
||||||
auto drvPath = StorePath::dummy;
|
|
||||||
try {
|
|
||||||
drvPath = resolveDerivedPath(worker.store, *drvReq);
|
|
||||||
} catch (MissingRealisation &) {
|
|
||||||
return std::nullopt;
|
|
||||||
}
|
|
||||||
return worker.evalStore.isValidPath(drvPath) || worker.store.isValidPath(drvPath)
|
|
||||||
? std::optional { drvPath }
|
|
||||||
: std::nullopt;
|
|
||||||
}()) {
|
|
||||||
trace(fmt("already have drv '%s' for '%s', can go straight to building",
|
|
||||||
worker.store.printStorePath(*optDrvPath),
|
|
||||||
drvReq->to_string(worker.store)));
|
|
||||||
|
|
||||||
loadAndBuildDerivation();
|
|
||||||
} else {
|
|
||||||
trace("need to obtain drv we want to build");
|
|
||||||
|
|
||||||
addWaitee(worker.makeGoal(DerivedPath::fromSingle(*drvReq)));
|
|
||||||
|
|
||||||
state = &CreateDerivationAndRealiseGoal::loadAndBuildDerivation;
|
|
||||||
if (waitees.empty()) work();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void CreateDerivationAndRealiseGoal::loadAndBuildDerivation()
|
|
||||||
{
|
|
||||||
trace("outer load and build derivation");
|
|
||||||
|
|
||||||
if (nrFailed != 0) {
|
|
||||||
amDone(ecFailed, Error("cannot build missing derivation '%s'", drvReq->to_string(worker.store)));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
StorePath drvPath = resolveDerivedPath(worker.store, *drvReq);
|
|
||||||
/* Build this step! */
|
|
||||||
concreteDrvGoal = worker.makeDerivationGoal(drvPath, wantedOutputs, buildMode);
|
|
||||||
addWaitee(upcast_goal(concreteDrvGoal));
|
|
||||||
state = &CreateDerivationAndRealiseGoal::buildDone;
|
|
||||||
optDrvPath = std::move(drvPath);
|
|
||||||
if (waitees.empty()) work();
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void CreateDerivationAndRealiseGoal::buildDone()
|
|
||||||
{
|
|
||||||
trace("outer build done");
|
|
||||||
|
|
||||||
buildResult = upcast_goal(concreteDrvGoal)->getBuildResult(DerivedPath::Built {
|
|
||||||
.drvPath = drvReq,
|
|
||||||
.outputs = wantedOutputs,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (buildResult.success())
|
|
||||||
amDone(ecSuccess);
|
|
||||||
else
|
|
||||||
amDone(ecFailed, Error("building '%s' failed", drvReq->to_string(worker.store)));
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,96 +0,0 @@
|
||||||
#pragma once
|
|
||||||
|
|
||||||
#include "parsed-derivations.hh"
|
|
||||||
#include "lock.hh"
|
|
||||||
#include "store-api.hh"
|
|
||||||
#include "pathlocks.hh"
|
|
||||||
#include "goal.hh"
|
|
||||||
|
|
||||||
namespace nix {
|
|
||||||
|
|
||||||
struct DerivationGoal;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This goal type is essentially the serial composition (like function
|
|
||||||
* composition) of a goal for getting a derivation, and then a
|
|
||||||
* `DerivationGoal` using the newly-obtained derivation.
|
|
||||||
*
|
|
||||||
* In the (currently experimental) general inductive case of derivations
|
|
||||||
* that are themselves build outputs, that first goal will be *another*
|
|
||||||
* `CreateDerivationAndRealiseGoal`. In the (much more common) base-case
|
|
||||||
* where the derivation has no provence and is just referred to by
|
|
||||||
* (content-addressed) store path, that first goal is a
|
|
||||||
* `SubstitutionGoal`.
|
|
||||||
*
|
|
||||||
* If we already have the derivation (e.g. if the evalutator has created
|
|
||||||
* the derivation locally and then instructured the store to build it),
|
|
||||||
* we can skip the first goal entirely as a small optimization.
|
|
||||||
*/
|
|
||||||
struct CreateDerivationAndRealiseGoal : public Goal
|
|
||||||
{
|
|
||||||
/**
|
|
||||||
* How to obtain a store path of the derivation to build.
|
|
||||||
*/
|
|
||||||
ref<SingleDerivedPath> drvReq;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The path of the derivation, once obtained.
|
|
||||||
**/
|
|
||||||
std::optional<StorePath> optDrvPath;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The goal for the corresponding concrete derivation.
|
|
||||||
**/
|
|
||||||
std::shared_ptr<DerivationGoal> concreteDrvGoal;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The specific outputs that we need to build.
|
|
||||||
*/
|
|
||||||
OutputsSpec wantedOutputs;
|
|
||||||
|
|
||||||
typedef void (CreateDerivationAndRealiseGoal::*GoalState)();
|
|
||||||
GoalState state;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The final output paths of the build.
|
|
||||||
*
|
|
||||||
* - For input-addressed derivations, always the precomputed paths
|
|
||||||
*
|
|
||||||
* - For content-addressed derivations, calcuated from whatever the
|
|
||||||
* hash ends up being. (Note that fixed outputs derivations that
|
|
||||||
* produce the "wrong" output still install that data under its
|
|
||||||
* true content-address.)
|
|
||||||
*/
|
|
||||||
OutputPathMap finalOutputs;
|
|
||||||
|
|
||||||
BuildMode buildMode;
|
|
||||||
|
|
||||||
CreateDerivationAndRealiseGoal(ref<SingleDerivedPath> drvReq,
|
|
||||||
const OutputsSpec & wantedOutputs, Worker & worker,
|
|
||||||
BuildMode buildMode = bmNormal);
|
|
||||||
virtual ~CreateDerivationAndRealiseGoal();
|
|
||||||
|
|
||||||
void timedOut(Error && ex) override;
|
|
||||||
|
|
||||||
std::string key() override;
|
|
||||||
|
|
||||||
void work() override;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add wanted outputs to an already existing derivation goal.
|
|
||||||
*/
|
|
||||||
void addWantedOutputs(const OutputsSpec & outputs);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The states.
|
|
||||||
*/
|
|
||||||
void getDerivation();
|
|
||||||
void loadAndBuildDerivation();
|
|
||||||
void buildDone();
|
|
||||||
|
|
||||||
JobCategory jobCategory() const override {
|
|
||||||
return JobCategory::Administration;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
}
|
|
|
@ -8,8 +8,8 @@
|
||||||
#include "util.hh"
|
#include "util.hh"
|
||||||
#include "archive.hh"
|
#include "archive.hh"
|
||||||
#include "compression.hh"
|
#include "compression.hh"
|
||||||
#include "worker-protocol.hh"
|
#include "common-protocol.hh"
|
||||||
#include "worker-protocol-impl.hh"
|
#include "common-protocol-impl.hh"
|
||||||
#include "topo-sort.hh"
|
#include "topo-sort.hh"
|
||||||
#include "callback.hh"
|
#include "callback.hh"
|
||||||
#include "local-store.hh" // TODO remove, along with remaining downcasts
|
#include "local-store.hh" // TODO remove, along with remaining downcasts
|
||||||
|
@ -71,7 +71,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath,
|
||||||
, wantedOutputs(wantedOutputs)
|
, wantedOutputs(wantedOutputs)
|
||||||
, buildMode(buildMode)
|
, buildMode(buildMode)
|
||||||
{
|
{
|
||||||
state = &DerivationGoal::loadDerivation;
|
state = &DerivationGoal::getDerivation;
|
||||||
name = fmt(
|
name = fmt(
|
||||||
"building of '%s' from .drv file",
|
"building of '%s' from .drv file",
|
||||||
DerivedPath::Built { makeConstantStorePathRef(drvPath), wantedOutputs }.to_string(worker.store));
|
DerivedPath::Built { makeConstantStorePathRef(drvPath), wantedOutputs }.to_string(worker.store));
|
||||||
|
@ -164,6 +164,24 @@ void DerivationGoal::addWantedOutputs(const OutputsSpec & outputs)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void DerivationGoal::getDerivation()
|
||||||
|
{
|
||||||
|
trace("init");
|
||||||
|
|
||||||
|
/* The first thing to do is to make sure that the derivation
|
||||||
|
exists. If it doesn't, it may be created through a
|
||||||
|
substitute. */
|
||||||
|
if (buildMode == bmNormal && worker.evalStore.isValidPath(drvPath)) {
|
||||||
|
loadDerivation();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
addWaitee(upcast_goal(worker.makePathSubstitutionGoal(drvPath)));
|
||||||
|
|
||||||
|
state = &DerivationGoal::loadDerivation;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
void DerivationGoal::loadDerivation()
|
void DerivationGoal::loadDerivation()
|
||||||
{
|
{
|
||||||
trace("loading derivation");
|
trace("loading derivation");
|
||||||
|
@ -1167,11 +1185,11 @@ HookReply DerivationGoal::tryBuildHook()
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
|
|
||||||
WorkerProto::WriteConn conn { hook->sink };
|
CommonProto::WriteConn conn { hook->sink };
|
||||||
|
|
||||||
/* Tell the hook all the inputs that have to be copied to the
|
/* Tell the hook all the inputs that have to be copied to the
|
||||||
remote system. */
|
remote system. */
|
||||||
WorkerProto::write(worker.store, conn, inputPaths);
|
CommonProto::write(worker.store, conn, inputPaths);
|
||||||
|
|
||||||
/* Tell the hooks the missing outputs that have to be copied back
|
/* Tell the hooks the missing outputs that have to be copied back
|
||||||
from the remote system. */
|
from the remote system. */
|
||||||
|
@ -1182,7 +1200,7 @@ HookReply DerivationGoal::tryBuildHook()
|
||||||
if (buildMode != bmCheck && status.known && status.known->isValid()) continue;
|
if (buildMode != bmCheck && status.known && status.known->isValid()) continue;
|
||||||
missingOutputs.insert(outputName);
|
missingOutputs.insert(outputName);
|
||||||
}
|
}
|
||||||
WorkerProto::write(worker.store, conn, missingOutputs);
|
CommonProto::write(worker.store, conn, missingOutputs);
|
||||||
}
|
}
|
||||||
|
|
||||||
hook->sink = FdSink();
|
hook->sink = FdSink();
|
||||||
|
@ -1498,24 +1516,23 @@ void DerivationGoal::waiteeDone(GoalPtr waitee, ExitCode result)
|
||||||
if (!useDerivation) return;
|
if (!useDerivation) return;
|
||||||
auto & fullDrv = *dynamic_cast<Derivation *>(drv.get());
|
auto & fullDrv = *dynamic_cast<Derivation *>(drv.get());
|
||||||
|
|
||||||
std::optional info = tryGetConcreteDrvGoal(waitee);
|
auto * dg = dynamic_cast<DerivationGoal *>(&*waitee);
|
||||||
if (!info) return;
|
if (!dg) return;
|
||||||
const auto & [dg, drvReq] = *info;
|
|
||||||
|
|
||||||
auto * nodeP = fullDrv.inputDrvs.findSlot(drvReq.get());
|
auto * nodeP = fullDrv.inputDrvs.findSlot(DerivedPath::Opaque { .path = dg->drvPath });
|
||||||
if (!nodeP) return;
|
if (!nodeP) return;
|
||||||
auto & outputs = nodeP->value;
|
auto & outputs = nodeP->value;
|
||||||
|
|
||||||
for (auto & outputName : outputs) {
|
for (auto & outputName : outputs) {
|
||||||
auto buildResult = dg.get().getBuildResult(DerivedPath::Built {
|
auto buildResult = dg->getBuildResult(DerivedPath::Built {
|
||||||
.drvPath = makeConstantStorePathRef(dg.get().drvPath),
|
.drvPath = makeConstantStorePathRef(dg->drvPath),
|
||||||
.outputs = OutputsSpec::Names { outputName },
|
.outputs = OutputsSpec::Names { outputName },
|
||||||
});
|
});
|
||||||
if (buildResult.success()) {
|
if (buildResult.success()) {
|
||||||
auto i = buildResult.builtOutputs.find(outputName);
|
auto i = buildResult.builtOutputs.find(outputName);
|
||||||
if (i != buildResult.builtOutputs.end())
|
if (i != buildResult.builtOutputs.end())
|
||||||
inputDrvOutputs.insert_or_assign(
|
inputDrvOutputs.insert_or_assign(
|
||||||
{ dg.get().drvPath, outputName },
|
{ dg->drvPath, outputName },
|
||||||
i->second.outPath);
|
i->second.outPath);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -52,10 +52,6 @@ struct InitialOutput {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A goal for building some or all of the outputs of a derivation.
|
* A goal for building some or all of the outputs of a derivation.
|
||||||
*
|
|
||||||
* The derivation must already be present, either in the store in a drv
|
|
||||||
* or in memory. If the derivation itself needs to be gotten first, a
|
|
||||||
* `CreateDerivationAndRealiseGoal` goal must be used instead.
|
|
||||||
*/
|
*/
|
||||||
struct DerivationGoal : public Goal
|
struct DerivationGoal : public Goal
|
||||||
{
|
{
|
||||||
|
@ -235,6 +231,7 @@ struct DerivationGoal : public Goal
|
||||||
/**
|
/**
|
||||||
* The states.
|
* The states.
|
||||||
*/
|
*/
|
||||||
|
void getDerivation();
|
||||||
void loadDerivation();
|
void loadDerivation();
|
||||||
void haveDerivation();
|
void haveDerivation();
|
||||||
void outputsSubstitutionTried();
|
void outputsSubstitutionTried();
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
#include "worker.hh"
|
#include "worker.hh"
|
||||||
#include "substitution-goal.hh"
|
#include "substitution-goal.hh"
|
||||||
#include "create-derivation-and-realise-goal.hh"
|
|
||||||
#include "derivation-goal.hh"
|
#include "derivation-goal.hh"
|
||||||
#include "local-store.hh"
|
#include "local-store.hh"
|
||||||
|
|
||||||
|
@ -16,7 +15,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
||||||
|
|
||||||
worker.run(goals);
|
worker.run(goals);
|
||||||
|
|
||||||
StringSet failed;
|
StorePathSet failed;
|
||||||
std::optional<Error> ex;
|
std::optional<Error> ex;
|
||||||
for (auto & i : goals) {
|
for (auto & i : goals) {
|
||||||
if (i->ex) {
|
if (i->ex) {
|
||||||
|
@ -26,10 +25,10 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
||||||
ex = std::move(i->ex);
|
ex = std::move(i->ex);
|
||||||
}
|
}
|
||||||
if (i->exitCode != Goal::ecSuccess) {
|
if (i->exitCode != Goal::ecSuccess) {
|
||||||
if (auto i2 = dynamic_cast<CreateDerivationAndRealiseGoal *>(i.get()))
|
if (auto i2 = dynamic_cast<DerivationGoal *>(i.get()))
|
||||||
failed.insert(i2->drvReq->to_string(*this));
|
failed.insert(i2->drvPath);
|
||||||
else if (auto i2 = dynamic_cast<PathSubstitutionGoal *>(i.get()))
|
else if (auto i2 = dynamic_cast<PathSubstitutionGoal *>(i.get()))
|
||||||
failed.insert(printStorePath(i2->storePath));
|
failed.insert(i2->storePath);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -38,7 +37,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
||||||
throw std::move(*ex);
|
throw std::move(*ex);
|
||||||
} else if (!failed.empty()) {
|
} else if (!failed.empty()) {
|
||||||
if (ex) logError(ex->info());
|
if (ex) logError(ex->info());
|
||||||
throw Error(worker.failingExitStatus(), "build of %s failed", concatStringsSep(", ", quoteStrings(failed)));
|
throw Error(worker.failingExitStatus(), "build of %s failed", showPaths(failed));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -49,16 +49,6 @@ enum struct JobCategory {
|
||||||
* A substitution an arbitrary store object; it will use network resources.
|
* A substitution an arbitrary store object; it will use network resources.
|
||||||
*/
|
*/
|
||||||
Substitution,
|
Substitution,
|
||||||
/**
|
|
||||||
* A goal that does no "real" work by itself, and just exists to depend on
|
|
||||||
* other goals which *do* do real work. These goals therefore are not
|
|
||||||
* limited.
|
|
||||||
*
|
|
||||||
* These goals cannot infinitely create themselves, so there is no risk of
|
|
||||||
* a "fork bomb" type situation (which would be a problem even though the
|
|
||||||
* goal do no real work) either.
|
|
||||||
*/
|
|
||||||
Administration,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
struct Goal : public std::enable_shared_from_this<Goal>
|
struct Goal : public std::enable_shared_from_this<Goal>
|
||||||
|
|
|
@ -1135,8 +1135,18 @@ void LocalDerivationGoal::initEnv()
|
||||||
fixed-output derivations is by definition pure (since we
|
fixed-output derivations is by definition pure (since we
|
||||||
already know the cryptographic hash of the output). */
|
already know the cryptographic hash of the output). */
|
||||||
if (!derivationType->isSandboxed()) {
|
if (!derivationType->isSandboxed()) {
|
||||||
for (auto & i : parsedDrv->getStringsAttr("impureEnvVars").value_or(Strings()))
|
auto & impureEnv = settings.impureEnv.get();
|
||||||
env[i] = getEnv(i).value_or("");
|
if (!impureEnv.empty())
|
||||||
|
experimentalFeatureSettings.require(Xp::ConfigurableImpureEnv);
|
||||||
|
|
||||||
|
for (auto & i : parsedDrv->getStringsAttr("impureEnvVars").value_or(Strings())) {
|
||||||
|
auto envVar = impureEnv.find(i);
|
||||||
|
if (envVar != impureEnv.end()) {
|
||||||
|
env[i] = envVar->second;
|
||||||
|
} else {
|
||||||
|
env[i] = getEnv(i).value_or("");
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Currently structured log messages piggyback on stderr, but we
|
/* Currently structured log messages piggyback on stderr, but we
|
||||||
|
|
|
@ -2,7 +2,6 @@
|
||||||
#include "worker.hh"
|
#include "worker.hh"
|
||||||
#include "substitution-goal.hh"
|
#include "substitution-goal.hh"
|
||||||
#include "drv-output-substitution-goal.hh"
|
#include "drv-output-substitution-goal.hh"
|
||||||
#include "create-derivation-and-realise-goal.hh"
|
|
||||||
#include "local-derivation-goal.hh"
|
#include "local-derivation-goal.hh"
|
||||||
#include "hook-instance.hh"
|
#include "hook-instance.hh"
|
||||||
|
|
||||||
|
@ -42,24 +41,6 @@ Worker::~Worker()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
std::shared_ptr<CreateDerivationAndRealiseGoal> Worker::makeCreateDerivationAndRealiseGoal(
|
|
||||||
ref<SingleDerivedPath> drvReq,
|
|
||||||
const OutputsSpec & wantedOutputs,
|
|
||||||
BuildMode buildMode)
|
|
||||||
{
|
|
||||||
std::weak_ptr<CreateDerivationAndRealiseGoal> & goal_weak = outerDerivationGoals.ensureSlot(*drvReq).value;
|
|
||||||
std::shared_ptr<CreateDerivationAndRealiseGoal> goal = goal_weak.lock();
|
|
||||||
if (!goal) {
|
|
||||||
goal = std::make_shared<CreateDerivationAndRealiseGoal>(drvReq, wantedOutputs, *this, buildMode);
|
|
||||||
goal_weak = goal;
|
|
||||||
wakeUp(goal);
|
|
||||||
} else {
|
|
||||||
goal->addWantedOutputs(wantedOutputs);
|
|
||||||
}
|
|
||||||
return goal;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
|
std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
|
||||||
const StorePath & drvPath,
|
const StorePath & drvPath,
|
||||||
const OutputsSpec & wantedOutputs,
|
const OutputsSpec & wantedOutputs,
|
||||||
|
@ -130,7 +111,10 @@ GoalPtr Worker::makeGoal(const DerivedPath & req, BuildMode buildMode)
|
||||||
{
|
{
|
||||||
return std::visit(overloaded {
|
return std::visit(overloaded {
|
||||||
[&](const DerivedPath::Built & bfd) -> GoalPtr {
|
[&](const DerivedPath::Built & bfd) -> GoalPtr {
|
||||||
return makeCreateDerivationAndRealiseGoal(bfd.drvPath, bfd.outputs, buildMode);
|
if (auto bop = std::get_if<DerivedPath::Opaque>(&*bfd.drvPath))
|
||||||
|
return makeDerivationGoal(bop->path, bfd.outputs, buildMode);
|
||||||
|
else
|
||||||
|
throw UnimplementedError("Building dynamic derivations in one shot is not yet implemented.");
|
||||||
},
|
},
|
||||||
[&](const DerivedPath::Opaque & bo) -> GoalPtr {
|
[&](const DerivedPath::Opaque & bo) -> GoalPtr {
|
||||||
return makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair);
|
return makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair);
|
||||||
|
@ -139,46 +123,24 @@ GoalPtr Worker::makeGoal(const DerivedPath & req, BuildMode buildMode)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
template<typename K, typename V, typename F>
|
|
||||||
static void cullMap(std::map<K, V> & goalMap, F f)
|
|
||||||
{
|
|
||||||
for (auto i = goalMap.begin(); i != goalMap.end();)
|
|
||||||
if (!f(i->second))
|
|
||||||
i = goalMap.erase(i);
|
|
||||||
else ++i;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
template<typename K, typename G>
|
template<typename K, typename G>
|
||||||
static void removeGoal(std::shared_ptr<G> goal, std::map<K, std::weak_ptr<G>> & goalMap)
|
static void removeGoal(std::shared_ptr<G> goal, std::map<K, std::weak_ptr<G>> & goalMap)
|
||||||
{
|
{
|
||||||
/* !!! inefficient */
|
/* !!! inefficient */
|
||||||
cullMap(goalMap, [&](const std::weak_ptr<G> & gp) -> bool {
|
for (auto i = goalMap.begin();
|
||||||
return gp.lock() != goal;
|
i != goalMap.end(); )
|
||||||
});
|
if (i->second.lock() == goal) {
|
||||||
}
|
auto j = i; ++j;
|
||||||
|
goalMap.erase(i);
|
||||||
template<typename K>
|
i = j;
|
||||||
static void removeGoal(std::shared_ptr<CreateDerivationAndRealiseGoal> goal, std::map<K, DerivedPathMap<std::weak_ptr<CreateDerivationAndRealiseGoal>>::ChildNode> & goalMap);
|
}
|
||||||
|
else ++i;
|
||||||
template<typename K>
|
|
||||||
static void removeGoal(std::shared_ptr<CreateDerivationAndRealiseGoal> goal, std::map<K, DerivedPathMap<std::weak_ptr<CreateDerivationAndRealiseGoal>>::ChildNode> & goalMap)
|
|
||||||
{
|
|
||||||
/* !!! inefficient */
|
|
||||||
cullMap(goalMap, [&](DerivedPathMap<std::weak_ptr<CreateDerivationAndRealiseGoal>>::ChildNode & node) -> bool {
|
|
||||||
if (node.value.lock() == goal)
|
|
||||||
node.value.reset();
|
|
||||||
removeGoal(goal, node.childMap);
|
|
||||||
return !node.value.expired() || !node.childMap.empty();
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
void Worker::removeGoal(GoalPtr goal)
|
void Worker::removeGoal(GoalPtr goal)
|
||||||
{
|
{
|
||||||
if (auto drvGoal = std::dynamic_pointer_cast<CreateDerivationAndRealiseGoal>(goal))
|
if (auto drvGoal = std::dynamic_pointer_cast<DerivationGoal>(goal))
|
||||||
nix::removeGoal(drvGoal, outerDerivationGoals.map);
|
|
||||||
else if (auto drvGoal = std::dynamic_pointer_cast<DerivationGoal>(goal))
|
|
||||||
nix::removeGoal(drvGoal, derivationGoals);
|
nix::removeGoal(drvGoal, derivationGoals);
|
||||||
else if (auto subGoal = std::dynamic_pointer_cast<PathSubstitutionGoal>(goal))
|
else if (auto subGoal = std::dynamic_pointer_cast<PathSubstitutionGoal>(goal))
|
||||||
nix::removeGoal(subGoal, substitutionGoals);
|
nix::removeGoal(subGoal, substitutionGoals);
|
||||||
|
@ -236,19 +198,8 @@ void Worker::childStarted(GoalPtr goal, const std::set<int> & fds,
|
||||||
child.respectTimeouts = respectTimeouts;
|
child.respectTimeouts = respectTimeouts;
|
||||||
children.emplace_back(child);
|
children.emplace_back(child);
|
||||||
if (inBuildSlot) {
|
if (inBuildSlot) {
|
||||||
switch (goal->jobCategory()) {
|
if (goal->jobCategory() == JobCategory::Substitution) nrSubstitutions++;
|
||||||
case JobCategory::Substitution:
|
else nrLocalBuilds++;
|
||||||
nrSubstitutions++;
|
|
||||||
break;
|
|
||||||
case JobCategory::Build:
|
|
||||||
nrLocalBuilds++;
|
|
||||||
break;
|
|
||||||
case JobCategory::Administration:
|
|
||||||
/* Intentionally not limited, see docs */
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
abort();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -260,20 +211,12 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers)
|
||||||
if (i == children.end()) return;
|
if (i == children.end()) return;
|
||||||
|
|
||||||
if (i->inBuildSlot) {
|
if (i->inBuildSlot) {
|
||||||
switch (goal->jobCategory()) {
|
if (goal->jobCategory() == JobCategory::Substitution) {
|
||||||
case JobCategory::Substitution:
|
|
||||||
assert(nrSubstitutions > 0);
|
assert(nrSubstitutions > 0);
|
||||||
nrSubstitutions--;
|
nrSubstitutions--;
|
||||||
break;
|
} else {
|
||||||
case JobCategory::Build:
|
|
||||||
assert(nrLocalBuilds > 0);
|
assert(nrLocalBuilds > 0);
|
||||||
nrLocalBuilds--;
|
nrLocalBuilds--;
|
||||||
break;
|
|
||||||
case JobCategory::Administration:
|
|
||||||
/* Intentionally not limited, see docs */
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
abort();
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -324,9 +267,9 @@ void Worker::run(const Goals & _topGoals)
|
||||||
|
|
||||||
for (auto & i : _topGoals) {
|
for (auto & i : _topGoals) {
|
||||||
topGoals.insert(i);
|
topGoals.insert(i);
|
||||||
if (auto goal = dynamic_cast<CreateDerivationAndRealiseGoal *>(i.get())) {
|
if (auto goal = dynamic_cast<DerivationGoal *>(i.get())) {
|
||||||
topPaths.push_back(DerivedPath::Built {
|
topPaths.push_back(DerivedPath::Built {
|
||||||
.drvPath = goal->drvReq,
|
.drvPath = makeConstantStorePathRef(goal->drvPath),
|
||||||
.outputs = goal->wantedOutputs,
|
.outputs = goal->wantedOutputs,
|
||||||
});
|
});
|
||||||
} else if (auto goal = dynamic_cast<PathSubstitutionGoal *>(i.get())) {
|
} else if (auto goal = dynamic_cast<PathSubstitutionGoal *>(i.get())) {
|
||||||
|
@ -589,19 +532,4 @@ GoalPtr upcast_goal(std::shared_ptr<DrvOutputSubstitutionGoal> subGoal)
|
||||||
return subGoal;
|
return subGoal;
|
||||||
}
|
}
|
||||||
|
|
||||||
GoalPtr upcast_goal(std::shared_ptr<DerivationGoal> subGoal)
|
|
||||||
{
|
|
||||||
return subGoal;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::optional<std::pair<std::reference_wrapper<const DerivationGoal>, std::reference_wrapper<const SingleDerivedPath>>> tryGetConcreteDrvGoal(GoalPtr waitee)
|
|
||||||
{
|
|
||||||
auto * odg = dynamic_cast<CreateDerivationAndRealiseGoal *>(&*waitee);
|
|
||||||
if (!odg) return std::nullopt;
|
|
||||||
return {{
|
|
||||||
std::cref(*odg->concreteDrvGoal),
|
|
||||||
std::cref(*odg->drvReq),
|
|
||||||
}};
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -4,7 +4,6 @@
|
||||||
#include "types.hh"
|
#include "types.hh"
|
||||||
#include "lock.hh"
|
#include "lock.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "derived-path-map.hh"
|
|
||||||
#include "goal.hh"
|
#include "goal.hh"
|
||||||
#include "realisation.hh"
|
#include "realisation.hh"
|
||||||
|
|
||||||
|
@ -14,7 +13,6 @@
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
/* Forward definition. */
|
/* Forward definition. */
|
||||||
struct CreateDerivationAndRealiseGoal;
|
|
||||||
struct DerivationGoal;
|
struct DerivationGoal;
|
||||||
struct PathSubstitutionGoal;
|
struct PathSubstitutionGoal;
|
||||||
class DrvOutputSubstitutionGoal;
|
class DrvOutputSubstitutionGoal;
|
||||||
|
@ -33,25 +31,9 @@ class DrvOutputSubstitutionGoal;
|
||||||
*/
|
*/
|
||||||
GoalPtr upcast_goal(std::shared_ptr<PathSubstitutionGoal> subGoal);
|
GoalPtr upcast_goal(std::shared_ptr<PathSubstitutionGoal> subGoal);
|
||||||
GoalPtr upcast_goal(std::shared_ptr<DrvOutputSubstitutionGoal> subGoal);
|
GoalPtr upcast_goal(std::shared_ptr<DrvOutputSubstitutionGoal> subGoal);
|
||||||
GoalPtr upcast_goal(std::shared_ptr<DerivationGoal> subGoal);
|
|
||||||
|
|
||||||
typedef std::chrono::time_point<std::chrono::steady_clock> steady_time_point;
|
typedef std::chrono::time_point<std::chrono::steady_clock> steady_time_point;
|
||||||
|
|
||||||
/**
|
|
||||||
* The current implementation of impure derivations has
|
|
||||||
* `DerivationGoal`s accumulate realisations from their waitees.
|
|
||||||
* Unfortunately, `DerivationGoal`s don't directly depend on other
|
|
||||||
* goals, but instead depend on `CreateDerivationAndRealiseGoal`s.
|
|
||||||
*
|
|
||||||
* We try not to share any of the details of any goal type with any
|
|
||||||
* other, for sake of modularity and quicker rebuilds. This means we
|
|
||||||
* cannot "just" downcast and fish out the field. So as an escape hatch,
|
|
||||||
* we have made the function, written in `worker.cc` where all the goal
|
|
||||||
* types are visible, and use it instead.
|
|
||||||
*/
|
|
||||||
|
|
||||||
std::optional<std::pair<std::reference_wrapper<const DerivationGoal>, std::reference_wrapper<const SingleDerivedPath>>> tryGetConcreteDrvGoal(GoalPtr waitee);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A mapping used to remember for each child process to what goal it
|
* A mapping used to remember for each child process to what goal it
|
||||||
* belongs, and file descriptors for receiving log data and output
|
* belongs, and file descriptors for receiving log data and output
|
||||||
|
@ -119,9 +101,6 @@ private:
|
||||||
* Maps used to prevent multiple instantiations of a goal for the
|
* Maps used to prevent multiple instantiations of a goal for the
|
||||||
* same derivation / path.
|
* same derivation / path.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
DerivedPathMap<std::weak_ptr<CreateDerivationAndRealiseGoal>> outerDerivationGoals;
|
|
||||||
|
|
||||||
std::map<StorePath, std::weak_ptr<DerivationGoal>> derivationGoals;
|
std::map<StorePath, std::weak_ptr<DerivationGoal>> derivationGoals;
|
||||||
std::map<StorePath, std::weak_ptr<PathSubstitutionGoal>> substitutionGoals;
|
std::map<StorePath, std::weak_ptr<PathSubstitutionGoal>> substitutionGoals;
|
||||||
std::map<DrvOutput, std::weak_ptr<DrvOutputSubstitutionGoal>> drvOutputSubstitutionGoals;
|
std::map<DrvOutput, std::weak_ptr<DrvOutputSubstitutionGoal>> drvOutputSubstitutionGoals;
|
||||||
|
@ -209,9 +188,6 @@ public:
|
||||||
* @ref DerivationGoal "derivation goal"
|
* @ref DerivationGoal "derivation goal"
|
||||||
*/
|
*/
|
||||||
private:
|
private:
|
||||||
std::shared_ptr<CreateDerivationAndRealiseGoal> makeCreateDerivationAndRealiseGoal(
|
|
||||||
ref<SingleDerivedPath> drvPath,
|
|
||||||
const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal);
|
|
||||||
std::shared_ptr<DerivationGoal> makeDerivationGoalCommon(
|
std::shared_ptr<DerivationGoal> makeDerivationGoalCommon(
|
||||||
const StorePath & drvPath, const OutputsSpec & wantedOutputs,
|
const StorePath & drvPath, const OutputsSpec & wantedOutputs,
|
||||||
std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal);
|
std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal);
|
||||||
|
|
41
src/libstore/common-protocol-impl.hh
Normal file
41
src/libstore/common-protocol-impl.hh
Normal file
|
@ -0,0 +1,41 @@
|
||||||
|
#pragma once
|
||||||
|
/**
|
||||||
|
* @file
|
||||||
|
*
|
||||||
|
* Template implementations (as opposed to mere declarations).
|
||||||
|
*
|
||||||
|
* This file is an exmample of the "impl.hh" pattern. See the
|
||||||
|
* contributing guide.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "common-protocol.hh"
|
||||||
|
#include "length-prefixed-protocol-helper.hh"
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
/* protocol-agnostic templates */
|
||||||
|
|
||||||
|
#define COMMON_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \
|
||||||
|
TEMPLATE T CommonProto::Serialise< T >::read(const Store & store, CommonProto::ReadConn conn) \
|
||||||
|
{ \
|
||||||
|
return LengthPrefixedProtoHelper<CommonProto, T >::read(store, conn); \
|
||||||
|
} \
|
||||||
|
TEMPLATE void CommonProto::Serialise< T >::write(const Store & store, CommonProto::WriteConn conn, const T & t) \
|
||||||
|
{ \
|
||||||
|
LengthPrefixedProtoHelper<CommonProto, T >::write(store, conn, t); \
|
||||||
|
}
|
||||||
|
|
||||||
|
COMMON_USE_LENGTH_PREFIX_SERIALISER(template<typename T>, std::vector<T>)
|
||||||
|
COMMON_USE_LENGTH_PREFIX_SERIALISER(template<typename T>, std::set<T>)
|
||||||
|
COMMON_USE_LENGTH_PREFIX_SERIALISER(template<typename... Ts>, std::tuple<Ts...>)
|
||||||
|
|
||||||
|
#define COMMA_ ,
|
||||||
|
COMMON_USE_LENGTH_PREFIX_SERIALISER(
|
||||||
|
template<typename K COMMA_ typename V>,
|
||||||
|
std::map<K COMMA_ V>)
|
||||||
|
#undef COMMA_
|
||||||
|
|
||||||
|
|
||||||
|
/* protocol-specific templates */
|
||||||
|
|
||||||
|
}
|
98
src/libstore/common-protocol.cc
Normal file
98
src/libstore/common-protocol.cc
Normal file
|
@ -0,0 +1,98 @@
|
||||||
|
#include "serialise.hh"
|
||||||
|
#include "util.hh"
|
||||||
|
#include "path-with-outputs.hh"
|
||||||
|
#include "store-api.hh"
|
||||||
|
#include "build-result.hh"
|
||||||
|
#include "common-protocol.hh"
|
||||||
|
#include "common-protocol-impl.hh"
|
||||||
|
#include "archive.hh"
|
||||||
|
#include "derivations.hh"
|
||||||
|
|
||||||
|
#include <nlohmann/json.hpp>
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
/* protocol-agnostic definitions */
|
||||||
|
|
||||||
|
std::string CommonProto::Serialise<std::string>::read(const Store & store, CommonProto::ReadConn conn)
|
||||||
|
{
|
||||||
|
return readString(conn.from);
|
||||||
|
}
|
||||||
|
|
||||||
|
void CommonProto::Serialise<std::string>::write(const Store & store, CommonProto::WriteConn conn, const std::string & str)
|
||||||
|
{
|
||||||
|
conn.to << str;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
StorePath CommonProto::Serialise<StorePath>::read(const Store & store, CommonProto::ReadConn conn)
|
||||||
|
{
|
||||||
|
return store.parseStorePath(readString(conn.from));
|
||||||
|
}
|
||||||
|
|
||||||
|
void CommonProto::Serialise<StorePath>::write(const Store & store, CommonProto::WriteConn conn, const StorePath & storePath)
|
||||||
|
{
|
||||||
|
conn.to << store.printStorePath(storePath);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
ContentAddress CommonProto::Serialise<ContentAddress>::read(const Store & store, CommonProto::ReadConn conn)
|
||||||
|
{
|
||||||
|
return ContentAddress::parse(readString(conn.from));
|
||||||
|
}
|
||||||
|
|
||||||
|
void CommonProto::Serialise<ContentAddress>::write(const Store & store, CommonProto::WriteConn conn, const ContentAddress & ca)
|
||||||
|
{
|
||||||
|
conn.to << renderContentAddress(ca);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Realisation CommonProto::Serialise<Realisation>::read(const Store & store, CommonProto::ReadConn conn)
|
||||||
|
{
|
||||||
|
std::string rawInput = readString(conn.from);
|
||||||
|
return Realisation::fromJSON(
|
||||||
|
nlohmann::json::parse(rawInput),
|
||||||
|
"remote-protocol"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
void CommonProto::Serialise<Realisation>::write(const Store & store, CommonProto::WriteConn conn, const Realisation & realisation)
|
||||||
|
{
|
||||||
|
conn.to << realisation.toJSON().dump();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
DrvOutput CommonProto::Serialise<DrvOutput>::read(const Store & store, CommonProto::ReadConn conn)
|
||||||
|
{
|
||||||
|
return DrvOutput::parse(readString(conn.from));
|
||||||
|
}
|
||||||
|
|
||||||
|
void CommonProto::Serialise<DrvOutput>::write(const Store & store, CommonProto::WriteConn conn, const DrvOutput & drvOutput)
|
||||||
|
{
|
||||||
|
conn.to << drvOutput.to_string();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
std::optional<StorePath> CommonProto::Serialise<std::optional<StorePath>>::read(const Store & store, CommonProto::ReadConn conn)
|
||||||
|
{
|
||||||
|
auto s = readString(conn.from);
|
||||||
|
return s == "" ? std::optional<StorePath> {} : store.parseStorePath(s);
|
||||||
|
}
|
||||||
|
|
||||||
|
void CommonProto::Serialise<std::optional<StorePath>>::write(const Store & store, CommonProto::WriteConn conn, const std::optional<StorePath> & storePathOpt)
|
||||||
|
{
|
||||||
|
conn.to << (storePathOpt ? store.printStorePath(*storePathOpt) : "");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
std::optional<ContentAddress> CommonProto::Serialise<std::optional<ContentAddress>>::read(const Store & store, CommonProto::ReadConn conn)
|
||||||
|
{
|
||||||
|
return ContentAddress::parseOpt(readString(conn.from));
|
||||||
|
}
|
||||||
|
|
||||||
|
void CommonProto::Serialise<std::optional<ContentAddress>>::write(const Store & store, CommonProto::WriteConn conn, const std::optional<ContentAddress> & caOpt)
|
||||||
|
{
|
||||||
|
conn.to << (caOpt ? renderContentAddress(*caOpt) : "");
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
106
src/libstore/common-protocol.hh
Normal file
106
src/libstore/common-protocol.hh
Normal file
|
@ -0,0 +1,106 @@
|
||||||
|
#pragma once
|
||||||
|
///@file
|
||||||
|
|
||||||
|
#include "serialise.hh"
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
class Store;
|
||||||
|
struct Source;
|
||||||
|
|
||||||
|
// items being serialized
|
||||||
|
class StorePath;
|
||||||
|
struct ContentAddress;
|
||||||
|
struct DrvOutput;
|
||||||
|
struct Realisation;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Shared serializers between the worker protocol, serve protocol, and a
|
||||||
|
* few others.
|
||||||
|
*
|
||||||
|
* This `struct` is basically just a `namespace`; We use a type rather
|
||||||
|
* than a namespace just so we can use it as a template argument.
|
||||||
|
*/
|
||||||
|
struct CommonProto
|
||||||
|
{
|
||||||
|
/**
|
||||||
|
* A unidirectional read connection, to be used by the read half of the
|
||||||
|
* canonical serializers below.
|
||||||
|
*/
|
||||||
|
struct ReadConn {
|
||||||
|
Source & from;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A unidirectional write connection, to be used by the write half of the
|
||||||
|
* canonical serializers below.
|
||||||
|
*/
|
||||||
|
struct WriteConn {
|
||||||
|
Sink & to;
|
||||||
|
};
|
||||||
|
|
||||||
|
template<typename T>
|
||||||
|
struct Serialise;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wrapper function around `CommonProto::Serialise<T>::write` that allows us to
|
||||||
|
* infer the type instead of having to write it down explicitly.
|
||||||
|
*/
|
||||||
|
template<typename T>
|
||||||
|
static void write(const Store & store, WriteConn conn, const T & t)
|
||||||
|
{
|
||||||
|
CommonProto::Serialise<T>::write(store, conn, t);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
#define DECLARE_COMMON_SERIALISER(T) \
|
||||||
|
struct CommonProto::Serialise< T > \
|
||||||
|
{ \
|
||||||
|
static T read(const Store & store, CommonProto::ReadConn conn); \
|
||||||
|
static void write(const Store & store, CommonProto::WriteConn conn, const T & str); \
|
||||||
|
}
|
||||||
|
|
||||||
|
template<>
|
||||||
|
DECLARE_COMMON_SERIALISER(std::string);
|
||||||
|
template<>
|
||||||
|
DECLARE_COMMON_SERIALISER(StorePath);
|
||||||
|
template<>
|
||||||
|
DECLARE_COMMON_SERIALISER(ContentAddress);
|
||||||
|
template<>
|
||||||
|
DECLARE_COMMON_SERIALISER(DrvOutput);
|
||||||
|
template<>
|
||||||
|
DECLARE_COMMON_SERIALISER(Realisation);
|
||||||
|
|
||||||
|
template<typename T>
|
||||||
|
DECLARE_COMMON_SERIALISER(std::vector<T>);
|
||||||
|
template<typename T>
|
||||||
|
DECLARE_COMMON_SERIALISER(std::set<T>);
|
||||||
|
template<typename... Ts>
|
||||||
|
DECLARE_COMMON_SERIALISER(std::tuple<Ts...>);
|
||||||
|
|
||||||
|
#define COMMA_ ,
|
||||||
|
template<typename K, typename V>
|
||||||
|
DECLARE_COMMON_SERIALISER(std::map<K COMMA_ V>);
|
||||||
|
#undef COMMA_
|
||||||
|
|
||||||
|
/**
|
||||||
|
* These use the empty string for the null case, relying on the fact
|
||||||
|
* that the underlying types never serialize to the empty string.
|
||||||
|
*
|
||||||
|
* We do this instead of a generic std::optional<T> instance because
|
||||||
|
* ordinal tags (0 or 1, here) are a bit of a compatability hazard. For
|
||||||
|
* the same reason, we don't have a std::variant<T..> instances (ordinal
|
||||||
|
* tags 0...n).
|
||||||
|
*
|
||||||
|
* We could the generic instances and then these as specializations for
|
||||||
|
* compatability, but that's proven a bit finnicky, and also makes the
|
||||||
|
* worker protocol harder to implement in other languages where such
|
||||||
|
* specializations may not be allowed.
|
||||||
|
*/
|
||||||
|
template<>
|
||||||
|
DECLARE_COMMON_SERIALISER(std::optional<StorePath>);
|
||||||
|
template<>
|
||||||
|
DECLARE_COMMON_SERIALISER(std::optional<ContentAddress>);
|
||||||
|
|
||||||
|
}
|
|
@ -4,8 +4,8 @@
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
#include "util.hh"
|
#include "util.hh"
|
||||||
#include "split.hh"
|
#include "split.hh"
|
||||||
#include "worker-protocol.hh"
|
#include "common-protocol.hh"
|
||||||
#include "worker-protocol-impl.hh"
|
#include "common-protocol-impl.hh"
|
||||||
#include "fs-accessor.hh"
|
#include "fs-accessor.hh"
|
||||||
#include <boost/container/small_vector.hpp>
|
#include <boost/container/small_vector.hpp>
|
||||||
#include <nlohmann/json.hpp>
|
#include <nlohmann/json.hpp>
|
||||||
|
@ -895,8 +895,8 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
|
||||||
drv.outputs.emplace(std::move(name), std::move(output));
|
drv.outputs.emplace(std::move(name), std::move(output));
|
||||||
}
|
}
|
||||||
|
|
||||||
drv.inputSrcs = WorkerProto::Serialise<StorePathSet>::read(store,
|
drv.inputSrcs = CommonProto::Serialise<StorePathSet>::read(store,
|
||||||
WorkerProto::ReadConn { .from = in });
|
CommonProto::ReadConn { .from = in });
|
||||||
in >> drv.platform >> drv.builder;
|
in >> drv.platform >> drv.builder;
|
||||||
drv.args = readStrings<Strings>(in);
|
drv.args = readStrings<Strings>(in);
|
||||||
|
|
||||||
|
@ -944,8 +944,8 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
|
||||||
},
|
},
|
||||||
}, i.second.raw);
|
}, i.second.raw);
|
||||||
}
|
}
|
||||||
WorkerProto::write(store,
|
CommonProto::write(store,
|
||||||
WorkerProto::WriteConn { .to = out },
|
CommonProto::WriteConn { .to = out },
|
||||||
drv.inputSrcs);
|
drv.inputSrcs);
|
||||||
out << drv.platform << drv.builder << drv.args;
|
out << drv.platform << drv.builder << drv.args;
|
||||||
out << drv.env.size();
|
out << drv.env.size();
|
||||||
|
|
|
@ -51,11 +51,8 @@ typename DerivedPathMap<V>::ChildNode * DerivedPathMap<V>::findSlot(const Single
|
||||||
|
|
||||||
// instantiations
|
// instantiations
|
||||||
|
|
||||||
#include "create-derivation-and-realise-goal.hh"
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
template struct DerivedPathMap<std::weak_ptr<CreateDerivationAndRealiseGoal>>;
|
|
||||||
|
|
||||||
GENERATE_CMP_EXT(
|
GENERATE_CMP_EXT(
|
||||||
template<>,
|
template<>,
|
||||||
DerivedPathMap<std::set<std::string>>::ChildNode,
|
DerivedPathMap<std::set<std::string>>::ChildNode,
|
||||||
|
|
|
@ -20,11 +20,8 @@ namespace nix {
|
||||||
*
|
*
|
||||||
* @param V A type to instantiate for each output. It should probably
|
* @param V A type to instantiate for each output. It should probably
|
||||||
* should be an "optional" type so not every interior node has to have a
|
* should be an "optional" type so not every interior node has to have a
|
||||||
* value. For example, the scheduler uses
|
* value. `* const Something` or `std::optional<Something>` would be
|
||||||
* `DerivedPathMap<std::weak_ptr<CreateDerivationAndRealiseGoal>>` to
|
* good choices for "optional" types.
|
||||||
* remember which goals correspond to which outputs. `* const Something`
|
|
||||||
* or `std::optional<Something>` would also be good choices for
|
|
||||||
* "optional" types.
|
|
||||||
*/
|
*/
|
||||||
template<typename V>
|
template<typename V>
|
||||||
struct DerivedPathMap {
|
struct DerivedPathMap {
|
||||||
|
|
|
@ -1,8 +1,8 @@
|
||||||
#include "serialise.hh"
|
#include "serialise.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "archive.hh"
|
#include "archive.hh"
|
||||||
#include "worker-protocol.hh"
|
#include "common-protocol.hh"
|
||||||
#include "worker-protocol-impl.hh"
|
#include "common-protocol-impl.hh"
|
||||||
|
|
||||||
#include <algorithm>
|
#include <algorithm>
|
||||||
|
|
||||||
|
@ -46,8 +46,8 @@ void Store::exportPath(const StorePath & path, Sink & sink)
|
||||||
teeSink
|
teeSink
|
||||||
<< exportMagic
|
<< exportMagic
|
||||||
<< printStorePath(path);
|
<< printStorePath(path);
|
||||||
WorkerProto::write(*this,
|
CommonProto::write(*this,
|
||||||
WorkerProto::WriteConn { .to = teeSink },
|
CommonProto::WriteConn { .to = teeSink },
|
||||||
info->references);
|
info->references);
|
||||||
teeSink
|
teeSink
|
||||||
<< (info->deriver ? printStorePath(*info->deriver) : "")
|
<< (info->deriver ? printStorePath(*info->deriver) : "")
|
||||||
|
@ -76,8 +76,8 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
|
||||||
|
|
||||||
//Activity act(*logger, lvlInfo, "importing path '%s'", info.path);
|
//Activity act(*logger, lvlInfo, "importing path '%s'", info.path);
|
||||||
|
|
||||||
auto references = WorkerProto::Serialise<StorePathSet>::read(*this,
|
auto references = CommonProto::Serialise<StorePathSet>::read(*this,
|
||||||
WorkerProto::ReadConn { .from = source });
|
CommonProto::ReadConn { .from = source });
|
||||||
auto deriver = readString(source);
|
auto deriver = readString(source);
|
||||||
auto narHash = hashString(htSHA256, saved.s);
|
auto narHash = hashString(htSHA256, saved.s);
|
||||||
|
|
||||||
|
|
|
@ -776,7 +776,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
/* Synchronisation point for testing, see tests/gc-concurrent.sh. */
|
/* Synchronisation point for testing, see tests/functional/gc-concurrent.sh. */
|
||||||
if (auto p = getEnv("_NIX_TEST_GC_SYNC"))
|
if (auto p = getEnv("_NIX_TEST_GC_SYNC"))
|
||||||
readFile(*p);
|
readFile(*p);
|
||||||
|
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
#include "types.hh"
|
#include "types.hh"
|
||||||
#include "config.hh"
|
#include "config.hh"
|
||||||
#include "util.hh"
|
#include "util.hh"
|
||||||
|
#include "experimental-features.hh"
|
||||||
|
|
||||||
#include <map>
|
#include <map>
|
||||||
#include <limits>
|
#include <limits>
|
||||||
|
@ -343,7 +344,7 @@ public:
|
||||||
users in `build-users-group`.
|
users in `build-users-group`.
|
||||||
|
|
||||||
UIDs are allocated starting at 872415232 (0x34000000) on Linux and 56930 on macOS.
|
UIDs are allocated starting at 872415232 (0x34000000) on Linux and 56930 on macOS.
|
||||||
)"};
|
)", {}, true, Xp::AutoAllocateUids};
|
||||||
|
|
||||||
Setting<uint32_t> startId{this,
|
Setting<uint32_t> startId{this,
|
||||||
#if __linux__
|
#if __linux__
|
||||||
|
@ -697,19 +698,40 @@ public:
|
||||||
getDefaultSystemFeatures(),
|
getDefaultSystemFeatures(),
|
||||||
"system-features",
|
"system-features",
|
||||||
R"(
|
R"(
|
||||||
A set of system “features” supported by this machine, e.g. `kvm`.
|
A set of system “features” supported by this machine.
|
||||||
Derivations can express a dependency on such features through the
|
|
||||||
derivation attribute `requiredSystemFeatures`. For example, the
|
|
||||||
attribute
|
|
||||||
|
|
||||||
requiredSystemFeatures = [ "kvm" ];
|
This complements the [`system`](#conf-system) and [`extra-platforms`](#conf-extra-platforms) configuration options and the corresponding [`system`](@docroot@/language/derivations.md#attr-system) attribute on derivations.
|
||||||
|
|
||||||
ensures that the derivation can only be built on a machine with the
|
A derivation can require system features in the [`requiredSystemFeatures` attribute](@docroot@/language/advanced-attributes.md#adv-attr-requiredSystemFeatures), and the machine to build the derivation must have them.
|
||||||
`kvm` feature.
|
|
||||||
|
|
||||||
This setting by default includes `kvm` if `/dev/kvm` is accessible,
|
System features are user-defined, but Nix sets the following defaults:
|
||||||
and the pseudo-features `nixos-test`, `benchmark` and `big-parallel`
|
|
||||||
that are used in Nixpkgs to route builds to specific machines.
|
- `kvm`
|
||||||
|
|
||||||
|
Included by default if `/dev/kvm` is accessible.
|
||||||
|
|
||||||
|
- `nixos-test`, `benchmark`, `big-parallel`
|
||||||
|
|
||||||
|
These historical pseudo-features are always enabled for backwards compatibility, as they are used in Nixpkgs to route Hydra builds to specific machines.
|
||||||
|
|
||||||
|
- `ca-derivations`
|
||||||
|
|
||||||
|
Included by default if the [`ca-derivations` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-ca-derivations) is enabled.
|
||||||
|
|
||||||
|
This system feature is implicitly required by derivations with the [`__contentAddressed` attribute](@docroot@/language/advanced-attributes.md#adv-attr-__contentAddressed).
|
||||||
|
|
||||||
|
- `recursive-nix`
|
||||||
|
|
||||||
|
Included by default if the [`recursive-nix` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-recursive-nix) is enabled.
|
||||||
|
|
||||||
|
- `uid-range`
|
||||||
|
|
||||||
|
On Linux, Nix can run builds in a user namespace where they run as root (UID 0) and have 65,536 UIDs available.
|
||||||
|
This is primarily useful for running containers such as `systemd-nspawn` inside a Nix build. For an example, see [`tests/systemd-nspawn/nix`][nspawn].
|
||||||
|
|
||||||
|
[nspawn]: https://github.com/NixOS/nix/blob/67bcb99700a0da1395fa063d7c6586740b304598/tests/systemd-nspawn.nix.
|
||||||
|
|
||||||
|
Included by default on Linux if the [`auto-allocate-uids`](#conf-auto-allocate-uids) setting is enabled.
|
||||||
)", {}, false};
|
)", {}, false};
|
||||||
|
|
||||||
Setting<Strings> substituters{
|
Setting<Strings> substituters{
|
||||||
|
@ -1031,6 +1053,25 @@ public:
|
||||||
```
|
```
|
||||||
)"
|
)"
|
||||||
};
|
};
|
||||||
|
|
||||||
|
Setting<StringMap> impureEnv {this, {}, "impure-env",
|
||||||
|
R"(
|
||||||
|
A list of items, each in the format of:
|
||||||
|
|
||||||
|
- `name=value`: Set environment variable `name` to `value`.
|
||||||
|
|
||||||
|
If the user is trusted (see `trusted-users` option), when building
|
||||||
|
a fixed-output derivation, environment variables set in this option
|
||||||
|
will be passed to the builder if they are listed in [`impureEnvVars`](@docroot@/language/advanced-attributes.md##adv-attr-impureEnvVars).
|
||||||
|
|
||||||
|
This option is useful for, e.g., setting `https_proxy` for
|
||||||
|
fixed-output derivations and in a multi-user Nix installation, or
|
||||||
|
setting private access tokens when fetching a private repository.
|
||||||
|
)",
|
||||||
|
{}, // aliases
|
||||||
|
true, // document default
|
||||||
|
Xp::ConfigurableImpureEnv
|
||||||
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -6,8 +6,8 @@
|
||||||
#include "build-result.hh"
|
#include "build-result.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "path-with-outputs.hh"
|
#include "path-with-outputs.hh"
|
||||||
#include "worker-protocol.hh"
|
#include "common-protocol.hh"
|
||||||
#include "worker-protocol-impl.hh"
|
#include "common-protocol-impl.hh"
|
||||||
#include "ssh.hh"
|
#include "ssh.hh"
|
||||||
#include "derivations.hh"
|
#include "derivations.hh"
|
||||||
#include "callback.hh"
|
#include "callback.hh"
|
||||||
|
@ -50,37 +50,37 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
||||||
bool good = true;
|
bool good = true;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Coercion to `WorkerProto::ReadConn`. This makes it easy to use the
|
* Coercion to `CommonProto::ReadConn`. This makes it easy to use the
|
||||||
* factored out worker protocol searlizers with a
|
* factored out common protocol serialisers with a
|
||||||
* `LegacySSHStore::Connection`.
|
* `LegacySSHStore::Connection`.
|
||||||
*
|
*
|
||||||
* The worker protocol connection types are unidirectional, unlike
|
* The common protocol connection types are unidirectional, unlike
|
||||||
* this type.
|
* this type.
|
||||||
*
|
*
|
||||||
* @todo Use server protocol serializers, not worker protocol
|
* @todo Use server protocol serializers, not common protocol
|
||||||
* serializers, once we have made that distiction.
|
* serializers, once we have made that distiction.
|
||||||
*/
|
*/
|
||||||
operator WorkerProto::ReadConn ()
|
operator CommonProto::ReadConn ()
|
||||||
{
|
{
|
||||||
return WorkerProto::ReadConn {
|
return CommonProto::ReadConn {
|
||||||
.from = from,
|
.from = from,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Coercion to `WorkerProto::WriteConn`. This makes it easy to use the
|
* Coercion to `CommonProto::WriteConn`. This makes it easy to use the
|
||||||
* factored out worker protocol searlizers with a
|
* factored out common protocol searlizers with a
|
||||||
* `LegacySSHStore::Connection`.
|
* `LegacySSHStore::Connection`.
|
||||||
*
|
*
|
||||||
* The worker protocol connection types are unidirectional, unlike
|
* The common protocol connection types are unidirectional, unlike
|
||||||
* this type.
|
* this type.
|
||||||
*
|
*
|
||||||
* @todo Use server protocol serializers, not worker protocol
|
* @todo Use server protocol serializers, not common protocol
|
||||||
* serializers, once we have made that distiction.
|
* serializers, once we have made that distiction.
|
||||||
*/
|
*/
|
||||||
operator WorkerProto::WriteConn ()
|
operator CommonProto::WriteConn ()
|
||||||
{
|
{
|
||||||
return WorkerProto::WriteConn {
|
return CommonProto::WriteConn {
|
||||||
.to = to,
|
.to = to,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -183,7 +183,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
||||||
auto deriver = readString(conn->from);
|
auto deriver = readString(conn->from);
|
||||||
if (deriver != "")
|
if (deriver != "")
|
||||||
info->deriver = parseStorePath(deriver);
|
info->deriver = parseStorePath(deriver);
|
||||||
info->references = WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
|
info->references = CommonProto::Serialise<StorePathSet>::read(*this, *conn);
|
||||||
readLongLong(conn->from); // download size
|
readLongLong(conn->from); // download size
|
||||||
info->narSize = readLongLong(conn->from);
|
info->narSize = readLongLong(conn->from);
|
||||||
|
|
||||||
|
@ -217,7 +217,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
||||||
<< printStorePath(info.path)
|
<< printStorePath(info.path)
|
||||||
<< (info.deriver ? printStorePath(*info.deriver) : "")
|
<< (info.deriver ? printStorePath(*info.deriver) : "")
|
||||||
<< info.narHash.to_string(Base16, false);
|
<< info.narHash.to_string(Base16, false);
|
||||||
WorkerProto::write(*this, *conn, info.references);
|
CommonProto::write(*this, *conn, info.references);
|
||||||
conn->to
|
conn->to
|
||||||
<< info.registrationTime
|
<< info.registrationTime
|
||||||
<< info.narSize
|
<< info.narSize
|
||||||
|
@ -246,7 +246,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
||||||
conn->to
|
conn->to
|
||||||
<< exportMagic
|
<< exportMagic
|
||||||
<< printStorePath(info.path);
|
<< printStorePath(info.path);
|
||||||
WorkerProto::write(*this, *conn, info.references);
|
CommonProto::write(*this, *conn, info.references);
|
||||||
conn->to
|
conn->to
|
||||||
<< (info.deriver ? printStorePath(*info.deriver) : "")
|
<< (info.deriver ? printStorePath(*info.deriver) : "")
|
||||||
<< 0
|
<< 0
|
||||||
|
@ -331,7 +331,7 @@ public:
|
||||||
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3)
|
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3)
|
||||||
conn->from >> status.timesBuilt >> status.isNonDeterministic >> status.startTime >> status.stopTime;
|
conn->from >> status.timesBuilt >> status.isNonDeterministic >> status.startTime >> status.stopTime;
|
||||||
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 6) {
|
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 6) {
|
||||||
auto builtOutputs = WorkerProto::Serialise<DrvOutputs>::read(*this, *conn);
|
auto builtOutputs = CommonProto::Serialise<DrvOutputs>::read(*this, *conn);
|
||||||
for (auto && [output, realisation] : builtOutputs)
|
for (auto && [output, realisation] : builtOutputs)
|
||||||
status.builtOutputs.insert_or_assign(
|
status.builtOutputs.insert_or_assign(
|
||||||
std::move(output.outputName),
|
std::move(output.outputName),
|
||||||
|
@ -409,10 +409,10 @@ public:
|
||||||
conn->to
|
conn->to
|
||||||
<< ServeProto::Command::QueryClosure
|
<< ServeProto::Command::QueryClosure
|
||||||
<< includeOutputs;
|
<< includeOutputs;
|
||||||
WorkerProto::write(*this, *conn, paths);
|
CommonProto::write(*this, *conn, paths);
|
||||||
conn->to.flush();
|
conn->to.flush();
|
||||||
|
|
||||||
for (auto & i : WorkerProto::Serialise<StorePathSet>::read(*this, *conn))
|
for (auto & i : CommonProto::Serialise<StorePathSet>::read(*this, *conn))
|
||||||
out.insert(i);
|
out.insert(i);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -425,10 +425,10 @@ public:
|
||||||
<< ServeProto::Command::QueryValidPaths
|
<< ServeProto::Command::QueryValidPaths
|
||||||
<< false // lock
|
<< false // lock
|
||||||
<< maybeSubstitute;
|
<< maybeSubstitute;
|
||||||
WorkerProto::write(*this, *conn, paths);
|
CommonProto::write(*this, *conn, paths);
|
||||||
conn->to.flush();
|
conn->to.flush();
|
||||||
|
|
||||||
return WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
|
return CommonProto::Serialise<StorePathSet>::read(*this, *conn);
|
||||||
}
|
}
|
||||||
|
|
||||||
void connect() override
|
void connect() override
|
||||||
|
|
162
src/libstore/length-prefixed-protocol-helper.hh
Normal file
162
src/libstore/length-prefixed-protocol-helper.hh
Normal file
|
@ -0,0 +1,162 @@
|
||||||
|
#pragma once
|
||||||
|
/**
|
||||||
|
* @file Reusable serialisers for serialization container types in a
|
||||||
|
* length-prefixed manner.
|
||||||
|
*
|
||||||
|
* Used by both the Worker and Serve protocols.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "types.hh"
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
class Store;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reusable serialisers for serialization container types in a
|
||||||
|
* length-prefixed manner.
|
||||||
|
*
|
||||||
|
* @param T The type of the collection being serialised
|
||||||
|
*
|
||||||
|
* @param Inner This the most important parameter; this is the "inner"
|
||||||
|
* protocol. The user of this will substitute `MyProtocol` or similar
|
||||||
|
* when making a `MyProtocol::Serialiser<Collection<T>>`. Note that the
|
||||||
|
* inside is allowed to call to call `Inner::Serialiser` on different
|
||||||
|
* types. This is especially important for `std::map` which doesn't have
|
||||||
|
* a single `T` but one `K` and one `V`.
|
||||||
|
*/
|
||||||
|
template<class Inner, typename T>
|
||||||
|
struct LengthPrefixedProtoHelper;
|
||||||
|
|
||||||
|
/*!
|
||||||
|
* \typedef LengthPrefixedProtoHelper::S
|
||||||
|
*
|
||||||
|
* Read this as simply `using S = Inner::Serialise;`.
|
||||||
|
*
|
||||||
|
* It would be nice to use that directly, but C++ doesn't seem to allow
|
||||||
|
* it. The `typename` keyword needed to refer to `Inner` seems to greedy
|
||||||
|
* (low precedence), and then C++ complains that `Serialise` is not a
|
||||||
|
* type parameter but a real type.
|
||||||
|
*
|
||||||
|
* Making this `S` alias seems to be the only way to avoid these issues.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#define LENGTH_PREFIXED_PROTO_HELPER(Inner, T) \
|
||||||
|
struct LengthPrefixedProtoHelper< Inner, T > \
|
||||||
|
{ \
|
||||||
|
static T read(const Store & store, typename Inner::ReadConn conn); \
|
||||||
|
static void write(const Store & store, typename Inner::WriteConn conn, const T & str); \
|
||||||
|
private: \
|
||||||
|
template<typename U> using S = typename Inner::template Serialise<U>; \
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class Inner, typename T>
|
||||||
|
LENGTH_PREFIXED_PROTO_HELPER(Inner, std::vector<T>);
|
||||||
|
|
||||||
|
template<class Inner, typename T>
|
||||||
|
LENGTH_PREFIXED_PROTO_HELPER(Inner, std::set<T>);
|
||||||
|
|
||||||
|
template<class Inner, typename... Ts>
|
||||||
|
LENGTH_PREFIXED_PROTO_HELPER(Inner, std::tuple<Ts...>);
|
||||||
|
|
||||||
|
template<class Inner, typename K, typename V>
|
||||||
|
#define _X std::map<K, V>
|
||||||
|
LENGTH_PREFIXED_PROTO_HELPER(Inner, _X);
|
||||||
|
#undef _X
|
||||||
|
|
||||||
|
template<class Inner, typename T>
|
||||||
|
std::vector<T>
|
||||||
|
LengthPrefixedProtoHelper<Inner, std::vector<T>>::read(
|
||||||
|
const Store & store, typename Inner::ReadConn conn)
|
||||||
|
{
|
||||||
|
std::vector<T> resSet;
|
||||||
|
auto size = readNum<size_t>(conn.from);
|
||||||
|
while (size--) {
|
||||||
|
resSet.push_back(S<T>::read(store, conn));
|
||||||
|
}
|
||||||
|
return resSet;
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class Inner, typename T>
|
||||||
|
void
|
||||||
|
LengthPrefixedProtoHelper<Inner, std::vector<T>>::write(
|
||||||
|
const Store & store, typename Inner::WriteConn conn, const std::vector<T> & resSet)
|
||||||
|
{
|
||||||
|
conn.to << resSet.size();
|
||||||
|
for (auto & key : resSet) {
|
||||||
|
S<T>::write(store, conn, key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class Inner, typename T>
|
||||||
|
std::set<T>
|
||||||
|
LengthPrefixedProtoHelper<Inner, std::set<T>>::read(
|
||||||
|
const Store & store, typename Inner::ReadConn conn)
|
||||||
|
{
|
||||||
|
std::set<T> resSet;
|
||||||
|
auto size = readNum<size_t>(conn.from);
|
||||||
|
while (size--) {
|
||||||
|
resSet.insert(S<T>::read(store, conn));
|
||||||
|
}
|
||||||
|
return resSet;
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class Inner, typename T>
|
||||||
|
void
|
||||||
|
LengthPrefixedProtoHelper<Inner, std::set<T>>::write(
|
||||||
|
const Store & store, typename Inner::WriteConn conn, const std::set<T> & resSet)
|
||||||
|
{
|
||||||
|
conn.to << resSet.size();
|
||||||
|
for (auto & key : resSet) {
|
||||||
|
S<T>::write(store, conn, key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class Inner, typename K, typename V>
|
||||||
|
std::map<K, V>
|
||||||
|
LengthPrefixedProtoHelper<Inner, std::map<K, V>>::read(
|
||||||
|
const Store & store, typename Inner::ReadConn conn)
|
||||||
|
{
|
||||||
|
std::map<K, V> resMap;
|
||||||
|
auto size = readNum<size_t>(conn.from);
|
||||||
|
while (size--) {
|
||||||
|
auto k = S<K>::read(store, conn);
|
||||||
|
auto v = S<V>::read(store, conn);
|
||||||
|
resMap.insert_or_assign(std::move(k), std::move(v));
|
||||||
|
}
|
||||||
|
return resMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class Inner, typename K, typename V>
|
||||||
|
void
|
||||||
|
LengthPrefixedProtoHelper<Inner, std::map<K, V>>::write(
|
||||||
|
const Store & store, typename Inner::WriteConn conn, const std::map<K, V> & resMap)
|
||||||
|
{
|
||||||
|
conn.to << resMap.size();
|
||||||
|
for (auto & i : resMap) {
|
||||||
|
S<K>::write(store, conn, i.first);
|
||||||
|
S<V>::write(store, conn, i.second);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class Inner, typename... Ts>
|
||||||
|
std::tuple<Ts...>
|
||||||
|
LengthPrefixedProtoHelper<Inner, std::tuple<Ts...>>::read(
|
||||||
|
const Store & store, typename Inner::ReadConn conn)
|
||||||
|
{
|
||||||
|
return std::tuple<Ts...> {
|
||||||
|
S<Ts>::read(store, conn)...,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class Inner, typename... Ts>
|
||||||
|
void
|
||||||
|
LengthPrefixedProtoHelper<Inner, std::tuple<Ts...>>::write(
|
||||||
|
const Store & store, typename Inner::WriteConn conn, const std::tuple<Ts...> & res)
|
||||||
|
{
|
||||||
|
std::apply([&]<typename... Us>(const Us &... args) {
|
||||||
|
(S<Us>::write(store, conn, args), ...);
|
||||||
|
}, res);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -3,6 +3,6 @@
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-\._\?=]+)";
|
static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-_\?=][0-9a-zA-Z\+\-\._\?=]*)";
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -11,6 +11,8 @@ static void checkName(std::string_view path, std::string_view name)
|
||||||
if (name.size() > StorePath::MaxPathLen)
|
if (name.size() > StorePath::MaxPathLen)
|
||||||
throw BadStorePath("store path '%s' has a name longer than %d characters",
|
throw BadStorePath("store path '%s' has a name longer than %d characters",
|
||||||
path, StorePath::MaxPathLen);
|
path, StorePath::MaxPathLen);
|
||||||
|
if (name[0] == '.')
|
||||||
|
throw BadStorePath("store path '%s' starts with illegal character '.'", path);
|
||||||
// See nameRegexStr for the definition
|
// See nameRegexStr for the definition
|
||||||
for (auto c : name)
|
for (auto c : name)
|
||||||
if (!((c >= '0' && c <= '9')
|
if (!((c >= '0' && c <= '9')
|
||||||
|
|
|
@ -183,7 +183,7 @@ void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bo
|
||||||
iterDropUntil(gens, i, [&](auto & g) { return g.number == curGen; });
|
iterDropUntil(gens, i, [&](auto & g) { return g.number == curGen; });
|
||||||
|
|
||||||
// Skip over `max` generations, preserving them
|
// Skip over `max` generations, preserving them
|
||||||
for (auto keep = 0; i != gens.rend() && keep < max; ++i, ++keep);
|
for (GenerationNumber keep = 0; i != gens.rend() && keep < max; ++i, ++keep);
|
||||||
|
|
||||||
// Delete the rest
|
// Delete the rest
|
||||||
for (; i != gens.rend(); ++i)
|
for (; i != gens.rend(); ++i)
|
||||||
|
|
|
@ -153,19 +153,22 @@ struct StoreConfig : public Config
|
||||||
|
|
||||||
Setting<int> priority{this, 0, "priority",
|
Setting<int> priority{this, 0, "priority",
|
||||||
R"(
|
R"(
|
||||||
Priority of this store when used as a substituter. A lower value means a higher priority.
|
Priority of this store when used as a [substituter](@docroot@/command-ref/conf-file.md#conf-substituters).
|
||||||
|
A lower value means a higher priority.
|
||||||
)"};
|
)"};
|
||||||
|
|
||||||
Setting<bool> wantMassQuery{this, false, "want-mass-query",
|
Setting<bool> wantMassQuery{this, false, "want-mass-query",
|
||||||
R"(
|
R"(
|
||||||
Whether this store (when used as a substituter) can be
|
Whether this store can be queried efficiently for path validity when used as a [substituter](@docroot@/command-ref/conf-file.md#conf-substituters).
|
||||||
queried efficiently for path validity.
|
|
||||||
)"};
|
)"};
|
||||||
|
|
||||||
Setting<StringSet> systemFeatures{this, getDefaultSystemFeatures(),
|
Setting<StringSet> systemFeatures{this, getDefaultSystemFeatures(),
|
||||||
"system-features",
|
"system-features",
|
||||||
"Optional features that the system this store builds on implements (like \"kvm\")."};
|
R"(
|
||||||
|
Optional [system features](@docroot@/command-ref/conf-file.md#conf-system-features) available on the system this store uses to build derivations.
|
||||||
|
|
||||||
|
Example: `"kvm"`
|
||||||
|
)" };
|
||||||
};
|
};
|
||||||
|
|
||||||
class Store : public std::enable_shared_from_this<Store>, public virtual StoreConfig
|
class Store : public std::enable_shared_from_this<Store>, public virtual StoreConfig
|
||||||
|
|
23
src/libstore/tests/characterization.hh
Normal file
23
src/libstore/tests/characterization.hh
Normal file
|
@ -0,0 +1,23 @@
|
||||||
|
#pragma once
|
||||||
|
///@file
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The path to the `unit-test-data` directory. See the contributing
|
||||||
|
* guide in the manual for further details.
|
||||||
|
*/
|
||||||
|
static Path getUnitTestData() {
|
||||||
|
return getEnv("_NIX_TEST_UNIT_DATA").value();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Whether we should update "golden masters" instead of running tests
|
||||||
|
* against them. See the contributing guide in the manual for further
|
||||||
|
* details.
|
||||||
|
*/
|
||||||
|
static bool testAccept() {
|
||||||
|
return getEnv("_NIX_TEST_ACCEPT") == "1";
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue