Merge remote-tracking branch 'origin/master' into tarball-cache

Eelco Dolstra 2024-02-14 14:45:19 +01:00
commit 54354eaecf
368 changed files with 9899 additions and 4992 deletions

.clang-format

@ -0,0 +1,30 @@
BasedOnStyle: LLVM
IndentWidth: 4
BreakBeforeBraces: Custom
BraceWrapping:
AfterStruct: true
AfterClass: true
AfterFunction: true
AfterUnion: true
SplitEmptyRecord: false
PointerAlignment: Middle
FixNamespaceComments: false
SortIncludes: Never
#IndentPPDirectives: BeforeHash
SpaceAfterCStyleCast: true
SpaceAfterTemplateKeyword: false
AccessModifierOffset: -4
AlignAfterOpenBracket: AlwaysBreak
AlignEscapedNewlines: DontAlign
ColumnLimit: 120
BreakStringLiterals: false
BitFieldColonSpacing: None
AllowShortFunctionsOnASingleLine: Empty
AlwaysBreakTemplateDeclarations: Yes
BinPackParameters: false
BreakConstructorInitializers: BeforeComma
EmptyLineAfterAccessModifier: Leave # change to always/never later?
EmptyLineBeforeAccessModifier: Leave
#PackConstructorInitializers: BinPack
BreakBeforeBinaryOperators: NonAssignment
AlwaysBreakBeforeMultilineStrings: true

.clang-tidy

@ -0,0 +1,3 @@
# We use pointers to aggregates in a couple of places, intentionally.
# void * would look weird.
Checks: '-bugprone-sizeof-expression'


@ -21,7 +21,7 @@ jobs:
          fetch-depth: 0
      - name: Create backport PRs
        # should be kept in sync with `version`
-       uses: zeebe-io/backport-action@v2.3.0
+       uses: zeebe-io/backport-action@v2.4.1
        with:
          # Config README: https://github.com/zeebe-io/backport-action#backport-action
          github_token: ${{ secrets.GITHUB_TOKEN }}


@ -20,12 +20,12 @@ jobs:
       - uses: actions/checkout@v4
         with:
           fetch-depth: 0
-      - uses: cachix/install-nix-action@v24
+      - uses: cachix/install-nix-action@v25
         with:
           # The sandbox would otherwise be disabled by default on Darwin
           extra_nix_config: "sandbox = true"
       - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-      - uses: cachix/cachix-action@v13
+      - uses: cachix/cachix-action@v14
         if: needs.check_secrets.outputs.cachix == 'true'
         with:
           name: '${{ env.CACHIX_NAME }}'
@ -62,10 +62,10 @@ jobs:
         with:
           fetch-depth: 0
       - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-      - uses: cachix/install-nix-action@v24
+      - uses: cachix/install-nix-action@v25
         with:
           install_url: https://releases.nixos.org/nix/nix-2.13.3/install
-      - uses: cachix/cachix-action@v13
+      - uses: cachix/cachix-action@v14
         with:
           name: '${{ env.CACHIX_NAME }}'
           signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
@ -84,7 +84,7 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-      - uses: cachix/install-nix-action@v24
+      - uses: cachix/install-nix-action@v25
        with:
           install_url: '${{needs.installer.outputs.installerURL}}'
           install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
@ -114,12 +114,12 @@ jobs:
       - uses: actions/checkout@v4
         with:
           fetch-depth: 0
-      - uses: cachix/install-nix-action@v24
+      - uses: cachix/install-nix-action@v25
         with:
           install_url: https://releases.nixos.org/nix/nix-2.13.3/install
       - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
       - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
-      - uses: cachix/cachix-action@v13
+      - uses: cachix/cachix-action@v14
         if: needs.check_secrets.outputs.cachix == 'true'
         with:
           name: '${{ env.CACHIX_NAME }}'

.gitignore

@ -94,6 +94,7 @@ perl/Makefile.config
 /tests/functional/ca/config.nix
 /tests/functional/dyn-drv/config.nix
 /tests/functional/repl-result-out
+/tests/functional/debugger-test-out
 /tests/functional/test-libstoreconsumer/test-libstoreconsumer

 # /tests/functional/lang/
@ -141,6 +142,7 @@ compile_commands.json
 nix-rust/target

 result
+result-*

 # IDE
 .vscode/


@ -1 +1 @@
-2.20.0
+2.21.0


@ -63,7 +63,7 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy).
   - Functional tests [`tests/functional/**.sh`](./tests/functional)
   - Unit tests [`src/*/tests`](./src/)
   - Integration tests [`tests/nixos/*`](./tests/nixos)
-- [ ] User documentation in the [manual](..doc/manual/src)
+- [ ] User documentation in the [manual](./doc/manual/src)
 - [ ] API documentation in header files
 - [ ] Code and comments are self-explanatory
 - [ ] Commit message explains **why** the change was made


@ -1,8 +1,12 @@
+# External build directory support
+
 include mk/build-dir.mk

 -include $(buildprefix)Makefile.config
 clean-files += $(buildprefix)Makefile.config

+# List makefiles
+
 ifeq ($(ENABLE_BUILD), yes)
 makefiles = \
   mk/precompiled-headers.mk \
@ -43,6 +47,19 @@ makefiles += \
   tests/functional/plugins/local.mk
 endif

+# Some makefiles require access to built programs and must be included late.
+makefiles-late =
+
+ifeq ($(ENABLE_DOC_GEN), yes)
+makefiles-late += doc/manual/local.mk
+endif
+
+ifeq ($(ENABLE_INTERNAL_API_DOCS), yes)
+makefiles-late += doc/internal-api/local.mk
+endif
+
+# Miscellaneous global Flags
+
 OPTIMIZE = 1

 ifeq ($(OPTIMIZE), 1)
@ -52,9 +69,29 @@ else
 GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE
 endif

+include mk/platform.mk
+
+ifdef HOST_WINDOWS
+# Windows DLLs are stricter about symbol visibility than Unix shared
+# objects --- see https://gcc.gnu.org/wiki/Visibility for details.
+# This is a temporary sledgehammer to export everything like on Unix,
+# and not deal with this yet.
+#
+# TODO do not do this, and instead do fine-grained export annotations.
+GLOBAL_LDFLAGS += -Wl,--export-all-symbols
+endif
+
+GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src
+
+# Include the main lib, causing rules to be defined
+
 include mk/lib.mk

-# Must be included after `mk/lib.mk` so isn't the default target.
+# Fallback stub rules for better UX when things are disabled
+#
+# These must be defined after `mk/lib.mk`. Otherwise the first rule
+# incorrectly becomes the default target.
+
 ifneq ($(ENABLE_UNIT_TESTS), yes)
 .PHONY: check
 check:
@ -69,25 +106,18 @@ installcheck:
 	@exit 1
 endif

-# Must be included after `mk/lib.mk` so rules refer to variables defined
-# by the library. Rules are not "lazy" like variables, unfortunately.
-ifeq ($(ENABLE_DOC_GEN), yes)
-$(eval $(call include-sub-makefile, doc/manual/local.mk))
-else
+# Documentation fallback stub rules.
+
+ifneq ($(ENABLE_DOC_GEN), yes)
 .PHONY: manual-html manpages
 manual-html manpages:
 	@echo "Generated docs are disabled. Configure without '--disable-doc-gen', or avoid calling 'make manpages' and 'make manual-html'."
 	@exit 1
 endif

-ifeq ($(ENABLE_INTERNAL_API_DOCS), yes)
-$(eval $(call include-sub-makefile, doc/internal-api/local.mk))
-else
+ifneq ($(ENABLE_INTERNAL_API_DOCS), yes)
 .PHONY: internal-api-html
 internal-api-html:
 	@echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
 	@exit 1
 endif
-
-GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src


@ -29,7 +29,6 @@ LOWDOWN_LIBS = @LOWDOWN_LIBS@
 OPENSSL_LIBS = @OPENSSL_LIBS@
 PACKAGE_NAME = @PACKAGE_NAME@
 PACKAGE_VERSION = @PACKAGE_VERSION@
-RAPIDCHECK_HEADERS = @RAPIDCHECK_HEADERS@
 SHELL = @bash@
 SODIUM_LIBS = @SODIUM_LIBS@
 SQLITE3_LIBS = @SQLITE3_LIBS@


@ -47,6 +47,10 @@ AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier ('cpu-os')])
 # State should be stored in /nix/var, unless the user overrides it explicitly.
 test "$localstatedir" = '${prefix}/var' && localstatedir=/nix/var

+# Assign a default value to C{,XX}FLAGS as the default configure script sets them
+# to -O2 otherwise, which we don't want to have hardcoded
+CFLAGS=${CFLAGS-""}
+CXXFLAGS=${CXXFLAGS-""}
+
 AC_PROG_CC
 AC_PROG_CXX
@ -160,7 +164,7 @@ AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation
 AC_SUBST(ENABLE_DOC_GEN)

 AS_IF(
-  [test "$ENABLE_BUILD" == "no" && test "$ENABLE_GENERATED_DOCS" == "yes"],
+  [test "$ENABLE_BUILD" == "no" && test "$ENABLE_DOC_GEN" == "yes"],
   [AC_MSG_ERROR([Cannot enable generated docs when building overall is disabled. Please do not pass '--enable-doc-gen' or do not pass '--disable-build'.])])

 # Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
@ -251,17 +255,25 @@ PKG_CHECK_MODULES([SQLITE3], [sqlite3 >= 3.6.19], [CXXFLAGS="$SQLITE3_CFLAGS $CX
 # Look for libcurl, a required dependency.
 PKG_CHECK_MODULES([LIBCURL], [libcurl], [CXXFLAGS="$LIBCURL_CFLAGS $CXXFLAGS"])
-# Look for editline, a required dependency.
+# Look for editline or readline, a required dependency.
 # The libeditline.pc file was added only in libeditline >= 1.15.2,
 # see https://github.com/troglobit/editline/commit/0a8f2ef4203c3a4a4726b9dd1336869cd0da8607,
-# but e.g. Ubuntu 16.04 has an older version, so we fall back to searching for
-# editline.h when the pkg-config approach fails.
-PKG_CHECK_MODULES([EDITLINE], [libeditline], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"], [
-  AC_CHECK_HEADERS([editline.h], [true],
-    [AC_MSG_ERROR([Nix requires libeditline; it was found neither via pkg-config nor its normal header.])])
-  AC_SEARCH_LIBS([readline read_history], [editline], [],
-    [AC_MSG_ERROR([Nix requires libeditline; it was not found via pkg-config, but via its header, but required functions do not work. Maybe it is too old? >= 1.14 is required.])])
-])
+# Older versions are no longer supported.
+AC_ARG_WITH(
+  [readline-flavor],
+  AS_HELP_STRING([--with-readline-flavor],[Which library to use for nice line editing with the Nix language REPL [default=editline]]),
+  [readline_flavor=$withval],
+  [readline_flavor=editline])
+AS_CASE(["$readline_flavor"],
+  [editline], [
+    readline_flavor_pc=libeditline
+  ],
+  [readline], [
+    readline_flavor_pc=readline
+    AC_DEFINE([USE_READLINE], [1], [Use readline instead of editline])
+  ],
+  [AC_MSG_ERROR([bad value "$readline_flavor" for --with-readline-flavor, must be one of: editline, readline])])
+PKG_CHECK_MODULES([EDITLINE], [$readline_flavor_pc], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"])
 # Look for libsodium.
 PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"])
@ -308,7 +320,12 @@ AC_SUBST(HAVE_SECCOMP, [$have_seccomp])

 # Optional dependencies for better normalizing file system data
 AC_CHECK_HEADERS([sys/xattr.h])
-AC_CHECK_FUNCS([llistxattr lremovexattr])
+AS_IF([test "$ac_cv_header_sys_xattr_h" = "yes"],[
+    AC_CHECK_FUNCS([llistxattr lremovexattr])
+    AS_IF([test "$ac_cv_func_llistxattr" = "yes" && test "$ac_cv_func_lremovexattr" = "yes"],[
+        AC_DEFINE([HAVE_ACL_SUPPORT], [1], [Define if we can manipulate file system Access Control Lists])
+    ])
+])
 # Look for aws-cpp-sdk-s3.
 AC_LANG_PUSH(C++)
@ -338,29 +355,10 @@ fi
AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[ AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[
# Look for gtest. # Look for gtest.
PKG_CHECK_MODULES([GTEST], [gtest_main]) PKG_CHECK_MODULES([GTEST], [gtest_main gmock_main])
# Look for rapidcheck. # Look for rapidcheck.
AC_ARG_VAR([RAPIDCHECK_HEADERS], [include path of gtest headers shipped by RAPIDCHECK]) PKG_CHECK_MODULES([RAPIDCHECK], [rapidcheck rapidcheck_gtest])
# No pkg-config yet, https://github.com/emil-e/rapidcheck/issues/302
AC_LANG_PUSH(C++)
AC_SUBST(RAPIDCHECK_HEADERS)
[CXXFLAGS="-I $RAPIDCHECK_HEADERS $CXXFLAGS"]
[LIBS="-lrapidcheck -lgtest $LIBS"]
AC_CHECK_HEADERS([rapidcheck/gtest.h], [], [], [#include <gtest/gtest.h>])
dnl AC_CHECK_LIB doesn't work for C++ libs with mangled symbols
AC_LINK_IFELSE([
AC_LANG_PROGRAM([[
#include <gtest/gtest.h>
#include <rapidcheck/gtest.h>
]], [[
return RUN_ALL_TESTS();
]])
],
[],
[AC_MSG_ERROR([librapidcheck is not found.])])
AC_LANG_POP(C++)
]) ])
@ -369,7 +367,20 @@ PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])

 # Look for lowdown library.
-PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])
+AC_ARG_ENABLE([markdown], AS_HELP_STRING([--enable-markdown], [Enable Markdown rendering in the Nix binary (requires lowdown) [default=auto]]),
+  enable_markdown=$enableval, enable_markdown=auto)
+AS_CASE(["$enable_markdown"],
+  [yes | auto], [
+    PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [
+      CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"
+      have_lowdown=1
+      AC_DEFINE(HAVE_LOWDOWN, 1, [Whether lowdown is available and should be used for Markdown rendering.])
+    ], [
+      AS_IF([test "x$enable_markdown" == "xyes"], [AC_MSG_ERROR([--enable-markdown was specified, but lowdown was not found.])])
+    ])
+  ],
+  [no], [have_lowdown=],
+  [AC_MSG_ERROR([bad value "$enable_markdown" for --enable-markdown, must be one of: yes, no, auto])])
# Look for libgit2. # Look for libgit2.


@ -81,7 +81,7 @@ EXPAND_ONLY_PREDEF = YES
 # RECURSIVE has no effect here.
 # This tag requires that the tag SEARCH_INCLUDES is set to YES.

-INCLUDE_PATH = @RAPIDCHECK_HEADERS@
+INCLUDE_PATH =

 # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
 # tag can be used to specify a list of macro names that should be expanded. The


@ -6,6 +6,8 @@ additional-css = ["custom.css"]
 additional-js = ["redirects.js"]
 edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}"
 git-repository-url = "https://github.com/NixOS/nix"
+fold.enable = true
+fold.level = 1

 [preprocessor.anchors]
 renderers = ["html"]


@ -93,9 +93,6 @@ let
   maybeProse =
     # FIXME: this is a horrible hack to keep `nix help-stores` working.
-    # the correct answer to this is to remove that command and replace it
-    # by statically generated manpages or the output of something like `nix
-    # store info <store type>`.
     let
       help-stores = ''
         ${index}
@ -121,7 +118,7 @@ let
       };
     in
       optionalString (details ? doc) (
-        if match "@store-types@" details.doc != [ ]
+        if match ".*@store-types@.*" details.doc != null
         then help-stores
        else details.doc
      );
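The reason for the added `.*` wildcards: `builtins.match` matches a regular expression against the *whole* string, so the bare placeholder pattern only matched a doc string that consisted of nothing but the placeholder. A small illustration (the input strings here are made up, not taken from the source):

```nix
builtins.match "@store-types@" "Available stores: @store-types@"
# => null (no match: the pattern must cover the entire string)

builtins.match ".*@store-types@.*" "Available stores: @store-types@"
# => [ ] (match; the empty list is returned because the pattern has no capture groups)
```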


@ -18,9 +18,10 @@ const redirects = {
     "chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
     "chap-diff-hook": "advanced-topics/diff-hook.html",
     "check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
-    "chap-distributed-builds": "advanced-topics/distributed-builds.html",
+    "chap-distributed-builds": "command-ref/conf-file.html#conf-builders",
     "chap-post-build-hook": "advanced-topics/post-build-hook.html",
     "chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
+    "chap-writing-nix-expressions": "language/index.html",
     "part-command-ref": "command-ref/command-ref.html",
     "conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation",
     "conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges",
@ -357,7 +358,11 @@ const redirects = {
     "one-time-setup": "testing.html#one-time-setup",
     "using-the-ci-generated-installer-for-manual-testing": "testing.html#using-the-ci-generated-installer-for-manual-testing",
     "characterization-testing": "#characterisation-testing-unit",
-  }
+  },
+  "glossary.html": {
+    "gloss-local-store": "store/types/local-store.html",
+    "gloss-chroot-store": "store/types/local-store.html",
+  },
 };
// the following code matches the current page's URL against the set of redirects. // the following code matches the current page's URL against the set of redirects.


@ -1,7 +0,0 @@
---
synopsis: Option `allowed-uris` can now match whole schemes in URIs without slashes
prs: 9547
---
If a scheme, such as `github:` is specified in the `allowed-uris` option, all URIs starting with `github:` are allowed.
Previously this only worked for schemes whose URIs used the `://` syntax.


@ -0,0 +1,40 @@
---
synopsis: Concise error printing in `nix repl`
prs: 9928
---
Previously, if an element of a list or attribute set threw an error while
evaluating, `nix repl` would print the entire error (including source location
information) inline. This output was clumsy and difficult to parse:
```
nix-repl> { err = builtins.throw "uh oh!"; }
{ err = «error:
… while calling the 'throw' builtin
at «string»:1:9:
1| { err = builtins.throw "uh oh!"; }
| ^
error: uh oh!»; }
```
Now, only the error message is displayed, making the output much more readable.
```
nix-repl> { err = builtins.throw "uh oh!"; }
{ err = «error: uh oh!»; }
```
However, if the whole expression being evaluated throws an error, source
locations and (if applicable) a stack trace are printed, just like you'd expect:
```
nix-repl> builtins.throw "uh oh!"
error:
… while calling the 'throw' builtin
at «string»:1:1:
1| builtins.throw "uh oh!"
| ^
error: uh oh!
```


@ -1,8 +0,0 @@
---
synopsis: Include cgroup stats when building through the daemon
prs: 9598
---
Nix now also reports cgroup statistics when building through the nix daemon and when doing remote builds using ssh-ng,
if both sides of the connection are this version of Nix or newer.


@ -0,0 +1,9 @@
---
synopsis: "`--debugger` can now access bindings from `let` expressions"
prs: 9918
issues: 8827
---
Breakpoints and errors in the bindings of a `let` expression can now access
those bindings in the debugger. Previously, only the body of `let` expressions
could access those bindings.
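As a minimal sketch of the new behaviour (the binding names are illustrative, not taken from the change):

```nix
# Under `--debugger`, stopping at the breakpoint below now lets you
# inspect the sibling `let` binding `greeting` as well, not only the body.
let
  greeting = "hello";
  result = builtins.break "${greeting}, world";
in
result
```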


@ -0,0 +1,25 @@
---
synopsis: The `--debugger` will start more reliably in `let` expressions and function calls
prs: 9917
issues: 6649
---
Previously, if you attempted to evaluate this file with the debugger:
```nix
let
a = builtins.trace "before inner break" (
builtins.break "hello"
);
b = builtins.trace "before outer break" (
builtins.break a
);
in
b
```
Nix would correctly enter the debugger at `builtins.break a`, but if you asked
it to `:continue`, it would skip over the `builtins.break "hello"` expression
entirely.
Now, Nix will correctly enter the debugger at both breakpoints.


@ -1,12 +0,0 @@
---
synopsis: Add new `eval-system` setting
prs: 4093
---
Add a new `eval-system` option.
Unlike `system`, it just overrides the value of `builtins.currentSystem`.
This is more useful than overriding `system`, because you can build these derivations on remote builders which can work on the given system.
In contrast, `system` also effects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense.
`eval-system` only takes effect if it is non-empty.
If empty (the default) `system` is used as before, so there is no breakage.


@ -1,23 +0,0 @@
---
synopsis: Rename hash format `base32` to `nix32`
prs: 9452
---
Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for
[Base32](https://en.wikipedia.org/wiki/Base32).
## Deprecation: Use `nix32` instead of `base32` as `toHashFormat`
For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from`
parameters of the `nix hash convert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value
remains as a deprecated alias for `"nix32"`. Please convert your code from:
```nix
builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";}
```
to
```nix
builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";}
```


@ -0,0 +1,10 @@
---
synopsis: Store paths are allowed to start with `.`
issues: 912
prs: 9867 9091 9095 9120 9121 9122 9130 9219 9224
---
Leading periods were allowed by accident in Nix 2.4. The Nix team has considered this to be a bug, but this behavior has since been relied on by users, leading to unnecessary difficulties.
From now on, leading periods are officially, definitively supported. The names `.` and `..` are disallowed, as well as those starting with `.-` or `..-`.
Nix versions that denied leading periods are documented [in the issue](https://github.com/NixOS/nix/issues/912#issuecomment-1919583286).
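For illustration, a name like the following is now accepted (the file name is an arbitrary example and the store hash is elided):

```nix
# Produces a store path along the lines of /nix/store/<hash>-.gitignore
builtins.toFile ".gitignore" ''
  result
  result-*
''
```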


@ -1,8 +0,0 @@
---
synopsis: Mounted SSH Store
issues: 7890
prs: 7912
---
Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md).
This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem.


@ -1,7 +0,0 @@
---
synopsis: Rename to `nix config show`
issues: 7672
prs: 9477
---
`nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface.


@ -1,6 +0,0 @@
---
synopsis: Fix `nix-env --query --drv-path --json`
prs: 9257
---
Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set.


@ -1,47 +0,0 @@
---
synopsis: Add `nix hash convert`
prs: 9452
---
New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) sub command with a fast track
to stabilization! Examples:
- Convert the hash to `nix32`.
```bash
$ nix hash convert --algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df"
vw46m23bizj4n8afrc0fj19wrp7mj3c0
```
`nix32` is a base32 encoding with a nix-specific character set.
Explicitly specify the hashing algorithm (optional with SRI hashes) but detect hash format by the length of the input
hash.
- Convert the hash to the `sri` format that includes an algorithm specification:
```bash
nix hash convert --algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df"
sha1-gA1Zz808BekAy04hS+SPa4hqCN8=
```
or with an explicit `-to` format:
```bash
nix hash convert --algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df"
sha1-gA1Zz808BekAy04hS+SPa4hqCN8=
```
- Assert the input format of the hash:
```bash
nix hash convert --algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32'
nix hash convert --algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"
sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=
```
The `--to`/`--from`/`--algo` parameters have context-sensitive auto-completion.
## Related Deprecations
The following commands are still available but will emit a deprecation warning. Please convert your code to
`nix hash convert`:
- `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead.
- `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead.
- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead.
- `nix hash to-sri $hash1 $hash2`: Use `nix hash convert --to sri $hash1 $hash2`
or even just `nix hash convert $hash1 $hash2` instead.


@ -1,6 +0,0 @@
---
synopsis: "`nix profile` now allows referring to elements by human-readable name"
prs: 8678
---
[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Indices are deprecated and will be removed in a future version.


@ -0,0 +1,37 @@
---
synopsis: "Visual clutter in `--debugger` is reduced"
prs: 9919
---
Before:
```
info: breakpoint reached
Starting REPL to allow you to inspect the current state of the evaluator.
Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
nix-repl> :continue
error: uh oh
Starting REPL to allow you to inspect the current state of the evaluator.
Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
nix-repl>
```
After:
```
info: breakpoint reached
Nix 2.20.0pre20231222_dirty debugger
Type :? for help.
nix-repl> :continue
error: uh oh
nix-repl>
```


@ -1,42 +0,0 @@
---
synopsis: Source locations are printed more consistently in errors
issues: 561
prs: 9555
---
Source location information is now included in error messages more
consistently. Given this code:
```nix
let
attr = {foo = "bar";};
key = {};
in
attr.${key}
```
Previously, Nix would show this unhelpful message when attempting to evaluate
it:
```
error:
… while evaluating an attribute name
error: value is a set while a string was expected
```
Now, the error message displays where the problematic value was found:
```
error:
… while evaluating an attribute name
at bad.nix:4:11:
3| key = {};
4| in attr.${key}
| ^
5|
error: value is a set while a string was expected
```


@ -104,8 +104,12 @@
   - [Channels](command-ref/files/channels.md)
   - [Default Nix expression](command-ref/files/default-nix-expression.md)
 - [Architecture and Design](architecture/architecture.md)
-  - [Protocols](protocols/index.md)
+  - [Formats and Protocols](protocols/index.md)
+    - [JSON Formats](protocols/json/index.md)
+      - [Store Object Info](protocols/json/store-object-info.md)
+      - [Derivation](protocols/json/derivation.md)
     - [Serving Tarball Flakes](protocols/tarball-fetcher.md)
+    - [Store Path Specification](protocols/store-path.md)
     - [Derivation "ATerm" file format](protocols/derivation-aterm.md)
 - [Glossary](glossary.md)
 - [Contributing](contributing/index.md)
@ -117,6 +121,7 @@
   - [C++ style guide](contributing/cxx.md)
 - [Release Notes](release-notes/index.md)
 {{#include ./SUMMARY-rl-next.md}}
+  - [Release 2.20 (2024-01-29)](release-notes/rl-2.20.md)
   - [Release 2.19 (2023-11-17)](release-notes/rl-2.19.md)
   - [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md)
   - [Release 2.17 (2023-07-24)](release-notes/rl-2.17.md)


@ -36,5 +36,6 @@
 /package-management/s3-substituter /store/types/s3-binary-cache-store 301!

 /protocols/protocols /protocols 301!
+/json/* /protocols/json/:splat 301!

 /release-notes/release-notes /release-notes 301!


@ -36,16 +36,8 @@ error: cannot connect to 'mac'
 then you need to ensure that the `PATH` of non-interactive login shells
 contains Nix.

-> **Warning**
->
-> If you are building via the Nix daemon, it is the Nix daemon user account (that is, `root`) that should have SSH access to a user (not necessarily `root`) on the remote machine.
->
-> If you can't or don't want to configure `root` to be able to access the remote machine, you can use a private Nix store instead by passing e.g. `--store ~/my-nix` when running a Nix command from the local machine.
-
-The list of remote machines can be specified on the command line or in
-the Nix configuration file. The former is convenient for testing. For
-example, the following command allows you to build a derivation for
-`x86_64-darwin` on a Linux machine:
+The [list of remote build machines](@docroot@/command-ref/conf-file.md#conf-builders) can be specified on the command line or in the Nix configuration file.
+For example, the following command allows you to build a derivation for `x86_64-darwin` on a Linux machine:

 ```console
 $ uname
@ -60,97 +52,20 @@ $ cat ./result
 Darwin
 ```
-It is possible to specify multiple builders separated by a semicolon or
-a newline, e.g.
+It is possible to specify multiple build machines separated by a semicolon or a newline, e.g.
 ```console
 --builders 'ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd'
 ```
-Each machine specification consists of the following elements, separated
-by spaces. Only the first element is required. To leave a field at its
-default, set it to `-`.
+Remote build machines can also be configured in [`nix.conf`](@docroot@/command-ref/conf-file.md), e.g.
1. The URI of the remote store in the format
`ssh://[username@]hostname`, e.g. `ssh://nix@mac` or `ssh://mac`.
For backward compatibility, `ssh://` may be omitted. The hostname
may be an alias defined in your `~/.ssh/config`.
2. A comma-separated list of Nix platform type identifiers, such as
`x86_64-darwin`. It is possible for a machine to support multiple
platform types, e.g., `i686-linux,x86_64-linux`. If omitted, this
defaults to the local platform type.
3. The SSH identity file to be used to log in to the remote machine. If
omitted, SSH will use its regular identities.
4. The maximum number of builds that Nix will execute in parallel on
the machine. Typically this should be equal to the number of CPU
cores. For instance, the machine `itchy` in the example will execute
up to 8 builds in parallel.
5. The “speed factor”, indicating the relative speed of the machine. If
there are multiple machines of the right type, Nix will prefer the
fastest, taking load into account.
6. A comma-separated list of *supported features*. If a derivation has
the `requiredSystemFeatures` attribute, then Nix will only perform
the derivation on a machine that has the specified features. For
instance, the attribute
```nix
requiredSystemFeatures = [ "kvm" ];
```
will cause the build to be performed on a machine that has the `kvm`
feature.
7. A comma-separated list of *mandatory features*. A machine will only
be used to build a derivation if all of the machine's mandatory
features appear in the derivation's requiredSystemFeatures
attribute.
8. The (base64-encoded) public host key of the remote machine. If omitted, SSH
will use its regular known-hosts file. Specifically, the field is calculated
via `base64 -w0 /etc/ssh/ssh_host_ed25519_key.pub`.
For example, the machine specification
nix@scratchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 1 kvm
nix@itchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 2
nix@poochie.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 1 2 kvm benchmark
specifies several machines that can perform `i686-linux` builds.
However, `poochie` will only do builds that have the attribute
```nix
requiredSystemFeatures = [ "benchmark" ];
```
or
```nix
requiredSystemFeatures = [ "benchmark" "kvm" ];
```
`itchy` cannot do builds that require `kvm`, but `scratchy` does support
such builds. For regular builds, `itchy` will be preferred over
`scratchy` because it has a higher speed factor.
Remote builders can also be configured in `nix.conf`, e.g.
     builders = ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd

-Finally, remote builders can be configured in a separate configuration
-file included in `builders` via the syntax `@file`. For example,
+Finally, remote build machines can be configured in a separate configuration
+file included in `builders` via the syntax `@/path/to/file`. For example,

     builders = @/etc/nix/machines

-causes the list of machines in `/etc/nix/machines` to be included. (This
-is the default.)
+causes the list of machines in `/etc/nix/machines` to be included.
+(This is the default.)
If you want the builders to use caches, you likely want to set the
option `builders-use-substitutes` in your local `nix.conf`.
To build only on remote builders and disable building on the local
machine, you can use the option `--max-jobs 0`.


@ -51,7 +51,7 @@ These options are for deleting old [profiles] prior to deleting unreachable [sto
- <span id="opt-delete-old">[`--delete-old`](#opt-delete-old)</span> / `-d`\ - <span id="opt-delete-old">[`--delete-old`](#opt-delete-old)</span> / `-d`\
Delete all old generations of profiles. Delete all old generations of profiles.
This is the equivalent of invoking `nix-env --delete-generations old` on each found profile. This is the equivalent of invoking [`nix-env --delete-generations old`](@docroot@/command-ref/nix-env/delete-generations.md#generations-old) on each found profile.
- <span id="opt-delete-older-than">[`--delete-older-than`](#opt-delete-older-than)</span> *period*\ - <span id="opt-delete-older-than">[`--delete-older-than`](#opt-delete-older-than)</span> *period*\
Delete all generations of profiles older than the specified amount (except for the generations that were active at that point in time). Delete all generations of profiles older than the specified amount (except for the generations that were active at that point in time).


@ -12,13 +12,13 @@ This operation deletes the specified generations of the current profile.

 *generations* can be one of the following:

-- <span id="generations-list">`<number>...`</span>:\
+- <span id="generations-list">[`<number>...`](#generations-list)</span>:\
   A list of generation numbers, each one a separate command-line argument.

   Delete exactly the profile generations given by their generation number.
   Deleting the current generation is not allowed.

-- The special value <span id="generations-old">`old`</span>
+- <span id="generations-old">[The special value `old`](#generations-old)</span>

   Delete all generations except the current one.

@ -30,7 +30,7 @@ This operation deletes the specified generations of the current profile.
   > Because one can roll back to a previous generation, it is possible to have generations newer than the current one.
   > They will also be deleted.

-- <span id="generations-time">`<number>d`</span>:\
+- <span id="generations-time">[`<number>d`](#generations-time)</span>:\
   The last *number* days

   *Example*: `30d`

@ -38,7 +38,7 @@ This operation deletes the specified generations of the current profile.
   Delete all generations created more than *number* days ago, except the most recent one of them.
   This allows rolling back to generations that were available within the specified period.

-- <span id="generations-count">`+<number>`</span>:\
+- <span id="generations-count">[`+<number>`](#generations-count)</span>:\
   The last *number* generations up to the present

   *Example*: `+5`


@ -35,13 +35,51 @@ standard input.
 - `--parse`\
   Just parse the input files, and print their abstract syntax trees on
-  standard output in ATerm format.
+  standard output as a Nix expression.

 - `--eval`\
   Just parse and evaluate the input files, and print the resulting
   values on standard output. No instantiation of store derivations
   takes place.
> **Warning**
>
> This option produces output which can be parsed as a Nix expression which
> will produce a different result than the input expression when evaluated.
> For example, these two Nix expressions print the same result despite
> having different meaning:
>
> ```console
> $ nix-instantiate --eval --expr '{ a = {}; }'
> { a = <CODE>; }
> $ nix-instantiate --eval --expr '{ a = <CODE>; }'
> { a = <CODE>; }
> ```
>
> For human-readable output, `nix eval` (experimental) is more informative:
>
> ```console
> $ nix-instantiate --eval --expr 'a: a'
> <LAMBDA>
> $ nix eval --expr 'a: a'
> «lambda @ «string»:1:1»
> ```
>
> For machine-readable output, the `--xml` option produces unambiguous
> output:
>
> ```console
> $ nix-instantiate --eval --xml --expr '{ foo = <CODE>; }'
> <?xml version='1.0' encoding='utf-8'?>
> <expr>
> <attrs>
> <attr column="3" line="1" name="foo">
> <unevaluated />
> </attr>
> </attrs>
> </expr>
> ```
 - `--find-file`\
   Look up the given files in Nix's search path (as specified by the
   `NIX_PATH` environment variable). If found, print the corresponding
@ -61,11 +99,11 @@ standard input.
 - `--json`\
   When used with `--eval`, print the resulting value as a JSON
-  representation of the abstract syntax tree rather than as an ATerm.
+  representation of the abstract syntax tree rather than as a Nix expression.

 - `--xml`\
   When used with `--eval`, print the resulting value as an XML
-  representation of the abstract syntax tree rather than as an ATerm.
+  representation of the abstract syntax tree rather than as a Nix expression.
   The schema is the same as that used by the [`toXML`
   built-in](../language/builtins.md).
@ -133,28 +171,24 @@ $ nix-instantiate --eval --xml --expr '1 + 2'
 The difference between non-strict and strict evaluation:

 ```console
-$ nix-instantiate --eval --xml --expr 'rec { x = "foo"; y = x; }'
-...
-<attr name="x">
-  <string value="foo" />
-</attr>
-<attr name="y">
-  <unevaluated />
-</attr>
-...
-```
-
-Note that `y` is left unevaluated (the XML representation doesn't
-attempt to show non-normal forms).
-
-```console
-$ nix-instantiate --eval --xml --strict --expr 'rec { x = "foo"; y = x; }'
-...
-<attr name="x">
-  <string value="foo" />
-</attr>
-<attr name="y">
-  <string value="foo" />
-</attr>
-...
+$ nix-instantiate --eval --xml --expr '{ x = {}; }'
+<?xml version='1.0' encoding='utf-8'?>
+<expr>
+  <attrs>
+    <attr column="3" line="1" name="x">
+      <unevaluated />
+    </attr>
+  </attrs>
+</expr>
+
+$ nix-instantiate --eval --xml --strict --expr '{ x = {}; }'
+<?xml version='1.0' encoding='utf-8'?>
+<expr>
+  <attrs>
+    <attr column="3" line="1" name="x">
+      <attrs>
+      </attrs>
+    </attr>
+  </attrs>
+</expr>
 ```


@ -172,7 +172,7 @@ Please observe these guidelines to ease reviews:
 > ```
 ````

-Highlight syntax definiions as such, using [EBNF](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form) notation:
+Highlight syntax definitions as such, using [EBNF](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form) notation:

 ````
 > **Syntax**


@ -31,7 +31,7 @@ This shell also adds `./outputs/bin/nix` to your `$PATH` so you can run `nix` im
 To get a shell with one of the other [supported compilation environments](#compilation-environments):

 ```console
-$ nix develop .#native-clang11StdenvPackages
+$ nix develop .#native-clangStdenvPackages
 ```

 > **Note**
@ -51,11 +51,14 @@ To install it in `$(pwd)/outputs` and test it:
 ```console
 [nix-shell]$ make install
-[nix-shell]$ make installcheck -j $NIX_BUILD_CORES
+[nix-shell]$ make installcheck check -j $NIX_BUILD_CORES
 [nix-shell]$ nix --version
 nix (Nix) 2.12
 ```

+For more information on running and filtering tests, see
+[`testing.md`](./testing.md).

 To build a release version of Nix for the current operating system and CPU architecture:

 ```console
@ -64,27 +67,6 @@ $ nix build
You can also build Nix for one of the [supported platforms](#platforms). You can also build Nix for one of the [supported platforms](#platforms).
## Makefile variables
You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run
`make install`.
You may want to set `MAKEFLAGS="-e -j $NIX_BUILD_CORES"` to allow environment
variables to override `Makefile` variables.
- `ENABLE_BUILD=yes` to enable building the C++ code.
- `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.).
The docs can take a while to build, so you may want to disable this for local development.
- `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests.
- `ENABLE_UNIT_TESTS=yes` to enable building the unit tests.
- `OPTIMIZE=1` to enable optimizations.
- `libraries=libutil programs=` to only build a specific library (this will
fail in the linking phase if you don't have the other libraries built, but is
useful for checking types).
- `libraries= programs=nix` to only build a specific program (this will not, in
general, work, because the programs need the libraries).
 ## Building Nix

 To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:
@ -96,7 +78,7 @@ $ nix-shell

 To get a shell with one of the other [supported compilation environments](#compilation-environments):

 ```console
-$ nix-shell --attr devShells.x86_64-linux.native-clang11StdenvPackages
+$ nix-shell --attr devShells.x86_64-linux.native-clangStdenvPackages
 ```
> **Note** > **Note**
@ -129,6 +111,26 @@ $ nix-build
You can also build Nix for one of the [supported platforms](#platforms). You can also build Nix for one of the [supported platforms](#platforms).
## Makefile variables
You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run `make install`.
Run `make` with [`-e` / `--environment-overrides`](https://www.gnu.org/software/make/manual/make.html#index-_002de) to allow environment variables to override `Makefile` variables:
- `ENABLE_BUILD=yes` to enable building the C++ code.
- `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.).
The docs can take a while to build, so you may want to disable this for local development.
- `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests.
- `ENABLE_UNIT_TESTS=yes` to enable building the unit tests.
- `OPTIMIZE=1` to enable optimizations.
- `libraries=libutil programs=` to only build a specific library.
This will fail in the linking phase if the other libraries haven't been built, but is useful for checking types.
- `libraries= programs=nix` to only build a specific program.
This will not work in general, because the programs need the libraries.
## Platforms ## Platforms
Nix can be built for various platforms, as specified in [`flake.nix`]: Nix can be built for various platforms, as specified in [`flake.nix`]:
@ -145,10 +147,10 @@ Nix can be built for various platforms, as specified in [`flake.nix`]:
 In order to build Nix for a different platform than the one you're currently
 on, you need a way for your current Nix installation to build code for that
-platform. Common solutions include [remote builders] and [binary format emulation]
+platform. Common solutions include [remote build machines] and [binary format emulation]
 (only supported on NixOS).

-[remote builders]: ../advanced-topics/distributed-builds.md
+[remote builders]: @docroot@/language/derivations.md#attr-builder
 [binary format emulation]: https://nixos.org/manual/nixos/stable/options.html#opt-boot.binfmt.emulatedSystems
Given such a setup, executing the build only requires selecting the respective attribute. Given such a setup, executing the build only requires selecting the respective attribute.
@ -302,7 +304,6 @@ See also the [format documentation](https://github.com/haskell/cabal/blob/master
### Build process ### Build process
Releases have a precomputed `rl-MAJOR.MINOR.md`, and no `rl-next.md`. Releases have a precomputed `rl-MAJOR.MINOR.md`, and no `rl-next.md`.
Set `buildUnreleasedNotes = true;` in `flake.nix` to build the release notes on the fly.
## Branches ## Branches


@ -77,7 +77,7 @@ there is no risk of any build-system wildcards for the library accidentally pick
 ### Running tests

 You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`.
-Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option, or the `GTEST_FILTER` environment variable.
+Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option, or the `GTEST_FILTER` environment variable, e.g. `GTEST_FILTER='ErrorTraceTest.*' make check`.

 ### Characterisation testing { #characaterisation-testing-unit }


@ -3,10 +3,10 @@
 - [derivation]{#gloss-derivation}

   A description of a build task. The result of a derivation is a
-  store object. Derivations are typically specified in Nix expressions
+  store object. Derivations declared in Nix expressions are specified
   using the [`derivation` primitive](./language/derivations.md). These are
   translated into low-level *store derivations* (implicitly by
-  `nix-env` and `nix-build`, or explicitly by `nix-instantiate`).
+  `nix-build`, or explicitly by `nix-instantiate`).

 [derivation]: #gloss-derivation
@ -14,6 +14,7 @@
   A [derivation] represented as a `.drv` file in the [store].
   It has a [store path], like any [store object].
+  It is the [instantiated][instantiate] form of a derivation.

   Example: `/nix/store/g946hcz4c8mdvq2g8vxx42z51qb71rvp-git-2.38.1.drv`
@ -23,9 +24,9 @@
 - [instantiate]{#gloss-instantiate}, instantiation

-  Translate a [derivation] into a [store derivation].
+  Save an evaluated [derivation] as a [store derivation] in the Nix [store].

-  See [`nix-instantiate`](./command-ref/nix-instantiate.md).
+  See [`nix-instantiate`](./command-ref/nix-instantiate.md), which produces a store derivation from a Nix expression that evaluates to a derivation.
[instantiate]: #gloss-instantiate [instantiate]: #gloss-instantiate
@ -36,7 +37,7 @@
   This can be achieved by:
   - Fetching a pre-built [store object] from a [substituter]
   - Running the [`builder`](@docroot@/language/derivations.md#attr-builder) executable as specified in the corresponding [derivation]
-  - Delegating to a [remote builder](@docroot@/advanced-topics/distributed-builds.html) and retrieving the outputs
+  - Delegating to a [remote machine](@docroot@/command-ref/conf-file.md#conf-builders) and retrieving the outputs
<!-- TODO: link [running] to build process page, #8888 --> <!-- TODO: link [running] to build process page, #8888 -->
See [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md) for a detailed description of the algorithm. See [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md) for a detailed description of the algorithm.
@ -58,23 +59,12 @@
 - [store]{#gloss-store}

-  A collection of store objects, with operations to manipulate that collection.
+  A collection of [store objects][store object], with operations to manipulate that collection.

-  See [Nix store](./store/index.md) for details.
+  See [Nix Store](./store/index.md) for details.

-  There are many types of stores.
+  There are many types of stores, see [Store Types](./store/types/index.md) for details.
See [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md) for a complete list.
From the perspective of the location where Nix is invoked, the Nix store can be referred to _local_ or _remote_.
Only a [local store]{#gloss-local-store} exposes a location in the file system of the machine where Nix is invoked that allows access to store objects, typically `/nix/store`.
Local stores can be used for building [derivations](#derivation).
See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
[store]: #gloss-store [store]: #gloss-store
[local store]: #gloss-local-store
- [chroot store]{#gloss-chroot-store}
A [local store] whose canonical path is anything other than `/nix/store`.
- [binary cache]{#gloss-binary-cache} - [binary cache]{#gloss-binary-cache}
@ -86,7 +76,7 @@
 - [store path]{#gloss-store-path}

-  The location of a [store object](@docroot@/store/index.md#store-object) in the file system, i.e., an immediate child of the Nix store directory.
+  The location of a [store object] in the file system, i.e., an immediate child of the Nix store directory.
> **Example** > **Example**
> >
@ -126,7 +116,7 @@
non-[fixed-output](#gloss-fixed-output-derivation) non-[fixed-output](#gloss-fixed-output-derivation)
derivation. derivation.
-- [output-addressed store object]{#gloss-output-addressed-store-object}
+- [content-addressed store object]{#gloss-content-addressed-store-object}

   A [store object] whose [store path] is determined by its contents.
   This includes derivations, the outputs of [content-addressed derivations](#gloss-content-addressed-derivation), and the outputs of [fixed-output derivations](#gloss-fixed-output-derivation).
@ -155,6 +145,11 @@
builder can rely on external inputs such as the network or the builder can rely on external inputs such as the network or the
system time) but the Nix model assumes it. system time) but the Nix model assumes it.
- [impure derivation]{#gloss-impure-derivation}
[An experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure,
so that they are always rebuilt, and their outputs are not reused by subsequent calls to realise them.
- [Nix database]{#gloss-nix-database} - [Nix database]{#gloss-nix-database}
An SQLite database to track [reference]s between [store object]s. An SQLite database to track [reference]s between [store object]s.
@ -166,11 +161,13 @@
- [Nix expression]{#gloss-nix-expression} - [Nix expression]{#gloss-nix-expression}
A high-level description of software packages and compositions 1. Commonly, a high-level description of software packages and compositions
thereof. Deploying software using Nix entails writing Nix thereof. Deploying software using Nix entails writing Nix
expressions for your packages. Nix expressions are translated to expressions for your packages. Nix expressions specify [derivations][derivation],
derivations that are stored in the Nix store. These derivations can which are [instantiated][instantiate] into the Nix store as [store derivations][store derivation].
then be built. These derivations can then be [realised][realise] to produce [outputs][output].
2. A syntactically valid use of the [Nix language]. For example, the contents of a `.nix` file form an expression.
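> **Example**
>
> A sketch of both senses using the classic CLI, assuming a `<nixpkgs>` channel is available (the resulting store path is elided):
>
> ```console
> $ nix-instantiate --eval --expr '1 + 2'
> 3
> $ nix-instantiate '<nixpkgs>' --attr hello
> /nix/store/…-hello.drv
> ```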
- [reference]{#gloss-reference} - [reference]{#gloss-reference}
@ -222,6 +219,9 @@
The [store derivation] that produced an [output path]. The [store derivation] that produced an [output path].
The deriver for an output path can be queried with the `--deriver` option to
[`nix-store --query`](@docroot@/command-ref/nix-store/query.md).
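> **Example**
>
> A sketch, assuming `hello` from a `<nixpkgs>` channel has been built (store paths elided):
>
> ```console
> $ nix-store --query --deriver $(nix-build '<nixpkgs>' --attr hello --no-out-link)
> /nix/store/…-hello.drv
> ```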
- [validity]{#gloss-validity} - [validity]{#gloss-validity}
A store path is valid if all [store object]s in its [closure] can be read from the [store]. A store path is valid if all [store object]s in its [closure] can be read from the [store].
@ -232,6 +232,7 @@
- All paths in the store path's [closure] are valid. - All paths in the store path's [closure] are valid.
[validity]: #gloss-validity [validity]: #gloss-validity
[local store]: @docroot@/store/types/local-store.md
- [user environment]{#gloss-user-env} - [user environment]{#gloss-user-env}
@ -266,6 +267,21 @@
The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute. The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute.
- [package]{#package}
1. A software package; a collection of files and other data.
2. A [package attribute set].
- [package attribute set]{#package-attribute-set}
An [attribute set](@docroot@/language/values.md#attribute-set) containing the attribute `type = "derivation";` (derivation for historical reasons), as well as other attributes, such as
- attributes that refer to the files of a [package], typically in the form of [derivation outputs](#output),
- attributes that declare something about how the package is supposed to be installed or used,
- other metadata or arbitrary attributes.
[package attribute set]: #package-attribute-set
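> **Example**
>
> One way to observe this marker attribute on a package from Nixpkgs, as a sketch with the classic CLI (assumes a `<nixpkgs>` channel is available):
>
> ```console
> $ nix-instantiate --eval --expr '(import <nixpkgs> {}).hello.type'
> "derivation"
> ```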
- [string interpolation]{#gloss-string-interpolation} - [string interpolation]{#gloss-string-interpolation}
Expanding expressions enclosed in `${ }` within a [string], [path], or [attribute name]. Expanding expressions enclosed in `${ }` within a [string], [path], or [attribute name].
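> **Example**
>
> A quick illustration using the classic CLI:
>
> ```console
> $ nix-instantiate --eval --expr 'let user = "alice"; in "/home/${user}/.config"'
> "/home/alice/.config"
> ```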
@ -282,3 +298,6 @@
These flags are enabled or disabled with the [`experimental-features`](./command-ref/conf-file.html#conf-experimental-features) setting. These flags are enabled or disabled with the [`experimental-features`](./command-ref/conf-file.html#conf-experimental-features) setting.
See the contribution guide on the [purpose and lifecycle of experimental features](@docroot@/contributing/experimental-features.md). See the contribution guide on the [purpose and lifecycle of experimental features](@docroot@/contributing/experimental-features.md).
[Nix language]: ./language/index.md

View file

@ -1,26 +1,60 @@
# Installing a Binary Distribution # Installing a Binary Distribution
The easiest way to install Nix is to run the following command: To install the latest version of Nix, run the following command:
```console ```console
$ curl -L https://nixos.org/nix/install | sh $ curl -L https://nixos.org/nix/install | sh
``` ```
This will run the installer interactively (causing it to explain what This performs the default type of installation for your platform:
it is doing more explicitly), and perform the default "type" of install
for your platform:
- single-user on Linux
- multi-user on macOS
> **Notes on read-only filesystem root in macOS 10.15 Catalina +** - [Multi-user](#multi-user-installation):
> - Linux with systemd and without SELinux
> - It took some time to support this cleanly. You may see posts, - macOS
> examples, and tutorials using obsolete workarounds. - [Single-user](#single-user-installation):
> - Supporting it cleanly made macOS installs too complex to qualify - Linux without systemd
> as single-user, so this type is no longer supported on macOS. - Linux with SELinux
We recommend the multi-user install if it supports your platform and We recommend the multi-user installation if it supports your platform and you can authenticate with `sudo`.
you can authenticate with `sudo`.
The installer can be configured with various command-line arguments and environment variables.
To show available command line flags:
```console
$ curl -L https://nixos.org/nix/install | sh -s -- --help
```
To check what it does and how it can be customised further, [download and edit the second-stage installation script](#installing-from-a-binary-tarball).
# Installing a pinned Nix version from a URL
Version-specific installation URLs for all Nix versions since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/).
The directory for each version contains the corresponding SHA-256 hash.
All installation scripts are invoked the same way:
```console
$ export VERSION=2.19.2
$ curl -L https://releases.nixos.org/nix/nix-$VERSION/install | sh
```
# Multi User Installation
The multi-user Nix installation creates system users and a system service for the Nix daemon.
Supported systems:
- Linux running systemd, with SELinux disabled
- macOS
To explicitly instruct the installer to perform a multi-user installation on your system:
```console
$ curl -L https://nixos.org/nix/install | sh -s -- --daemon
```
You can run this under your usual user account or `root`.
The script will invoke `sudo` as needed.
# Single User Installation # Single User Installation
@ -30,60 +64,48 @@ To explicitly select a single-user installation on your system:
$ curl -L https://nixos.org/nix/install | sh -s -- --no-daemon $ curl -L https://nixos.org/nix/install | sh -s -- --no-daemon
``` ```
This will perform a single-user installation of Nix, meaning that `/nix` In a single-user installation, `/nix` is owned by the invoking user.
is owned by the invoking user. You can run this under your usual user The script will invoke `sudo` to create `/nix` if it doesnt already exist.
account or root. The script will invoke `sudo` to create `/nix` If you dont have `sudo`, manually create `/nix` as `root`:
if it doesnt already exist. If you dont have `sudo`, you should
manually create `/nix` first as root, e.g.:
```console ```console
$ mkdir /nix $ su root
$ chown alice /nix # mkdir /nix
# chown alice /nix
``` ```
The install script will modify the first writable file from amongst # Installing from a binary tarball
`.bash_profile`, `.bash_login` and `.profile` to source
`~/.nix-profile/etc/profile.d/nix.sh`. You can set the
`NIX_INSTALLER_NO_MODIFY_PROFILE` environment variable before executing
the install script to disable this behaviour.
# Multi User Installation You can also download a binary tarball that contains Nix and all its dependencies:
- Choose a [version](https://releases.nixos.org/?prefix=nix/) and [system type](../contributing/hacking.md#platforms)
- Download and unpack the tarball
- Run the installer
The multi-user Nix installation creates system users, and a system > **Example**
service for the Nix daemon.
**Supported Systems**
- Linux running systemd, with SELinux disabled
- macOS
You can instruct the installer to perform a multi-user installation on
your system:
```console
$ curl -L https://nixos.org/nix/install | sh -s -- --daemon
```
The multi-user installation of Nix will create build users between the
user IDs 30001 and 30032, and a group with the group ID 30000. You
can run this under your usual user account or root. The script
will invoke `sudo` as needed.
> **Note**
> >
> If you need Nix to use a different group ID or user ID set, you will > ```console
> have to download the tarball manually and [edit the install > $ pushd $(mktemp -d)
> script](#installing-from-a-binary-tarball). > $ export VERSION=2.19.2
> $ export SYSTEM=x86_64-linux
> $ curl -LO https://releases.nixos.org/nix/nix-$VERSION/nix-$VERSION-$SYSTEM.tar.xz
> $ tar xf nix-$VERSION-$SYSTEM.tar.xz
> $ cd nix-$VERSION-$SYSTEM
> $ ./install
> $ popd
> ```
The installer will modify `/etc/bashrc`, and `/etc/zshrc` if they exist. The installer can be customised with the environment variables declared in the file named `install-multi-user`.
The installer will first back up these files with a `.backup-before-nix`
extension. The installer will also create `/etc/profile.d/nix.sh`. ## Native packages for Linux distributions
The Nix community maintains installers for some Linux distributions in their [native packaging format](https://nix-community.github.io/nix-installers/).
# macOS Installation # macOS Installation
<!-- anchors to catch existing links -->
[]{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes} []{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes}
<!-- Note: anchors above to catch permalinks to old explanations -->
We believe we have ironed out how to cleanly support the read-only root We believe we have ironed out how to cleanly support the read-only root file system
on modern macOS. New installs will do this automatically. on modern macOS. New installs will do this automatically.
This section previously detailed the situation, options, and trade-offs, This section previously detailed the situation, options, and trade-offs,
@ -126,33 +148,3 @@ this to run the installer, but it may help if you run into trouble:
boot process to avoid problems loading or restoring any programs that boot process to avoid problems loading or restoring any programs that
need access to your Nix store need access to your Nix store
# Installing a pinned Nix version from a URL
Version-specific installation URLs for all Nix versions
since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/).
The corresponding SHA-256 hash can be found in the directory for the given version.
These install scripts can be used the same as usual:
```console
$ curl -L https://releases.nixos.org/nix/nix-<version>/install | sh
```
# Installing from a binary tarball
You can also download a binary tarball that contains Nix and all its
dependencies. (This is what the install script at
<https://nixos.org/nix/install> does automatically.) You should unpack
it somewhere (e.g. in `/tmp`), and then run the script named `install`
inside the binary tarball:
```console
$ cd /tmp
$ tar xfj nix-1.8-x86_64-darwin.tar.bz2
$ cd nix-1.8-x86_64-darwin
$ ./install
```
If you need to edit the multi-user installation script to use different
group ID or a different user ID range, modify the variables set in the
file named `install-multi-user`.

View file

@ -32,11 +32,15 @@
your distribution does not provide it, please install it from your distribution does not provide it, please install it from
<http://www.sqlite.org/>. <http://www.sqlite.org/>.
- The [Boehm garbage collector](http://www.hboehm.info/gc/) to reduce - The [Boehm garbage collector (`bdw-gc`)](http://www.hboehm.info/gc/) to reduce
the evaluators memory consumption (optional). To enable it, install the evaluators memory consumption (optional).
To enable it, install
`pkgconfig` and the Boehm garbage collector, and pass the flag `pkgconfig` and the Boehm garbage collector, and pass the flag
`--enable-gc` to `configure`. `--enable-gc` to `configure`.
For `bdw-gc` <= 8.2.4 Nix needs a [small patch](https://github.com/NixOS/nix/blob/ac4d2e7b857acdfeac35ac8a592bdecee2d29838/boehmgc-traceable_allocator-public.diff) to be applied.
- The `boost` library of version 1.66.0 or higher. It can be obtained - The `boost` library of version 1.66.0 or higher. It can be obtained
from the official web site <https://www.boost.org/>. from the official web site <https://www.boost.org/>.

View file

@ -1,14 +1,40 @@
# Upgrading Nix # Upgrading Nix
Multi-user Nix users on macOS can upgrade Nix by running: `sudo -i sh -c > **Note**
'nix-channel --update && >
nix-env --install --attr nixpkgs.nix && > These upgrade instructions apply where Nix was installed following the [installation instructions in this manual](./index.md).
launchctl remove org.nixos.nix-daemon &&
launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist'`
Single-user installations of Nix should run this: `nix-channel --update; Check which Nix version will be installed, for example from one of the [release channels](http://channels.nixos.org/) such as `nixpkgs-unstable`:
nix-env --install --attr nixpkgs.nix nixpkgs.cacert`
Multi-user Nix users on Linux should run this with sudo: `nix-channel ```console
--update; nix-env --install --attr nixpkgs.nix nixpkgs.cacert; systemctl $ nix-shell -p nix -I nixpkgs=channel:nixpkgs-unstable --run "nix --version"
daemon-reload; systemctl restart nix-daemon` nix (Nix) 2.18.1
```
> **Warning**
>
> Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with [`nix-build`](@docroot@/command-ref/nix-build.md) or [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md), may change the database schema!
> Reverting to an older version of Nix may therefore require purging the store database before it can be used.
## Linux multi-user
```console
$ sudo su
# nix-env --install --file '<nixpkgs>' --attr nix cacert -I nixpkgs=channel:nixpkgs-unstable
# systemctl daemon-reload
# systemctl restart nix-daemon
```
## macOS multi-user
```console
$ sudo nix-env --install --file '<nixpkgs>' --attr nix -I nixpkgs=channel:nixpkgs-unstable
$ sudo launchctl remove org.nixos.nix-daemon
$ sudo launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist
```
## Single-user all platforms
```console
$ nix-env --install --file '<nixpkgs>' --attr nix cacert -I nixpkgs=channel:nixpkgs-unstable
```

View file

@ -257,7 +257,7 @@ Derivations can declare some infrequently used optional attributes.
of the environment (typically, a few hundred kilobyte). of the environment (typically, a few hundred kilobyte).
- [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\ - [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\
If this attribute is set to `true` and [distributed building is enabled](../advanced-topics/distributed-builds.md), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine. If this attribute is set to `true` and [distributed building is enabled](@docroot@/command-ref/conf-file.md#conf-builders), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine.
This is useful for derivations that are cheapest to build locally. This is useful for derivations that are cheapest to build locally.
- [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\ - [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\

View file

@ -36,7 +36,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect
The system type on which the [`builder`](#attr-builder) executable is meant to be run. The system type on which the [`builder`](#attr-builder) executable is meant to be run.
A necessary condition for Nix to build derivations locally is that the `system` attribute matches the current [`system` configuration option]. A necessary condition for Nix to build derivations locally is that the `system` attribute matches the current [`system` configuration option].
It can automatically [build on other platforms](../advanced-topics/distributed-builds.md) by forwarding build requests to other machines. It can automatically [build on other platforms](@docroot@/command-ref/conf-file.md#conf-builders) by forwarding build requests to other machines.
[`system` configuration option]: @docroot@/command-ref/conf-file.md#conf-system [`system` configuration option]: @docroot@/command-ref/conf-file.md#conf-system
@ -274,7 +274,7 @@ The [`builder`](#attr-builder) is executed as follows:
directory (typically, `/nix/store`). directory (typically, `/nix/store`).
- `NIX_ATTRS_JSON_FILE` & `NIX_ATTRS_SH_FILE` if `__structuredAttrs` - `NIX_ATTRS_JSON_FILE` & `NIX_ATTRS_SH_FILE` if `__structuredAttrs`
is set to `true` for the dervation. A detailed explanation of this is set to `true` for the derivation. A detailed explanation of this
behavior can be found in the behavior can be found in the
[section about structured attrs](./advanced-attributes.md#adv-attr-structuredAttrs). [section about structured attrs](./advanced-attributes.md#adv-attr-structuredAttrs).

View file

@ -1,6 +1,8 @@
# Import From Derivation # Import From Derivation
The value of a Nix expression can depend on the contents of a [store object](@docroot@/glossary.md#gloss-store-object). The value of a Nix expression can depend on the contents of a [store object].
[store object]: @docroot@/glossary.md#gloss-store-object
Passing an expression `expr` that evaluates to a [store path](@docroot@/glossary.md#gloss-store-path) to any built-in function which reads from the filesystem constitutes Import From Derivation (IFD): Passing an expression `expr` that evaluates to a [store path](@docroot@/glossary.md#gloss-store-path) to any built-in function which reads from the filesystem constitutes Import From Derivation (IFD):

View file

@ -84,7 +84,7 @@ The `+` operator is overloaded to also work on strings and paths.
> >
> *string* `+` *string* > *string* `+` *string*
Concatenate two [string]s and merge their string contexts. Concatenate two [strings][string] and merge their string contexts.
[String concatenation]: #string-concatenation [String concatenation]: #string-concatenation
@ -94,7 +94,7 @@ Concatenate two [string]s and merge their string contexts.
> >
> *path* `+` *path* > *path* `+` *path*
Concatenate two [path]s. Concatenate two [paths][path].
The result is a path. The result is a path.
[Path concatenation]: #path-concatenation [Path concatenation]: #path-concatenation
@ -150,9 +150,9 @@ If an attribute name is present in both, the attribute value from the latter is
Comparison is Comparison is
- [arithmetic] for [number]s - [arithmetic] for [numbers][number]
- lexicographic for [string]s and [path]s - lexicographic for [strings][string] and [paths][path]
- item-wise lexicographic for [list]s: - item-wise lexicographic for [lists][list]:
elements at the same index in both lists are compared according to their type and skipped if they are equal. elements at the same index in both lists are compared according to their type and skipped if they are equal.
All comparison operators are implemented in terms of `<`, and the following equivalencies hold: All comparison operators are implemented in terms of `<`, and the following equivalencies hold:
@ -163,12 +163,12 @@ All comparison operators are implemented in terms of `<`, and the following equi
| *a* `>` *b* | *b* `<` *a* | | *a* `>` *b* | *b* `<` *a* |
| *a* `>=` *b* | `! (` *a* `<` *b* `)` | | *a* `>=` *b* | `! (` *a* `<` *b* `)` |
[Comparison]: #comparison-operators [Comparison]: #comparison
## Equality ## Equality
- [Attribute sets][attribute set] and [list]s are compared recursively, and therefore are fully evaluated. - [Attribute sets][attribute set] and [lists][list] are compared recursively, and therefore are fully evaluated.
- Comparison of [function]s always returns `false`. - Comparison of [functions][function] always returns `false`.
- Numbers are type-compatible, see [arithmetic] operators. - Numbers are type-compatible, see [arithmetic] operators.
- Floating point numbers only differ up to a limited precision. - Floating point numbers only differ up to a limited precision.
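A few evaluation sketches of these rules, using the classic CLI:

```console
$ nix-instantiate --eval --expr '[ 1 2 ] < [ 1 3 ]'
true
$ nix-instantiate --eval --expr '{ a = 1; } == { a = 1; }'
true
$ nix-instantiate --eval --expr '(x: x) == (x: x)'
false
```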

View file

@ -20,6 +20,8 @@ Rather than writing
(where `freetype` is a [derivation]), you can instead write (where `freetype` is a [derivation]), you can instead write
[derivation]: ../glossary.md#gloss-derivation
```nix ```nix
"--with-freetype2-library=${freetype}/lib" "--with-freetype2-library=${freetype}/lib"
``` ```
@ -189,7 +191,7 @@ If neither is present, an error is thrown.
> "${a}" > "${a}"
> ``` > ```
> >
> error: cannot coerce a set to a string > error: cannot coerce a set to a string: { }
> >
> at «string»:4:2: > at «string»:4:2:
> >

View file

@ -156,6 +156,8 @@ function and the fifth being a set.
Note that lists are only lazy in values, and they are strict in length. Note that lists are only lazy in values, and they are strict in length.
Elements in a list can be accessed using [`builtins.elemAt`](./builtins.md#builtins-elemAt).
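For instance, a quick sketch with the classic CLI (indices are zero-based):

```console
$ nix-instantiate --eval --expr 'builtins.elemAt [ "a" "b" "c" ] 1'
"b"
```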
## Attribute Set ## Attribute Set
An attribute set is a collection of name-value-pairs (called *attributes*) enclosed in curly brackets (`{ }`). An attribute set is a collection of name-value-pairs (called *attributes*) enclosed in curly brackets (`{ }`).

View file

@ -0,0 +1,71 @@
# Derivation JSON Format
> **Warning**
>
> This JSON format is currently
> [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command)
> and subject to change.
The JSON serialization of a
[derivation](@docroot@/glossary.md#gloss-store-derivation)
is a JSON object with the following fields:
* `name`:
The name of the derivation.
This is used when calculating the store paths of the derivation's outputs.
* `outputs`:
Information about the output paths of the derivation.
This is a JSON object with one member per output, where the key is the output name and the value is a JSON object with these fields:
* `path`: The output path.
* `hashAlgo`:
For fixed-output derivations, the hashing algorithm (e.g. `sha256`), optionally prefixed by `r:` if `hash` denotes a NAR hash rather than a flat file hash.
* `hash`:
For fixed-output derivations, the expected content hash in base-16.
> **Example**
>
> ```json
> "outputs": {
> "out": {
> "path": "/nix/store/2543j7c6jn75blc3drf4g5vhb1rhdq29-source",
> "hashAlgo": "r:sha256",
> "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62"
> }
> }
> ```
* `inputSrcs`:
A list of store paths on which this derivation depends.
* `inputDrvs`:
A JSON object specifying the derivations on which this derivation depends, and which outputs of those derivations it uses.
> **Example**
>
> ```json
> "inputDrvs": {
> "/nix/store/6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"],
> "/nix/store/fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"]
> }
> ```
specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`.
* `system`:
The system type on which this derivation is to be built
(e.g. `x86_64-linux`).
* `builder`:
The absolute path of the program to be executed to run the build.
Typically this is the `bash` shell
(e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`).
* `args`:
The command-line arguments passed to the `builder`.
* `env`:
The environment passed to the `builder`.
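One way to obtain this JSON is the `nix derivation show` command from the experimental `nix-command` CLI; a sketch, assuming flakes are enabled:

```console
$ nix derivation show nixpkgs#hello
```

This prints a JSON object keyed by the derivation's store path, whose value has the form described above.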

View file

@ -0,0 +1,98 @@
# Store object info JSON format
> **Warning**
>
> This JSON format is currently
> [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command)
> and subject to change.
Info about a [store object](@docroot@/glossary.md#gloss-store-object).
* `path`:
[Store path][store path] to the given store object.
* `narHash`:
Hash of the [file system object] part of the store object when serialized as a [Nix Archive].
* `narSize`:
Size of the [file system object] part of the store object when serialized as a [Nix Archive].
* `references`:
An array of [store paths][store path], possibly including this one.
* `ca` (optional):
Content address of this store object's file system object, used to compute its store path.
[store path]: @docroot@/glossary.md#gloss-store-path
[file system object]: @docroot@/store/file-system-object.md
[Nix Archive]: @docroot@/glossary.md#gloss-nar
## Impure fields
These are not intrinsic properties of the store object.
In other words, the same store object residing in different stores could have different values for these properties.
* `deriver` (optional):
The path to the [derivation] from which this store object is produced.
[derivation]: @docroot@/glossary.md#gloss-store-derivation
* `registrationTime` (optional):
When this store object was added to the store.
* `ultimate` (optional):
Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere.
* `signatures` (optional):
Signatures claiming that this store object is what it claims to be.
Not relevant for [content-addressed] store objects,
but useful for [input-addressed] store objects.
[content-addressed]: @docroot@/glossary.md#gloss-content-addressed-store-object
[input-addressed]: @docroot@/glossary.md#gloss-input-addressed-store-object
### `.narinfo` extra fields
This metadata is specific to the "binary cache" family of Nix store types.
This information is not intrinsic to the store object, but about how it is stored.
* `url`:
Where to download a compressed archive of the file system objects of this store object.
* `compression`:
The compression format that the archive is in.
* `fileHash`:
A digest for the compressed archive itself, as opposed to the data contained within.
* `fileSize`:
The size of the compressed archive itself.
## Computed closure fields
These fields are not stored at all, but computed by traversing the other fields across all the store objects in a [closure].
* `closureSize`:
The total size of this store object and every other object in its [closure].
### `.narinfo` extra fields
* `closureSize`:
The total size of the compressed archive itself for this object, and the compressed archives of every object in this object's [closure].
[closure]: @docroot@/glossary.md#gloss-closure
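This information can be inspected with `nix path-info` from the experimental `nix-command` CLI; a sketch, assuming flakes are enabled:

```console
$ nix path-info --json --closure-size nixpkgs#hello
```

This prints the store object info described above, including the computed closure size.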

View file

@ -0,0 +1,126 @@
# Complete Store Path Calculation
This is the complete specification for how store paths are calculated.
The format of this specification is close to [Extended BackusNaur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), but must deviate for a few things such as hash functions which we treat as bidirectional for specification purposes.
Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to.
But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful.
## Store path proper
```ebnf
store-path = store-dir "/" digest "-" name
```
where
- `name` = the name of the store object.
- `store-dir` = the [store directory](@docroot@/store/store-path.md#store-directory)
- `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `fingerprint`
This is the hash part of the store path.
## Fingerprint
- ```ebnf
fingerprint = type ":" sha256 ":" inner-digest ":" store ":" name
```
Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
(e.g. you won't get `/nix/store/<digest>-name1` and `/nix/store/<digest>-name2`, or `/gnu/store/<digest>-name1`, with equal hash parts).
- `type` = one of:
- ```ebnf
| "text" ( ":" store-path )*
```
for encoded derivations written to the store.
The optional trailing store paths are the references of the store object.
- ```ebnf
| "source" ( ":" store-path )*
```
For paths copied to the store and hashed via a [Nix Archive (NAR)] and [SHA-256][sha-256].
Just like in the text case, we can have the store objects referenced by their paths.
Additionally, we can have an optional `:self` label to denote self reference.
- ```ebnf
| "output:" id
```
For either the outputs built from derivations,
or paths copied to the store that are hashed as a single flat file or with a hash algorithm other than [SHA-256][sha-256]
(in those cases `"source"` cannot be used; this case exists only for compatibility).
`id` is the name of the output (usually, "out").
For content-addressed store objects, `id` is always `"out"`.
- `inner-digest` = base-16 representation of a SHA-256 hash of `inner-fingerprint`
## Inner fingerprint
- `inner-fingerprint` = one of the following based on `type`:
- if `type` = `"text:" ...`:
the string written to the resulting store path.
- if `type` = `"source:" ...`:
the hash of the [Nix Archive (NAR)] serialization of the [file system object](@docroot@/store/file-system-object.md) of the store object.
- if `type` = `"output:" id`:
- For input-addressed derivation outputs:
the [ATerm](@docroot@/protocols/derivation-aterm.md) serialization of the derivation modulo fixed output derivations.
- For content-addressed store paths:
```ebnf
"fixed:out:" rec algo ":" hash ":"
```
where
- `rec` = one of:
- ```ebnf
| "r:"
```
for hashes of the [Nix Archive (NAR)] (arbitrary file system object) serialization
- ```ebnf
| ""
```
(empty string) for hashes of the flat (single file) serialization
- ```ebnf
algo = "md5" | "sha1" | "sha256"
```
- `hash` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
Note that `id` = `"out"`, regardless of the name part of the store path.
Also note that NAR + SHA-256 must not use this case, and instead must use the `type` = `"source:" ...` case.
[Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR
[sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
### Historical Note
The `type` = `"source:" ...` and `type` = `"output:out"` grammars technically overlap in purpose,
in that both can represent data hashed by its SHA-256 NAR serialization.
The original reason for this way of computing names was to prevent name collisions (for security).
For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
The former would have an `inner-fingerprint` starting with `output:out:`, while the latter would have an `inner-fingerprint` starting with `source:`.
Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separating derivation-produced vs manually-hashed content-addressed data like this was not useful.
Now, data that is content-addressed with SHA-256 + NAR-serialization always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
This allows freely switching between using [fixed-output derivations](@docroot@/glossary.md#gloss-fixed-output-derivation) for fetching, and fetching out-of-band and then manually adding.
It also removes the ambiguity from the grammar.
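To see the distinction concretely, here is a sketch with the classic CLI (digests elided): adding the same file once with NAR + SHA-256 hashing and once with flat SHA-256 hashing yields store paths with different hash parts, because the former uses the `source` rule and the latter the `output:out` / `fixed:out` rule.

```console
$ echo example > example.txt
$ nix-store --add example.txt
/nix/store/…-example.txt
$ nix-store --add-fixed sha256 example.txt
/nix/store/…-example.txt
```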

View file

@ -10,7 +10,6 @@ For more in-depth information you are kindly referred to subsequent chapters.
``` ```
The install script will use `sudo`, so make sure you have sufficient rights. The install script will use `sudo`, so make sure you have sufficient rights.
On Linux, `--daemon` can be omitted for a single-user install.
For other installation methods, see the detailed [installation instructions](installation/index.md). For other installation methods, see the detailed [installation instructions](installation/index.md).

View file

@ -0,0 +1,169 @@
# Release 2.20.0 (2024-01-29)
- Option `allowed-uris` can now match whole schemes in URIs without slashes [#9547](https://github.com/NixOS/nix/pull/9547)
If a scheme, such as `github:` is specified in the `allowed-uris` option, all URIs starting with `github:` are allowed.
Previously this only worked for schemes whose URIs used the `://` syntax.
- Include cgroup stats when building through the daemon [#9598](https://github.com/NixOS/nix/pull/9598)
Nix now also reports cgroup statistics when building through the Nix daemon and when doing remote builds using `ssh-ng`,
if both sides of the connection are using Nix 2.20 or newer.
- Disallow empty search regex in `nix search` [#9481](https://github.com/NixOS/nix/pull/9481)
[`nix search`](@docroot@/command-ref/new-cli/nix3-search.md) now requires a search regex to be passed. To show all packages, use `^`.
- Add new `eval-system` setting [#4093](https://github.com/NixOS/nix/pull/4093)
Add a new `eval-system` option.
Unlike `system`, it just overrides the value of `builtins.currentSystem`.
This is more useful than overriding `system`, because you can build these derivations on remote builders which can work on the given system.
In contrast, `system` also affects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense.
`eval-system` only takes effect if it is non-empty.
If empty (the default) `system` is used as before, so there is no breakage.
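A sketch of the effect, assuming the setting is passed as a command-line flag (as Nix settings generally can be) on an `x86_64-linux` machine:

```console
$ nix eval --impure --expr 'builtins.currentSystem'
"x86_64-linux"
$ nix eval --impure --eval-system riscv64-linux --expr 'builtins.currentSystem'
"riscv64-linux"
```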
- Import-from-derivation builds the derivation in the build store [#9661](https://github.com/NixOS/nix/pull/9661)
When using `--eval-store`, `import`ing from a derivation will now result in the derivation being built on the build store, i.e. the store specified in the `store` Nix option.
Because the resulting Nix expression must be copied back to the evaluation store in order to be imported, this requires the evaluation store to trust the build store's signatures.
- Mounted SSH Store [#7890](https://github.com/NixOS/nix/issues/7890) [#7912](https://github.com/NixOS/nix/pull/7912)
Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md).
This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem.
- Rename `nix show-config` to `nix config show` [#7672](https://github.com/NixOS/nix/issues/7672) [#9477](https://github.com/NixOS/nix/pull/9477)
`nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command line interface.
- Add command `nix hash convert` [#9452](https://github.com/NixOS/nix/pull/9452)
This replaces the old `nix hash to-*` commands, which are still available but will emit a deprecation warning. Please convert as follows:
- `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead.
- `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead.
- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead.
- `nix hash to-sri $hash1 $hash2`: Use `nix hash convert --to sri $hash1 $hash2` or even just `nix hash convert $hash1 $hash2` instead.
- Rename hash format `base32` to `nix32` [#9452](https://github.com/NixOS/nix/pull/9452)
Hash format `base32` was renamed to `nix32` since it used a special Nix-specific character set for
[Base32](https://en.wikipedia.org/wiki/Base32).
- `nix profile` now allows referring to elements by human-readable names [#8678](https://github.com/NixOS/nix/pull/8678)
[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Profile element names are generated when a package is installed and remain the same until the package is removed.
**Warning**: The `manifest.nix` file used to record the contents of profiles has changed. Nix will automatically upgrade profiles to the new version when you modify the profile. After that, the profile can no longer be used by older versions of Nix.
- Give `nix store add` a `--hash-algo` flag [#9809](https://github.com/NixOS/nix/pull/9809)
Adds a missing feature that was present in the old CLI, and matches our
plans to have similar flags for `nix hash convert` and `nix hash path`.
- Coercion errors include the failing value
The `error: cannot coerce a <TYPE> to a string` message now includes the value
which caused the error.
Before:
```
error: cannot coerce a set to a string
```
After:
```
error: cannot coerce a set to a string: { aesSupport = «thunk»;
avx2Support = «thunk»; avx512Support = «thunk»; avxSupport = «thunk»;
canExecute = «thunk»; config = «thunk»; darwinArch = «thunk»; darwinMinVersion
= «thunk»; darwinMinVersionVariable = «thunk»; darwinPlatform = «thunk»; «84
attributes elided»}
```
- Type errors include the failing value
In errors like `value is an integer while a list was expected`, the message now
includes the failing value.
Before:
```
error: value is a set while a string was expected
```
After:
```
error: expected a string but found a set: { ghc810 = «thunk»;
ghc8102Binary = «thunk»; ghc8107 = «thunk»; ghc8107Binary = «thunk»;
ghc865Binary = «thunk»; ghc90 = «thunk»; ghc902 = «thunk»; ghc92 = «thunk»;
ghc924Binary = «thunk»; ghc925 = «thunk»; «17 attributes elided»}
```
- Source locations are printed more consistently in errors [#561](https://github.com/NixOS/nix/issues/561) [#9555](https://github.com/NixOS/nix/pull/9555)
Source location information is now included in error messages more
consistently. Given this code:
```nix
let
attr = {foo = "bar";};
key = {};
in
attr.${key}
```
Previously, Nix would show this unhelpful message when attempting to evaluate
it:
```
error:
… while evaluating an attribute name
error: value is a set while a string was expected
```
Now, the error message displays where the problematic value was found:
```
error:
… while evaluating an attribute name
at bad.nix:4:11:
3| key = {};
4| in attr.${key}
| ^
5|
error: expected a string but found a set
```
- Some stack overflow segfaults are fixed [#9616](https://github.com/NixOS/nix/issues/9616) [#9617](https://github.com/NixOS/nix/pull/9617)
The number of nested function calls has been restricted, to detect and report
infinite function call recursions. The default maximum call depth is 10,000 and
can be set with [the `max-call-depth`
option](@docroot@/command-ref/conf-file.md#conf-max-call-depth).
This replaces the `stack overflow (possible infinite recursion)` message.
- Better error reporting for `with` expressions [#9658](https://github.com/NixOS/nix/pull/9658)
`with` expressions using non-attrset values to resolve variables are now reported with proper positions, e.g.
```
nix-repl> with 1; a
error:
… while evaluating the first subexpression of a with expression
at «string»:1:1:
1| with 1; a
| ^
error: expected a set but found an integer
```

View file

@ -34,16 +34,16 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1701355166, "lastModified": 1705033721,
"narHash": "sha256-4V7XMI0Gd+y0zsi++cEHd99u3GNL0xSTGRmiWKzGnUQ=", "narHash": "sha256-K5eJHmL1/kev6WuqyqqbS1cdNnSidIZ3jeqJ7GbrYnQ=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "36c4ac09e9bebcec1fa7b7539cddb0c9e837409c", "rev": "a1982c92d8980a0114372973cbdfe0a307f1bdea",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "NixOS", "owner": "NixOS",
"ref": "staging-23.05", "ref": "nixos-23.05-small",
"repo": "nixpkgs", "repo": "nixpkgs",
"type": "github" "type": "github"
} }

View file

@ -1,17 +1,7 @@
{ {
description = "The purely functional package manager"; description = "The purely functional package manager";
# TODO Go back to nixos-23.05-small once inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small";
# https://github.com/NixOS/nixpkgs/pull/271202 is merged.
#
# Also, do not grab arbitrary further staging commits. This PR was
# carefully made to be based on release-23.05 and just contain
# rebuild-causing changes to packages that Nix actually uses.
#
# Once this is updated to something containing
# https://github.com/NixOS/nixpkgs/pull/271423, don't forget
# to remove the `nix.checkAllErrors = false;` line in the tests.
inputs.nixpkgs.url = "github:NixOS/nixpkgs/staging-23.05";
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; }; inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; };
@ -62,7 +52,6 @@
stdenvs = [ stdenvs = [
"ccacheStdenv" "ccacheStdenv"
"clang11Stdenv"
"clangStdenv" "clangStdenv"
"gccStdenv" "gccStdenv"
"libcxxStdenv" "libcxxStdenv"
@ -174,10 +163,10 @@
enableLargeConfig = true; enableLargeConfig = true;
}).overrideAttrs(o: { }).overrideAttrs(o: {
patches = (o.patches or []) ++ [ patches = (o.patches or []) ++ [
./boehmgc-coroutine-sp-fallback.diff ./dep-patches/boehmgc-coroutine-sp-fallback.diff
# https://github.com/ivmai/bdwgc/pull/586 # https://github.com/ivmai/bdwgc/pull/586
./boehmgc-traceable_allocator-public.diff ./dep-patches/boehmgc-traceable_allocator-public.diff
]; ];
}); });
@ -201,19 +190,18 @@
boehmgc = final.boehmgc-nix; boehmgc = final.boehmgc-nix;
libgit2 = final.libgit2-nix; libgit2 = final.libgit2-nix;
busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell; busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell;
changelog-d = final.changelog-d-nix;
} // { } // {
# this is a proper separate downstream package, but put # this is a proper separate downstream package, but put
# here also for back compat reasons. # here also for back compat reasons.
perl-bindings = final.nix-perl-bindings; perl-bindings = final.nix-perl-bindings;
}; };
nix-perl-bindings = final.callPackage ./perl { nix-perl-bindings = final.callPackage ./perl {
inherit fileset stdenv; inherit fileset stdenv;
};
}; };
};
in { in {
# A Nixpkgs overlay that overrides the 'nix' and # A Nixpkgs overlay that overrides the 'nix' and
# 'nix.perl-bindings' packages. # 'nix.perl-bindings' packages.
@ -231,14 +219,25 @@
buildCross = forAllCrossSystems (crossSystem: buildCross = forAllCrossSystems (crossSystem:
lib.genAttrs ["x86_64-linux"] (system: self.packages.${system}."nix-${crossSystem}")); lib.genAttrs ["x86_64-linux"] (system: self.packages.${system}."nix-${crossSystem}"));
buildNoGc = forAllSystems (system: self.packages.${system}.nix.overrideAttrs (a: { configureFlags = (a.configureFlags or []) ++ ["--enable-gc=no"];})); buildNoGc = forAllSystems (system:
self.packages.${system}.nix.override { enableGC = false; }
);
buildNoTests = forAllSystems (system: buildNoTests = forAllSystems (system:
self.packages.${system}.nix.overrideAttrs (a: { self.packages.${system}.nix.override {
doCheck = doCheck = false;
assert ! a?dontCheck; doInstallCheck = false;
false; installUnitTests = false;
}) }
);
# Toggles some settings for better coverage. Windows needs these
# library combinations, and Debian builds Nix with GNU readline too.
buildReadlineNoMarkdown = forAllSystems (system:
self.packages.${system}.nix.override {
enableMarkdown = false;
readlineFlavor = "readline";
}
); );
# Perl bindings for various platforms. # Perl bindings for various platforms.
@ -363,7 +362,7 @@
}); });
packages = forAllSystems (system: rec { packages = forAllSystems (system: rec {
inherit (nixpkgsFor.${system}.native) nix; inherit (nixpkgsFor.${system}.native) nix changelog-d-nix;
default = nix; default = nix;
} // (lib.optionalAttrs (builtins.elem system linux64BitSystems) { } // (lib.optionalAttrs (builtins.elem system linux64BitSystems) {
nix-static = nixpkgsFor.${system}.static.nix; nix-static = nixpkgsFor.${system}.static.nix;

View file

@ -43,7 +43,11 @@ The team meets twice a week:
- Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn) - Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)
1. Triage issues and pull requests from the [No Status](#no-status) column (30 min) 1. Triage issues and pull requests from the [No Status](#no-status) column (30 min)
2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min) 2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min).
Once a month, each team member checks the [Assigned](#assigned) column for PRs/issues assigned to them, to either
- unblock it by providing input
- mark it as draft if it is blocked on the contributor
- escalate it back to the team by moving it to To discuss, and leaving a comment as to why the issue needs to be discussed again.
- Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn) - Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)

View file

@ -1,7 +1,5 @@
#!/usr/bin/env nix-shell #!/usr/bin/env nix
#!nix-shell -i bash ../shell.nix -I nixpkgs=channel:nixos-unstable-small #!nix shell .#changelog-d-nix --command bash
# ^^^^^^^
# Only used for bash. shell.nix goes to the flake.
# --- CONFIGURATION --- # --- CONFIGURATION ---

View file

@ -27,8 +27,9 @@ release:
* Compile the release notes by running * Compile the release notes by running
```console ```console
$ export VERSION=X.YY
$ git checkout -b release-notes $ git checkout -b release-notes
$ VERSION=X.YY ./maintainers/release-notes $ ./maintainers/release-notes
``` ```
where `X.YY` is *without* the patch level, e.g. `2.12` rather than ~~`2.12.0`~~. where `X.YY` is *without* the patch level, e.g. `2.12` rather than ~~`2.12.0`~~.

View file

@ -12,24 +12,7 @@ man-pages :=
install-tests := install-tests :=
install-tests-groups := install-tests-groups :=
ifdef HOST_OS include mk/platform.mk
HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS)))
ifeq ($(HOST_KERNEL), cygwin)
HOST_CYGWIN = 1
endif
ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),)
HOST_DARWIN = 1
endif
ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),)
HOST_FREEBSD = 1
endif
ifeq ($(HOST_KERNEL), linux)
HOST_LINUX = 1
endif
ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),)
HOST_SOLARIS = 1
endif
endif
# Hack to define a literal space. # Hack to define a literal space.
space := space :=
@ -114,6 +97,10 @@ $(foreach test-group, $(install-tests-groups), \
$(eval $(call run-test,$(test),$(install_test_init))) \ $(eval $(call run-test,$(test),$(install_test_init))) \
$(eval $(test-group).test-group: $(test).test))) $(eval $(test-group).test-group: $(test).test)))
# Include makefiles requiring built programs.
$(foreach mf, $(makefiles-late), $(eval $(call include-sub-makefile,$(mf))))
$(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file)))))) $(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file))))))

View file

@ -3,13 +3,19 @@ libs-list :=
ifdef HOST_DARWIN ifdef HOST_DARWIN
SO_EXT = dylib SO_EXT = dylib
else else
ifdef HOST_CYGWIN ifdef HOST_WINDOWS
SO_EXT = dll SO_EXT = dll
else else
SO_EXT = so SO_EXT = so
endif endif
endif endif
ifdef HOST_UNIX
THREAD_LDFLAGS = -pthread
else
THREAD_LDFLAGS =
endif
# Build a library with symbolic name $(1). The library is defined by # Build a library with symbolic name $(1). The library is defined by
# various variables prefixed by $(1)_: # various variables prefixed by $(1)_:
# #
@ -59,7 +65,7 @@ define build-library
$(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs)))) $(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs))))
_libs := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_PATH)) _libs := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_PATH))
ifdef HOST_CYGWIN ifdef HOST_WINDOWS
$(1)_INSTALL_DIR ?= $$(bindir) $(1)_INSTALL_DIR ?= $$(bindir)
else else
$(1)_INSTALL_DIR ?= $$(libdir) $(1)_INSTALL_DIR ?= $$(libdir)
@ -79,7 +85,7 @@ define build-library
endif endif
else else
ifndef HOST_DARWIN ifndef HOST_DARWIN
ifndef HOST_CYGWIN ifndef HOST_WINDOWS
$(1)_LDFLAGS += -Wl,-z,defs $(1)_LDFLAGS += -Wl,-z,defs
endif endif
endif endif

32
mk/platform.mk Normal file
View file

@ -0,0 +1,32 @@
ifdef HOST_OS
HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS)))
ifeq ($(patsubst mingw%,,$(HOST_KERNEL)),)
HOST_MINGW = 1
HOST_WINDOWS = 1
endif
ifeq ($(HOST_KERNEL), cygwin)
HOST_CYGWIN = 1
HOST_WINDOWS = 1
HOST_UNIX = 1
endif
ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),)
HOST_DARWIN = 1
HOST_UNIX = 1
endif
ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),)
HOST_FREEBSD = 1
HOST_UNIX = 1
endif
ifeq ($(patsubst netbsd%,,$(HOST_KERNEL)),)
HOST_NETBSD = 1
HOST_UNIX = 1
endif
ifeq ($(HOST_KERNEL), linux)
HOST_LINUX = 1
HOST_UNIX = 1
endif
ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),)
HOST_SOLARIS = 1
HOST_UNIX = 1
endif
endif

View file

@ -1,5 +1,11 @@
programs-list := programs-list :=
ifdef HOST_WINDOWS
EXE_EXT = .exe
else
EXE_EXT =
endif
# Build a program with symbolic name $(1). The program is defined by # Build a program with symbolic name $(1). The program is defined by
# various variables prefixed by $(1)_: # various variables prefixed by $(1)_:
# #
@ -31,7 +37,7 @@ define build-program
_srcs := $$(sort $$(foreach src, $$($(1)_SOURCES), $$(src))) _srcs := $$(sort $$(foreach src, $$($(1)_SOURCES), $$(src)))
$(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs)))) $(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs))))
_libs := $$(foreach lib, $$($(1)_LIBS), $$(foreach lib2, $$($$(lib)_LIB_CLOSURE), $$($$(lib2)_PATH))) _libs := $$(foreach lib, $$($(1)_LIBS), $$(foreach lib2, $$($$(lib)_LIB_CLOSURE), $$($$(lib2)_PATH)))
$(1)_PATH := $$(_d)/$$($(1)_NAME) $(1)_PATH := $$(_d)/$$($(1)_NAME)$(EXE_EXT)
$$(eval $$(call create-dir, $$(_d))) $$(eval $$(call create-dir, $$(_d)))
@ -42,7 +48,7 @@ define build-program
ifdef $(1)_INSTALL_DIR ifdef $(1)_INSTALL_DIR
$(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME) $(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME)$(EXE_EXT)
$$(eval $$(call create-dir, $$($(1)_INSTALL_DIR))) $$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))

View file

@ -10,10 +10,10 @@ endef
ifneq ($(MAKECMDGOALS), clean) ifneq ($(MAKECMDGOALS), clean)
$(buildprefix)%.h: %.h.in $(buildprefix)%.h: %.h.in $(buildprefix)config.status
$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --header=$(@:$(buildprefix)%=%) $(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --header=$(@:$(buildprefix)%=%)
$(buildprefix)%: %.in $(buildprefix)%: %.in $(buildprefix)config.status
$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --file=$(@:$(buildprefix)%=%) $(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --file=$(@:$(buildprefix)%=%)
endif endif

View file

@ -10,9 +10,9 @@
, boost , boost
, brotli , brotli
, bzip2 , bzip2
, changelog-d
, curl , curl
, editline , editline
, readline
, fileset , fileset
, flex , flex
, git , git
@ -68,10 +68,24 @@
# Whether to build the regular manual # Whether to build the regular manual
, enableManual ? __forDefaults.canRunInstalled , enableManual ? __forDefaults.canRunInstalled
# Whether to compile `rl-next.md`, the release notes for the next # Whether to use garbage collection for the Nix language evaluator.
# not-yet-released version of Nix in the manul, from the individual #
# change log entries in the directory. # If it is disabled, we just leak memory, but this is not as bad as it
, buildUnreleasedNotes ? false # sounds so long as evaluation just takes places within short-lived
# processes. (When the process exits, the memory is reclaimed; it is
# only leaked *within* the process.)
, enableGC ? true
# Whether to enable Markdown rendering in the Nix binary.
, enableMarkdown ? !stdenv.hostPlatform.isWindows
# Which interactive line editor library to use for Nix's repl.
#
# Currently supported choices are:
#
# - editline (default)
# - readline
, readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline"
# Whether to build the internal API docs, can be done separately from # Whether to build the internal API docs, can be done separately from
# everything else. # everything else.
@ -80,7 +94,7 @@
# Whether to install unit tests. This is useful when cross compiling # Whether to install unit tests. This is useful when cross compiling
# since we cannot run them natively during the build, but can do so # since we cannot run them natively during the build, but can do so
# later. # later.
, installUnitTests ? __forDefaults.canRunInstalled , installUnitTests ? doBuild && !__forDefaults.canExecuteHost
# For running the functional tests against a pre-built Nix. Probably # For running the functional tests against a pre-built Nix. Probably
# want to use in conjunction with `doBuild = false;`. # want to use in conjunction with `doBuild = false;`.
@ -93,7 +107,8 @@
# Not a real argument, just the only way to approximate let-binding some # Not a real argument, just the only way to approximate let-binding some
# stuff for argument defaults. # stuff for argument defaults.
, __forDefaults ? { , __forDefaults ? {
canRunInstalled = doBuild && stdenv.buildPlatform.canExecute stdenv.hostPlatform; canExecuteHost = stdenv.buildPlatform.canExecute stdenv.hostPlatform;
canRunInstalled = doBuild && __forDefaults.canExecuteHost;
} }
}: }:
@ -153,7 +168,6 @@ in {
./mk ./mk
(fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts) (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
] ++ lib.optionals doBuild [ ] ++ lib.optionals doBuild [
./boehmgc-coroutine-sp-fallback.diff
./doc ./doc
./misc ./misc
./precompiled-headers.h ./precompiled-headers.h
@ -164,6 +178,10 @@ in {
./doc/manual ./doc/manual
] ++ lib.optionals enableInternalAPIDocs [ ] ++ lib.optionals enableInternalAPIDocs [
./doc/internal-api ./doc/internal-api
# Source might not be compiled, but still must be available
# for Doxygen to gather comments.
./src
./tests/unit
] ++ lib.optionals buildUnitTests [ ] ++ lib.optionals buildUnitTests [
./tests/unit ./tests/unit
] ++ lib.optionals doInstallCheck [ ] ++ lib.optionals doInstallCheck [
@ -194,9 +212,6 @@ in {
] ++ lib.optionals (doInstallCheck || enableManual) [ ] ++ lib.optionals (doInstallCheck || enableManual) [
jq # Also for custom mdBook preprocessor. jq # Also for custom mdBook preprocessor.
] ++ lib.optional stdenv.hostPlatform.isLinux util-linux ] ++ lib.optional stdenv.hostPlatform.isLinux util-linux
# Official releases don't have rl-next, so we don't need to compile a
# changelog
++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d
++ lib.optional enableInternalAPIDocs doxygen ++ lib.optional enableInternalAPIDocs doxygen
; ;
@ -211,9 +226,12 @@ in {
openssl openssl
sqlite sqlite
xz xz
] ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ ({ inherit readline editline; }.${readlineFlavor})
editline ] ++ lib.optionals enableMarkdown [
lowdown lowdown
] ++ lib.optionals buildUnitTests [
gtest
rapidcheck
] ++ lib.optional stdenv.isLinux libseccomp ] ++ lib.optional stdenv.isLinux libseccomp
++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid
# There have been issues building these dependencies # There have been issues building these dependencies
@ -225,18 +243,12 @@ in {
; ;
propagatedBuildInputs = [ propagatedBuildInputs = [
boehmgc
nlohmann_json nlohmann_json
]; ] ++ lib.optional enableGC boehmgc;
dontBuild = !attrs.doBuild; dontBuild = !attrs.doBuild;
doCheck = attrs.doCheck; doCheck = attrs.doCheck;
checkInputs = [
gtest
rapidcheck
];
nativeCheckInputs = [ nativeCheckInputs = [
git git
mercurial mercurial
@ -250,7 +262,7 @@ in {
# Copy libboost_context so we don't get all of Boost in our closure. # Copy libboost_context so we don't get all of Boost in our closure.
# https://github.com/NixOS/nixpkgs/issues/45462 # https://github.com/NixOS/nixpkgs/issues/45462
mkdir -p $out/lib mkdir -p $out/lib
cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*,libboost_regex*} $out/lib cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
rm -f $out/lib/*.a rm -f $out/lib/*.a
'' + lib.optionalString stdenv.hostPlatform.isLinux '' '' + lib.optionalString stdenv.hostPlatform.isLinux ''
chmod u+w $out/lib/*.so.* chmod u+w $out/lib/*.so.*
@ -271,7 +283,10 @@ in {
(lib.enableFeature doInstallCheck "functional-tests") (lib.enableFeature doInstallCheck "functional-tests")
(lib.enableFeature enableInternalAPIDocs "internal-api-docs") (lib.enableFeature enableInternalAPIDocs "internal-api-docs")
(lib.enableFeature enableManual "doc-gen") (lib.enableFeature enableManual "doc-gen")
(lib.enableFeature enableGC "gc")
(lib.enableFeature enableMarkdown "markdown")
(lib.enableFeature installUnitTests "install-unit-tests") (lib.enableFeature installUnitTests "install-unit-tests")
(lib.withFeatureAs true "readline-flavor" readlineFlavor)
] ++ lib.optionals (!forDevShell) [ ] ++ lib.optionals (!forDevShell) [
"--sysconfdir=/etc" "--sysconfdir=/etc"
] ++ lib.optionals installUnitTests [ ] ++ lib.optionals installUnitTests [
@ -284,7 +299,7 @@ in {
] ++ lib.optional (doBuild && stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) ] ++ lib.optional (doBuild && stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux"))
"LDFLAGS=-fuse-ld=gold" "LDFLAGS=-fuse-ld=gold"
++ lib.optional (doBuild && stdenv.hostPlatform.isStatic) "--enable-embedded-sandbox-shell" ++ lib.optional (doBuild && stdenv.hostPlatform.isStatic) "--enable-embedded-sandbox-shell"
++ lib.optional buildUnitTests "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include"; ;
enableParallelBuilding = true; enableParallelBuilding = true;
@ -354,9 +369,6 @@ in {
# Nix proper (which they depend on). # Nix proper (which they depend on).
(installUnitTests -> doBuild) (installUnitTests -> doBuild)
(doCheck -> doBuild) (doCheck -> doBuild)
# We have to build the manual to build unreleased notes, as those
# are part of the manual
(buildUnreleasedNotes -> enableManual)
# The build process for the manual currently requires extracting # The build process for the manual currently requires extracting
# data from the Nix executable we are trying to document. # data from the Nix executable we are trying to document.
(enableManual -> doBuild) (enableManual -> doBuild)
2
perl/.yath.rc Normal file
@ -0,0 +1,2 @@
[test]
-I=rel(lib/Nix)
@ -5,12 +5,12 @@
, nix, curl, bzip2, xz, boost, libsodium, darwin , nix, curl, bzip2, xz, boost, libsodium, darwin
}: }:
perl.pkgs.toPerlModule (stdenv.mkDerivation { perl.pkgs.toPerlModule (stdenv.mkDerivation (finalAttrs: {
name = "nix-perl-${nix.version}"; name = "nix-perl-${nix.version}";
src = fileset.toSource { src = fileset.toSource {
root = ../.; root = ../.;
fileset = fileset.unions [ fileset = fileset.unions ([
../.version ../.version
../m4 ../m4
../mk ../mk
@ -20,7 +20,10 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation {
./configure.ac ./configure.ac
./lib ./lib
./local.mk ./local.mk
]; ] ++ lib.optionals finalAttrs.doCheck [
./.yath.rc
./t
]);
}; };
nativeBuildInputs = nativeBuildInputs =
@ -40,6 +43,13 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation {
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security; ++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security;
# `perlPackages.Test2Harness` is marked broken for Darwin
doCheck = !stdenv.isDarwin;
nativeCheckInputs = [
perlPackages.Test2Harness
];
configureFlags = [ configureFlags = [
"--with-dbi=${perlPackages.DBI}/${perl.libPrefix}" "--with-dbi=${perlPackages.DBI}/${perl.libPrefix}"
"--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}" "--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}"
@ -48,4 +58,4 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation {
enableParallelBuilding = true; enableParallelBuilding = true;
postUnpack = "sourceRoot=$sourceRoot/perl"; postUnpack = "sourceRoot=$sourceRoot/perl";
}) }))
@ -12,17 +12,20 @@ our %EXPORT_TAGS = ( 'all' => [ qw( ) ] );
our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } ); our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } );
our @EXPORT = qw( our @EXPORT = qw(
setVerbosity StoreWrapper
isValidPath queryReferences queryPathInfo queryDeriver queryPathHash StoreWrapper::new
queryPathFromHashPart StoreWrapper::isValidPath StoreWrapper::queryReferences StoreWrapper::queryPathInfo StoreWrapper::queryDeriver StoreWrapper::queryPathHash
topoSortPaths computeFSClosure followLinksToStorePath exportPaths importPaths StoreWrapper::queryPathFromHashPart
StoreWrapper::topoSortPaths StoreWrapper::computeFSClosure followLinksToStorePath StoreWrapper::exportPaths StoreWrapper::importPaths
StoreWrapper::addToStore StoreWrapper::makeFixedOutputPath
StoreWrapper::derivationFromPath
StoreWrapper::addTempRoot
StoreWrapper::queryRawRealisation
hashPath hashFile hashString convertHash hashPath hashFile hashString convertHash
signString checkSignature signString checkSignature
addToStore makeFixedOutputPath
derivationFromPath
addTempRoot
getBinDir getStoreDir getBinDir getStoreDir
queryRawRealisation setVerbosity
); );
our $VERSION = '0.15'; our $VERSION = '0.15';
@ -12,53 +12,66 @@
#include "realisation.hh" #include "realisation.hh"
#include "globals.hh" #include "globals.hh"
#include "store-api.hh" #include "store-api.hh"
#include "crypto.hh"
#include "posix-source-accessor.hh" #include "posix-source-accessor.hh"
#include <sodium.h> #include <sodium.h>
#include <nlohmann/json.hpp> #include <nlohmann/json.hpp>
using namespace nix; using namespace nix;
static bool libStoreInitialized = false;
static ref<Store> store() struct StoreWrapper {
{ ref<Store> store;
static std::shared_ptr<Store> _store; };
if (!_store) {
try {
initLibStore();
_store = openStore();
} catch (Error & e) {
croak("%s", e.what());
}
}
return ref<Store>(_store);
}
MODULE = Nix::Store PACKAGE = Nix::Store MODULE = Nix::Store PACKAGE = Nix::Store
PROTOTYPES: ENABLE PROTOTYPES: ENABLE
TYPEMAP: <<HERE
StoreWrapper * O_OBJECT
OUTPUT
O_OBJECT
sv_setref_pv( $arg, CLASS, (void*)$var );
INPUT
O_OBJECT
if ( sv_isobject($arg) && (SvTYPE(SvRV($arg)) == SVt_PVMG) ) {
$var = ($type)SvIV((SV*)SvRV( $arg ));
}
else {
warn( \"${Package}::$func_name() -- \"
\"$var not a blessed SV reference\");
XSRETURN_UNDEF;
}
HERE
#undef dNOOP // Hack to work around "error: declaration of 'Perl___notused' has a different language linkage" error message on clang. #undef dNOOP // Hack to work around "error: declaration of 'Perl___notused' has a different language linkage" error message on clang.
#define dNOOP #define dNOOP
void
StoreWrapper::DESTROY()
void init() StoreWrapper *
CODE: StoreWrapper::new(char * s = nullptr)
store();
void setVerbosity(int level)
CODE:
verbosity = (Verbosity) level;
int isValidPath(char * path)
CODE: CODE:
static std::shared_ptr<Store> _store;
try { try {
RETVAL = store()->isValidPath(store()->parseStorePath(path)); if (!libStoreInitialized) {
initLibStore();
libStoreInitialized = true;
}
if (items == 1) {
_store = openStore();
RETVAL = new StoreWrapper {
.store = ref<Store>{_store}
};
} else {
RETVAL = new StoreWrapper {
.store = openStore(s)
};
}
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
@ -66,52 +79,81 @@ int isValidPath(char * path)
RETVAL RETVAL
SV * queryReferences(char * path) void init()
CODE:
if (!libStoreInitialized) {
initLibStore();
libStoreInitialized = true;
}
void setVerbosity(int level)
CODE:
verbosity = (Verbosity) level;
int
StoreWrapper::isValidPath(char * path)
CODE:
try {
RETVAL = THIS->store->isValidPath(THIS->store->parseStorePath(path));
} catch (Error & e) {
croak("%s", e.what());
}
OUTPUT:
RETVAL
SV *
StoreWrapper::queryReferences(char * path)
PPCODE: PPCODE:
try { try {
for (auto & i : store()->queryPathInfo(store()->parseStorePath(path))->references) for (auto & i : THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->references)
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0)));
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
SV * queryPathHash(char * path) SV *
StoreWrapper::queryPathHash(char * path)
PPCODE: PPCODE:
try { try {
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true); auto s = THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
SV * queryDeriver(char * path) SV *
StoreWrapper::queryDeriver(char * path)
PPCODE: PPCODE:
try { try {
auto info = store()->queryPathInfo(store()->parseStorePath(path)); auto info = THIS->store->queryPathInfo(THIS->store->parseStorePath(path));
if (!info->deriver) XSRETURN_UNDEF; if (!info->deriver) XSRETURN_UNDEF;
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0)));
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
SV * queryPathInfo(char * path, int base32) SV *
StoreWrapper::queryPathInfo(char * path, int base32)
PPCODE: PPCODE:
try { try {
auto info = store()->queryPathInfo(store()->parseStorePath(path)); auto info = THIS->store->queryPathInfo(THIS->store->parseStorePath(path));
if (!info->deriver) if (!info->deriver)
XPUSHs(&PL_sv_undef); XPUSHs(&PL_sv_undef);
else else
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0)));
auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true); auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
mXPUSHi(info->registrationTime); mXPUSHi(info->registrationTime);
mXPUSHi(info->narSize); mXPUSHi(info->narSize);
AV * refs = newAV(); AV * refs = newAV();
for (auto & i : info->references) for (auto & i : info->references)
av_push(refs, newSVpv(store()->printStorePath(i).c_str(), 0)); av_push(refs, newSVpv(THIS->store->printStorePath(i).c_str(), 0));
XPUSHs(sv_2mortal(newRV((SV *) refs))); XPUSHs(sv_2mortal(newRV((SV *) refs)));
AV * sigs = newAV(); AV * sigs = newAV();
for (auto & i : info->sigs) for (auto & i : info->sigs)
@ -121,10 +163,11 @@ SV * queryPathInfo(char * path, int base32)
croak("%s", e.what()); croak("%s", e.what());
} }
SV * queryRawRealisation(char * outputId) SV *
StoreWrapper::queryRawRealisation(char * outputId)
PPCODE: PPCODE:
try { try {
auto realisation = store()->queryRealisation(DrvOutput::parse(outputId)); auto realisation = THIS->store->queryRealisation(DrvOutput::parse(outputId));
if (realisation) if (realisation)
XPUSHs(sv_2mortal(newSVpv(realisation->toJSON().dump().c_str(), 0))); XPUSHs(sv_2mortal(newSVpv(realisation->toJSON().dump().c_str(), 0)));
else else
@ -134,46 +177,50 @@ SV * queryRawRealisation(char * outputId)
} }
SV * queryPathFromHashPart(char * hashPart) SV *
StoreWrapper::queryPathFromHashPart(char * hashPart)
PPCODE: PPCODE:
try { try {
auto path = store()->queryPathFromHashPart(hashPart); auto path = THIS->store->queryPathFromHashPart(hashPart);
XPUSHs(sv_2mortal(newSVpv(path ? store()->printStorePath(*path).c_str() : "", 0))); XPUSHs(sv_2mortal(newSVpv(path ? THIS->store->printStorePath(*path).c_str() : "", 0)));
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
SV * computeFSClosure(int flipDirection, int includeOutputs, ...) SV *
StoreWrapper::computeFSClosure(int flipDirection, int includeOutputs, ...)
PPCODE: PPCODE:
try { try {
StorePathSet paths; StorePathSet paths;
for (int n = 2; n < items; ++n) for (int n = 2; n < items; ++n)
store()->computeFSClosure(store()->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs); THIS->store->computeFSClosure(THIS->store->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs);
for (auto & i : paths) for (auto & i : paths)
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0)));
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
SV * topoSortPaths(...) SV *
StoreWrapper::topoSortPaths(...)
PPCODE: PPCODE:
try { try {
StorePathSet paths; StorePathSet paths;
for (int n = 0; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n)))); for (int n = 0; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n))));
auto sorted = store()->topoSortPaths(paths); auto sorted = THIS->store->topoSortPaths(paths);
for (auto & i : sorted) for (auto & i : sorted)
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0)));
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
SV * followLinksToStorePath(char * path) SV *
StoreWrapper::followLinksToStorePath(char * path)
CODE: CODE:
try { try {
RETVAL = newSVpv(store()->printStorePath(store()->followLinksToStorePath(path)).c_str(), 0); RETVAL = newSVpv(THIS->store->printStorePath(THIS->store->followLinksToStorePath(path)).c_str(), 0);
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
@ -181,34 +228,37 @@ SV * followLinksToStorePath(char * path)
RETVAL RETVAL
void exportPaths(int fd, ...) void
StoreWrapper::exportPaths(int fd, ...)
PPCODE: PPCODE:
try { try {
StorePathSet paths; StorePathSet paths;
for (int n = 1; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n)))); for (int n = 1; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n))));
FdSink sink(fd); FdSink sink(fd);
store()->exportPaths(paths, sink); THIS->store->exportPaths(paths, sink);
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
void importPaths(int fd, int dontCheckSigs) void
StoreWrapper::importPaths(int fd, int dontCheckSigs)
PPCODE: PPCODE:
try { try {
FdSource source(fd); FdSource source(fd);
store()->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs); THIS->store->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs);
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
SV * hashPath(char * algo, int base32, char * path) SV *
hashPath(char * algo, int base32, char * path)
PPCODE: PPCODE:
try { try {
PosixSourceAccessor accessor; auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
Hash h = hashPath( Hash h = hashPath(
accessor, CanonPath::fromCwd(path), accessor, canonPath,
FileIngestionMethod::Recursive, parseHashAlgo(algo)).first; FileIngestionMethod::Recursive, parseHashAlgo(algo)).first;
auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false); auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
@ -281,64 +331,67 @@ int checkSignature(SV * publicKey_, SV * sig_, char * msg)
RETVAL RETVAL
SV * addToStore(char * srcPath, int recursive, char * algo) SV *
StoreWrapper::addToStore(char * srcPath, int recursive, char * algo)
PPCODE: PPCODE:
try { try {
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
PosixSourceAccessor accessor; auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(srcPath);
auto path = store()->addToStore( auto path = THIS->store->addToStore(
std::string(baseNameOf(srcPath)), std::string(baseNameOf(srcPath)),
accessor, CanonPath::fromCwd(srcPath), accessor, canonPath,
method, parseHashAlgo(algo)); method, parseHashAlgo(algo));
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0))); XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0)));
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name) SV *
StoreWrapper::makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
PPCODE: PPCODE:
try { try {
auto h = Hash::parseAny(hash, parseHashAlgo(algo)); auto h = Hash::parseAny(hash, parseHashAlgo(algo));
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
auto path = store()->makeFixedOutputPath(name, FixedOutputInfo { auto path = THIS->store->makeFixedOutputPath(name, FixedOutputInfo {
.method = method, .method = method,
.hash = h, .hash = h,
.references = {}, .references = {},
}); });
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0))); XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0)));
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
SV * derivationFromPath(char * drvPath) SV *
StoreWrapper::derivationFromPath(char * drvPath)
PREINIT: PREINIT:
HV *hash; HV *hash;
CODE: CODE:
try { try {
Derivation drv = store()->derivationFromPath(store()->parseStorePath(drvPath)); Derivation drv = THIS->store->derivationFromPath(THIS->store->parseStorePath(drvPath));
hash = newHV(); hash = newHV();
HV * outputs = newHV(); HV * outputs = newHV();
for (auto & i : drv.outputsAndOptPaths(*store())) { for (auto & i : drv.outputsAndOptPaths(*THIS->store)) {
hv_store( hv_store(
outputs, i.first.c_str(), i.first.size(), outputs, i.first.c_str(), i.first.size(),
!i.second.second !i.second.second
? newSV(0) /* null value */ ? newSV(0) /* null value */
: newSVpv(store()->printStorePath(*i.second.second).c_str(), 0), : newSVpv(THIS->store->printStorePath(*i.second.second).c_str(), 0),
0); 0);
} }
hv_stores(hash, "outputs", newRV((SV *) outputs)); hv_stores(hash, "outputs", newRV((SV *) outputs));
AV * inputDrvs = newAV(); AV * inputDrvs = newAV();
for (auto & i : drv.inputDrvs.map) for (auto & i : drv.inputDrvs.map)
av_push(inputDrvs, newSVpv(store()->printStorePath(i.first).c_str(), 0)); // !!! ignores i->second av_push(inputDrvs, newSVpv(THIS->store->printStorePath(i.first).c_str(), 0)); // !!! ignores i->second
hv_stores(hash, "inputDrvs", newRV((SV *) inputDrvs)); hv_stores(hash, "inputDrvs", newRV((SV *) inputDrvs));
AV * inputSrcs = newAV(); AV * inputSrcs = newAV();
for (auto & i : drv.inputSrcs) for (auto & i : drv.inputSrcs)
av_push(inputSrcs, newSVpv(store()->printStorePath(i).c_str(), 0)); av_push(inputSrcs, newSVpv(THIS->store->printStorePath(i).c_str(), 0));
hv_stores(hash, "inputSrcs", newRV((SV *) inputSrcs)); hv_stores(hash, "inputSrcs", newRV((SV *) inputSrcs));
hv_stores(hash, "platform", newSVpv(drv.platform.c_str(), 0)); hv_stores(hash, "platform", newSVpv(drv.platform.c_str(), 0));
@ -362,10 +415,11 @@ SV * derivationFromPath(char * drvPath)
RETVAL RETVAL
void addTempRoot(char * storePath) void
StoreWrapper::addTempRoot(char * storePath)
PPCODE: PPCODE:
try { try {
store()->addTempRoot(store()->parseStorePath(storePath)); THIS->store->addTempRoot(THIS->store->parseStorePath(storePath));
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
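The hashPath and addToStore hunks above both switch from CanonPath::fromCwd() to PosixSourceAccessor::createAtRoot(), which hands back the accessor together with the canonicalised path inside it. A minimal sketch of that pattern, mirroring the calls above (the hard-coded "sha256" is for illustration only):

    // Assumes a filesystem path in `srcPath`, as in the XS functions above.
    auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(srcPath);
    Hash h = hashPath(accessor, canonPath,
                      FileIngestionMethod::Recursive, parseHashAlgo("sha256")).first;
    std::string printable = h.to_string(HashFormat::Nix32, false);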
@ -41,3 +41,6 @@ Store_FORCE_INSTALL = 1
Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store
clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config
check: all
yath test
13
perl/t/init.t Normal file
@ -0,0 +1,13 @@
use strict;
use warnings;
use Test2::V0;
use Nix::Store;
my $s = new Nix::Store("dummy://");
my $res = $s->isValidPath("/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar");
ok(!$res, "should not have path");
done_testing;
@ -102,7 +102,7 @@ poly_extra_try_me_commands() {
poly_configure_nix_daemon_service() { poly_configure_nix_daemon_service() {
task "Setting up the nix-daemon LaunchDaemon" task "Setting up the nix-daemon LaunchDaemon"
_sudo "to set up the nix-daemon as a LaunchDaemon" \ _sudo "to set up the nix-daemon as a LaunchDaemon" \
/usr/bin/install -m -rw-r--r-- "/nix/var/nix/profiles/default$NIX_DAEMON_DEST" "$NIX_DAEMON_DEST" /usr/bin/install -m "u=rw,go=r" "/nix/var/nix/profiles/default$NIX_DAEMON_DEST" "$NIX_DAEMON_DEST"
_sudo "to load the LaunchDaemon plist for nix-daemon" \ _sudo "to load the LaunchDaemon plist for nix-daemon" \
launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist
@ -137,11 +137,8 @@ static int main_build_remote(int argc, char * * argv)
for (auto & m : machines) { for (auto & m : machines) {
debug("considering building on remote machine '%s'", m.storeUri); debug("considering building on remote machine '%s'", m.storeUri);
if (m.enabled if (m.enabled &&
&& (neededSystem == "builtin" m.systemSupported(neededSystem) &&
|| std::find(m.systemTypes.begin(),
m.systemTypes.end(),
neededSystem) != m.systemTypes.end()) &&
m.allSupported(requiredFeatures) && m.allSupported(requiredFeatures) &&
m.mandatoryMet(requiredFeatures)) m.mandatoryMet(requiredFeatures))
{ {
@ -205,7 +202,7 @@ static int main_build_remote(int argc, char * * argv)
else else
drvstr = "<unknown>"; drvstr = "<unknown>";
auto error = hintformat(errorText); auto error = HintFmt(errorText);
error error
% drvstr % drvstr
% neededSystem % neededSystem
@ -214,7 +211,7 @@ static int main_build_remote(int argc, char * * argv)
for (auto & m : machines) for (auto & m : machines)
error error
% concatStringsSep<std::vector<std::string>>(", ", m.systemTypes) % concatStringsSep<StringSet>(", ", m.systemTypes)
% m.maxJobs % m.maxJobs
% concatStringsSep<StringSet>(", ", m.supportedFeatures) % concatStringsSep<StringSet>(", ", m.supportedFeatures)
% concatStringsSep<StringSet>(", ", m.mandatoryFeatures); % concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
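The selection loop above now delegates the system check to m.systemSupported(). A hedged sketch of what that helper presumably does, assuming it simply folds in the old inline test ("builtin" or a listed system type) over the systemTypes field, which this same diff now treats as a StringSet:

    // Sketch only; the real Machine::systemSupported may differ.
    bool Machine::systemSupported(const std::string & system) const
    {
        return system == "builtin" || systemTypes.count(system) > 0;
    }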
@ -12,9 +12,9 @@ namespace nix {
bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \ bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \
{ \ { \
const MY_TYPE* me = this; \ const MY_TYPE* me = this; \
auto fields1 = std::make_tuple<const CHILD_TYPE &, const FIELD_TYPE &>(*me->drvPath, me->FIELD); \ auto fields1 = std::tie(*me->drvPath, me->FIELD); \
me = &other; \ me = &other; \
auto fields2 = std::make_tuple<const CHILD_TYPE &, const FIELD_TYPE &>(*me->drvPath, me->FIELD); \ auto fields2 = std::tie(*me->drvPath, me->FIELD); \
return fields1 COMPARATOR fields2; \ return fields1 COMPARATOR fields2; \
} }
#define CMP(CHILD_TYPE, MY_TYPE, FIELD) \ #define CMP(CHILD_TYPE, MY_TYPE, FIELD) \
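The comparison macro now builds its field packs with std::tie instead of std::make_tuple with explicit reference types: std::tie yields a tuple of references directly, so the two sides compare lexicographically without copies or spelled-out types. A small self-contained illustration of the idiom (the Goal struct here is hypothetical, not from the patch):

    #include <string>
    #include <tuple>

    struct Goal {
        std::string drvPath;
        int priority;
        // Compare drvPath first, then priority, without copying either field.
        bool operator<(const Goal & other) const {
            return std::tie(drvPath, priority) < std::tie(other.drvPath, other.priority);
        }
    };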
@ -156,7 +156,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
for (auto & i : autoArgs) { for (auto & i : autoArgs) {
auto v = state.allocValue(); auto v = state.allocValue();
if (i.second[0] == 'E') if (i.second[0] == 'E')
state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(CanonPath::fromCwd()))); state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(".")));
else else
v->mkString(((std::string_view) i.second).substr(1)); v->mkString(((std::string_view) i.second).substr(1));
res.insert(state.symbols.create(i.first), v); res.insert(state.symbols.create(i.first), v);
@ -164,7 +164,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
return res.finish(); return res.finish();
} }
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir) SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir)
{ {
if (EvalSettings::isPseudoUrl(s)) { if (EvalSettings::isPseudoUrl(s)) {
auto storePath = fetchers::downloadTarball( auto storePath = fetchers::downloadTarball(
@ -185,7 +185,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDi
} }
else else
return state.rootPath(CanonPath(s, baseDir)); return state.rootPath(baseDir ? absPath(s, *baseDir) : absPath(s));
} }
} }
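lookupFileArg now takes a nullable const Path * base directory instead of a CanonPath; a null pointer falls back to absPath(s). The updated call site later in this same commit (installables.cc) passes it like this:

    Path dir = absPath(getCommandBaseDir());
    state->evalFile(lookupFileArg(*state, *file, &dir), *vFile);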
@ -29,6 +29,6 @@ private:
std::map<std::string, std::string> autoArgs; std::map<std::string, std::string> autoArgs;
}; };
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir = CanonPath::fromCwd()); SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr);
} }
@ -1,5 +1,6 @@
#include "editor-for.hh" #include "editor-for.hh"
#include "environment-variables.hh" #include "environment-variables.hh"
#include "source-path.hh"
namespace nix { namespace nix {
@ -16,7 +17,7 @@ Strings editorFor(const SourcePath & file, uint32_t line)
editor.find("vim") != std::string::npos || editor.find("vim") != std::string::npos ||
editor.find("kak") != std::string::npos)) editor.find("kak") != std::string::npos))
args.push_back(fmt("+%d", line)); args.push_back(fmt("+%d", line));
args.push_back(path->abs()); args.push_back(path->string());
return args; return args;
} }
@ -2,7 +2,7 @@
///@file ///@file
#include "types.hh" #include "types.hh"
#include "input-accessor.hh" #include "source-path.hh"
namespace nix { namespace nix {
@ -58,22 +58,22 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths()
Bindings & autoArgs = *cmd.getAutoArgs(*state); Bindings & autoArgs = *cmd.getAutoArgs(*state);
DrvInfos drvInfos; PackageInfos packageInfos;
getDerivations(*state, *v, "", autoArgs, drvInfos, false); getDerivations(*state, *v, "", autoArgs, packageInfos, false);
// Backward compatibility hack: group results by drvPath. This // Backward compatibility hack: group results by drvPath. This
// helps keep .all output together. // helps keep .all output together.
std::map<StorePath, OutputsSpec> byDrvPath; std::map<StorePath, OutputsSpec> byDrvPath;
for (auto & drvInfo : drvInfos) { for (auto & packageInfo : packageInfos) {
auto drvPath = drvInfo.queryDrvPath(); auto drvPath = packageInfo.queryDrvPath();
if (!drvPath) if (!drvPath)
throw Error("'%s' is not a derivation", what()); throw Error("'%s' is not a derivation", what());
auto newOutputs = std::visit(overloaded { auto newOutputs = std::visit(overloaded {
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec { [&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
std::set<std::string> outputsToInstall; std::set<std::string> outputsToInstall;
for (auto & output : drvInfo.queryOutputs(false, true)) for (auto & output : packageInfo.queryOutputs(false, true))
outputsToInstall.insert(output.first); outputsToInstall.insert(output.first);
return OutputsSpec::Names { std::move(outputsToInstall) }; return OutputsSpec::Names { std::move(outputsToInstall) };
}, },
@ -52,7 +52,7 @@ Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::Locked
auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs")); auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
assert(aOutputs); assert(aOutputs);
state.forceValue(*aOutputs->value, [&]() { return aOutputs->value->determinePos(noPos); }); state.forceValue(*aOutputs->value, aOutputs->value->determinePos(noPos));
return aOutputs->value; return aOutputs->value;
} }
@ -1,5 +1,6 @@
#include "installable-value.hh" #include "installable-value.hh"
#include "eval-cache.hh" #include "eval-cache.hh"
#include "fetch-to-store.hh"
namespace nix { namespace nix {
@ -44,7 +45,7 @@ ref<InstallableValue> InstallableValue::require(ref<Installable> installable)
std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx) std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
{ {
if (v.type() == nPath) { if (v.type() == nPath) {
auto storePath = v.path().fetchToStore(*state->store); auto storePath = fetchToStore(*state->store, v.path());
return {{ return {{
.path = DerivedPath::Opaque { .path = DerivedPath::Opaque {
.path = std::move(storePath), .path = std::move(storePath),
@ -6,7 +6,7 @@
namespace nix { namespace nix {
struct DrvInfo; struct PackageInfo;
struct SourceExprCommand; struct SourceExprCommand;
namespace eval_cache { class EvalCache; class AttrCursor; } namespace eval_cache { class EvalCache; class AttrCursor; }
@ -487,10 +487,11 @@ Installables SourceExprCommand::parseInstallables(
state->eval(e, *vFile); state->eval(e, *vFile);
} }
else if (file) { else if (file) {
state->evalFile(lookupFileArg(*state, *file, CanonPath::fromCwd(getCommandBaseDir())), *vFile); auto dir = absPath(getCommandBaseDir());
state->evalFile(lookupFileArg(*state, *file, &dir), *vFile);
} }
else { else {
CanonPath dir(CanonPath::fromCwd(getCommandBaseDir())); Path dir = absPath(getCommandBaseDir());
auto e = state->parseExprFromString(*expr, state->rootPath(dir)); auto e = state->parseExprFromString(*expr, state->rootPath(dir));
state->eval(e, *vFile); state->eval(e, *vFile);
} }
@ -715,7 +716,7 @@ BuiltPaths Installable::toBuiltPaths(
} }
} }
StorePathSet Installable::toStorePaths( StorePathSet Installable::toStorePathSet(
ref<Store> evalStore, ref<Store> evalStore,
ref<Store> store, ref<Store> store,
Realise mode, OperateOn operateOn, Realise mode, OperateOn operateOn,
@ -729,13 +730,27 @@ StorePathSet Installable::toStorePaths(
return outPaths; return outPaths;
} }
StorePaths Installable::toStorePaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode, OperateOn operateOn,
const Installables & installables)
{
StorePaths outPaths;
for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) {
auto thisOutPaths = path.outPaths();
outPaths.insert(outPaths.end(), thisOutPaths.begin(), thisOutPaths.end());
}
return outPaths;
}
StorePath Installable::toStorePath( StorePath Installable::toStorePath(
ref<Store> evalStore, ref<Store> evalStore,
ref<Store> store, ref<Store> store,
Realise mode, OperateOn operateOn, Realise mode, OperateOn operateOn,
ref<Installable> installable) ref<Installable> installable)
{ {
auto paths = toStorePaths(evalStore, store, mode, operateOn, {installable}); auto paths = toStorePathSet(evalStore, store, mode, operateOn, {installable});
if (paths.size() != 1) if (paths.size() != 1)
throw Error("argument '%s' should evaluate to one store path", installable->what()); throw Error("argument '%s' should evaluate to one store path", installable->what());
@ -12,7 +12,7 @@
namespace nix { namespace nix {
struct DrvInfo; struct PackageInfo;
enum class Realise { enum class Realise {
/** /**
@ -165,7 +165,14 @@ struct Installable
const Installables & installables, const Installables & installables,
BuildMode bMode = bmNormal); BuildMode bMode = bmNormal);
static std::set<StorePath> toStorePaths( static std::set<StorePath> toStorePathSet(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
const Installables & installables);
static std::vector<StorePath> toStorePaths(
ref<Store> evalStore, ref<Store> evalStore,
ref<Store> store, ref<Store> store,
Realise mode, Realise mode,
@ -8,7 +8,7 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc)
libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers
libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) -pthread libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) $(THREAD_LDFLAGS)
libcmd_LIBS = libstore libutil libexpr libmain libfetchers libcmd_LIBS = libstore libutil libexpr libmain libfetchers
@ -4,12 +4,15 @@
#include "terminal.hh" #include "terminal.hh"
#include <sys/queue.h> #include <sys/queue.h>
#if HAVE_LOWDOWN
#include <lowdown.h> #include <lowdown.h>
#endif
namespace nix { namespace nix {
std::string renderMarkdownToTerminal(std::string_view markdown) std::string renderMarkdownToTerminal(std::string_view markdown)
{ {
#if HAVE_LOWDOWN
int windowWidth = getWindowSize().second; int windowWidth = getWindowSize().second;
struct lowdown_opts opts { struct lowdown_opts opts {
@ -48,6 +51,9 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
throw Error("allocation error while rendering Markdown"); throw Error("allocation error while rendering Markdown");
return filterANSIEscapes(std::string(buf->data, buf->size), !shouldANSI()); return filterANSIEscapes(std::string(buf->data, buf->size), !shouldANSI());
#else
return std::string(markdown);
#endif
} }
} }
@ -5,7 +5,7 @@
#include <setjmp.h> #include <setjmp.h>
#ifdef READLINE #ifdef USE_READLINE
#include <readline/history.h> #include <readline/history.h>
#include <readline/readline.h> #include <readline/readline.h>
#else #else
@ -93,9 +93,18 @@ struct NixRepl
void evalString(std::string s, Value & v); void evalString(std::string s, Value & v);
void loadDebugTraceEnv(DebugTrace & dt); void loadDebugTraceEnv(DebugTrace & dt);
typedef std::set<Value *> ValuesSeen; void printValue(std::ostream & str,
std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth); Value & v,
std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen); unsigned int maxDepth = std::numeric_limits<unsigned int>::max())
{
::nix::printValue(*state, str, v, PrintOptions {
.ansiColors = true,
.force = true,
.derivationPaths = true,
.maxDepth = maxDepth,
.prettyIndent = 2
});
}
}; };
std::string removeWhitespace(std::string s) std::string removeWhitespace(std::string s)
@ -112,7 +121,7 @@ NixRepl::NixRepl(const SearchPath & searchPath, nix::ref<Store> store, ref<EvalS
: AbstractNixRepl(state) : AbstractNixRepl(state)
, debugTraceIndex(0) , debugTraceIndex(0)
, getValues(getValues) , getValues(getValues)
, staticEnv(new StaticEnv(false, state->staticBaseEnv.get())) , staticEnv(new StaticEnv(nullptr, state->staticBaseEnv.get()))
, historyFile(getDataDir() + "/nix/repl-history") , historyFile(getDataDir() + "/nix/repl-history")
{ {
} }
@ -221,10 +230,10 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
// prefer direct pos, but if noPos then try the expr. // prefer direct pos, but if noPos then try the expr.
auto pos = dt.pos auto pos = dt.pos
? dt.pos ? dt.pos
: static_cast<std::shared_ptr<AbstractPos>>(positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]); : positions[dt.expr.getPos() ? dt.expr.getPos() : noPos];
if (pos) { if (pos) {
out << pos; out << *pos;
if (auto loc = pos->getCodeLines()) { if (auto loc = pos->getCodeLines()) {
out << "\n"; out << "\n";
printCodeLines(out, "", *pos, *loc); printCodeLines(out, "", *pos, *loc);
@ -235,10 +244,19 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
return out; return out;
} }
static bool isFirstRepl = true;
void NixRepl::mainLoop() void NixRepl::mainLoop()
{ {
std::string error = ANSI_RED "error:" ANSI_NORMAL " "; if (isFirstRepl) {
notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n"); std::string_view debuggerNotice = "";
if (state->debugRepl) {
debuggerNotice = " debugger";
}
notice("Nix %1%%2%\nType :? for help.", nixVersion, debuggerNotice);
}
isFirstRepl = false;
loadFiles(); loadFiles();
@ -246,17 +264,17 @@ void NixRepl::mainLoop()
rl_readline_name = "nix-repl"; rl_readline_name = "nix-repl";
try { try {
createDirs(dirOf(historyFile)); createDirs(dirOf(historyFile));
} catch (SysError & e) { } catch (SystemError & e) {
logWarning(e.info()); logWarning(e.info());
} }
#ifndef READLINE #ifndef USE_READLINE
el_hist_size = 1000; el_hist_size = 1000;
#endif #endif
read_history(historyFile.c_str()); read_history(historyFile.c_str());
auto oldRepl = curRepl; auto oldRepl = curRepl;
curRepl = this; curRepl = this;
Finally restoreRepl([&] { curRepl = oldRepl; }); Finally restoreRepl([&] { curRepl = oldRepl; });
#ifndef READLINE #ifndef USE_READLINE
rl_set_complete_func(completionCallback); rl_set_complete_func(completionCallback);
rl_set_list_possib_func(listPossibleCallback); rl_set_list_possib_func(listPossibleCallback);
#endif #endif
@ -414,8 +432,6 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
// Quietly ignore parse errors. // Quietly ignore parse errors.
} catch (EvalError & e) { } catch (EvalError & e) {
// Quietly ignore evaluation errors. // Quietly ignore evaluation errors.
} catch (UndefinedVarError & e) {
// Quietly ignore undefined variable errors.
} catch (BadURL & e) { } catch (BadURL & e) {
// Quietly ignore BadURL flake-related errors. // Quietly ignore BadURL flake-related errors.
} }
@ -442,10 +458,10 @@ static bool isVarName(std::string_view s)
StorePath NixRepl::getDerivationPath(Value & v) { StorePath NixRepl::getDerivationPath(Value & v) {
auto drvInfo = getDerivation(*state, v, false); auto packageInfo = getDerivation(*state, v, false);
if (!drvInfo) if (!packageInfo)
throw Error("expression does not evaluate to a derivation, so I can't build it"); throw Error("expression does not evaluate to a derivation, so I can't build it");
auto drvPath = drvInfo->queryDrvPath(); auto drvPath = packageInfo->queryDrvPath();
if (!drvPath) if (!drvPath)
throw Error("expression did not evaluate to a valid derivation (no 'drvPath' attribute)"); throw Error("expression did not evaluate to a valid derivation (no 'drvPath' attribute)");
if (!state->store->isValidPath(*drvPath)) if (!state->store->isValidPath(*drvPath))
@ -708,7 +724,8 @@ bool NixRepl::processLine(std::string line)
else if (command == ":p" || command == ":print") { else if (command == ":p" || command == ":print") {
Value v; Value v;
evalString(arg, v); evalString(arg, v);
printValue(std::cout, v, 1000000000) << std::endl; printValue(std::cout, v);
std::cout << std::endl;
} }
else if (command == ":q" || command == ":quit") { else if (command == ":q" || command == ":quit") {
@ -770,7 +787,8 @@ bool NixRepl::processLine(std::string line)
} else { } else {
Value v; Value v;
evalString(line, v); evalString(line, v);
printValue(std::cout, v, 1) << std::endl; printValue(std::cout, v, 1);
std::cout << std::endl;
} }
} }
@ -880,7 +898,7 @@ void NixRepl::addVarToScope(const Symbol name, Value & v)
Expr * NixRepl::parseString(std::string s) Expr * NixRepl::parseString(std::string s)
{ {
return state->parseExprFromString(std::move(s), state->rootPath(CanonPath::fromCwd()), staticEnv); return state->parseExprFromString(std::move(s), state->rootPath("."), staticEnv);
} }
@ -888,145 +906,7 @@ void NixRepl::evalString(std::string s, Value & v)
{ {
Expr * e = parseString(s); Expr * e = parseString(s);
e->eval(*state, *env, v); e->eval(*state, *env, v);
state->forceValue(v, [&]() { return v.determinePos(noPos); }); state->forceValue(v, v.determinePos(noPos));
}
std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int maxDepth)
{
ValuesSeen seen;
return printValue(str, v, maxDepth, seen);
}
// FIXME: lot of cut&paste from Nix's eval.cc.
std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen)
{
str.flush();
checkInterrupt();
state->forceValue(v, [&]() { return v.determinePos(noPos); });
switch (v.type()) {
case nInt:
str << ANSI_CYAN << v.integer << ANSI_NORMAL;
break;
case nBool:
str << ANSI_CYAN;
printLiteralBool(str, v.boolean);
str << ANSI_NORMAL;
break;
case nString:
str << ANSI_WARNING;
printLiteralString(str, v.string_view());
str << ANSI_NORMAL;
break;
case nPath:
str << ANSI_GREEN << v.path().to_string() << ANSI_NORMAL; // !!! escaping?
break;
case nNull:
str << ANSI_CYAN "null" ANSI_NORMAL;
break;
case nAttrs: {
seen.insert(&v);
bool isDrv = state->isDerivation(v);
if (isDrv) {
str << "«derivation ";
Bindings::iterator i = v.attrs->find(state->sDrvPath);
NixStringContext context;
if (i != v.attrs->end())
str << state->store->printStorePath(state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
else
str << "???";
str << "»";
}
else if (maxDepth > 0) {
str << "{ ";
typedef std::map<std::string, Value *> Sorted;
Sorted sorted;
for (auto & i : *v.attrs)
sorted.emplace(state->symbols[i.name], i.value);
for (auto & i : sorted) {
printAttributeName(str, i.first);
str << " = ";
if (seen.count(i.second))
str << "«repeated»";
else
try {
printValue(str, *i.second, maxDepth - 1, seen);
} catch (AssertionError & e) {
str << ANSI_RED "«error: " << e.msg() << "»" ANSI_NORMAL;
}
str << "; ";
}
str << "}";
} else
str << "{ ... }";
break;
}
case nList:
seen.insert(&v);
str << "[ ";
if (maxDepth > 0)
for (auto elem : v.listItems()) {
if (seen.count(elem))
str << "«repeated»";
else
try {
printValue(str, *elem, maxDepth - 1, seen);
} catch (AssertionError & e) {
str << ANSI_RED "«error: " << e.msg() << "»" ANSI_NORMAL;
}
str << " ";
}
else
str << "... ";
str << "]";
break;
case nFunction:
if (v.isLambda()) {
std::ostringstream s;
s << state->positions[v.lambda.fun->pos];
str << ANSI_BLUE "«lambda @ " << filterANSIEscapes(s.str()) << "»" ANSI_NORMAL;
} else if (v.isPrimOp()) {
str << ANSI_MAGENTA "«primop»" ANSI_NORMAL;
} else if (v.isPrimOpApp()) {
str << ANSI_BLUE "«primop-app»" ANSI_NORMAL;
} else {
abort();
}
break;
case nFloat:
str << v.fpoint;
break;
case nThunk:
case nExternal:
default:
str << ANSI_RED "«unknown»" ANSI_NORMAL;
break;
}
return str;
} }
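The REPL's hand-rolled value printer above is dropped in favour of the shared ::nix::printValue with PrintOptions. Called directly, the new path looks roughly like this (state and value are assumed locals; the option names are copied from the hunk above):

    std::ostringstream out;
    ::nix::printValue(state, out, value, PrintOptions {
        .ansiColors = true,
        .force = true,
        .derivationPaths = true,
        .maxDepth = 2,
        .prettyIndent = 2,
    });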
@ -65,10 +65,10 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
if (!attrIndex) { if (!attrIndex) {
if (v->type() != nAttrs) if (v->type() != nAttrs)
throw TypeError( state.error<TypeError>(
"the expression selected by the selection path '%1%' should be a set but is %2%", "the expression selected by the selection path '%1%' should be a set but is %2%",
attrPath, attrPath,
showType(*v)); showType(*v)).debugThrow();
if (attr.empty()) if (attr.empty())
throw Error("empty attribute name in selection path '%1%'", attrPath); throw Error("empty attribute name in selection path '%1%'", attrPath);
@ -88,10 +88,10 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
else { else {
if (!v->isList()) if (!v->isList())
throw TypeError( state.error<TypeError>(
"the expression selected by the selection path '%1%' should be a list but is %2%", "the expression selected by the selection path '%1%' should be a list but is %2%",
attrPath, attrPath,
showType(*v)); showType(*v)).debugThrow();
if (*attrIndex >= v->listSize()) if (*attrIndex >= v->listSize())
throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath); throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath);
@ -491,7 +491,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
if (forceErrors) if (forceErrors)
debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name)); debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name));
else else
throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name)); throw CachedEvalError(root->state, "cached failure of attribute '%s'", getAttrPathStr(name));
} else } else
return std::make_shared<AttrCursor>(root, return std::make_shared<AttrCursor>(root,
std::make_pair(shared_from_this(), name), nullptr, std::move(attr)); std::make_pair(shared_from_this(), name), nullptr, std::move(attr));
@ -500,7 +500,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
// evaluate to see whether 'name' exists // evaluate to see whether 'name' exists
} else } else
return nullptr; return nullptr;
//throw TypeError("'%s' is not an attribute set", getAttrPathStr()); //error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
} }
} }
@ -508,7 +508,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
if (v.type() != nAttrs) if (v.type() != nAttrs)
return nullptr; return nullptr;
//throw TypeError("'%s' is not an attribute set", getAttrPathStr()); //error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
auto attr = v.attrs->get(name); auto attr = v.attrs->get(name);
@ -574,14 +574,14 @@ std::string AttrCursor::getString()
debug("using cached string attribute '%s'", getAttrPathStr()); debug("using cached string attribute '%s'", getAttrPathStr());
return s->first; return s->first;
} else } else
root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow<TypeError>(); root->state.error<TypeError>("'%s' is not a string", getAttrPathStr()).debugThrow();
} }
} }
auto & v = forceValue(); auto & v = forceValue();
if (v.type() != nString && v.type() != nPath) if (v.type() != nString && v.type() != nPath)
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>(); root->state.error<TypeError>("'%s' is not a string but %s", getAttrPathStr()).debugThrow();
return v.type() == nString ? v.c_str() : v.path().to_string(); return v.type() == nString ? v.c_str() : v.path().to_string();
} }
@ -616,7 +616,7 @@ string_t AttrCursor::getStringWithContext()
return *s; return *s;
} }
} else } else
root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow<TypeError>(); root->state.error<TypeError>("'%s' is not a string", getAttrPathStr()).debugThrow();
} }
} }
@ -630,7 +630,7 @@ string_t AttrCursor::getStringWithContext()
else if (v.type() == nPath) else if (v.type() == nPath)
return {v.path().to_string(), {}}; return {v.path().to_string(), {}};
else else
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>(); root->state.error<TypeError>("'%s' is not a string but %s", getAttrPathStr()).debugThrow();
} }
bool AttrCursor::getBool() bool AttrCursor::getBool()
@ -643,14 +643,14 @@ bool AttrCursor::getBool()
debug("using cached Boolean attribute '%s'", getAttrPathStr()); debug("using cached Boolean attribute '%s'", getAttrPathStr());
return *b; return *b;
} else } else
root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow<TypeError>(); root->state.error<TypeError>("'%s' is not a Boolean", getAttrPathStr()).debugThrow();
} }
} }
auto & v = forceValue(); auto & v = forceValue();
if (v.type() != nBool) if (v.type() != nBool)
root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow<TypeError>(); root->state.error<TypeError>("'%s' is not a Boolean", getAttrPathStr()).debugThrow();
return v.boolean; return v.boolean;
} }
@ -665,14 +665,14 @@ NixInt AttrCursor::getInt()
debug("using cached integer attribute '%s'", getAttrPathStr()); debug("using cached integer attribute '%s'", getAttrPathStr());
return i->x; return i->x;
} else } else
throw TypeError("'%s' is not an integer", getAttrPathStr()); root->state.error<TypeError>("'%s' is not an integer", getAttrPathStr()).debugThrow();
} }
} }
auto & v = forceValue(); auto & v = forceValue();
if (v.type() != nInt) if (v.type() != nInt)
throw TypeError("'%s' is not an integer", getAttrPathStr()); root->state.error<TypeError>("'%s' is not an integer", getAttrPathStr()).debugThrow();
return v.integer; return v.integer;
} }
@ -687,7 +687,7 @@ std::vector<std::string> AttrCursor::getListOfStrings()
debug("using cached list of strings attribute '%s'", getAttrPathStr()); debug("using cached list of strings attribute '%s'", getAttrPathStr());
return *l; return *l;
} else } else
throw TypeError("'%s' is not a list of strings", getAttrPathStr()); root->state.error<TypeError>("'%s' is not a list of strings", getAttrPathStr()).debugThrow();
} }
} }
@ -697,7 +697,7 @@ std::vector<std::string> AttrCursor::getListOfStrings()
root->state.forceValue(v, noPos); root->state.forceValue(v, noPos);
if (v.type() != nList) if (v.type() != nList)
throw TypeError("'%s' is not a list", getAttrPathStr()); root->state.error<TypeError>("'%s' is not a list", getAttrPathStr()).debugThrow();
std::vector<std::string> res; std::vector<std::string> res;
@ -720,14 +720,14 @@ std::vector<Symbol> AttrCursor::getAttrs()
debug("using cached attrset attribute '%s'", getAttrPathStr()); debug("using cached attrset attribute '%s'", getAttrPathStr());
return *attrs; return *attrs;
} else } else
root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow<TypeError>(); root->state.error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
} }
} }
auto & v = forceValue(); auto & v = forceValue();
if (v.type() != nAttrs) if (v.type() != nAttrs)
root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow<TypeError>(); root->state.error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
std::vector<Symbol> attrs; std::vector<Symbol> attrs;
for (auto & attr : *getValue().attrs) for (auto & attr : *getValue().attrs)
113
src/libexpr/eval-error.cc Normal file
@ -0,0 +1,113 @@
#include "eval-error.hh"
#include "eval.hh"
#include "value.hh"
namespace nix {
template<class T>
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withExitStatus(unsigned int exitStatus)
{
error.withExitStatus(exitStatus);
return *this;
}
template<class T>
EvalErrorBuilder<T> & EvalErrorBuilder<T>::atPos(PosIdx pos)
{
error.err.pos = error.state.positions[pos];
return *this;
}
template<class T>
EvalErrorBuilder<T> & EvalErrorBuilder<T>::atPos(Value & value, PosIdx fallback)
{
return atPos(value.determinePos(fallback));
}
template<class T>
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withTrace(PosIdx pos, const std::string_view text)
{
error.err.traces.push_front(
Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = false});
return *this;
}
template<class T>
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withFrameTrace(PosIdx pos, const std::string_view text)
{
error.err.traces.push_front(
Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = true});
return *this;
}
template<class T>
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withSuggestions(Suggestions & s)
{
error.err.suggestions = s;
return *this;
}
template<class T>
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withFrame(const Env & env, const Expr & expr)
{
// NOTE: This is abusing side-effects.
// TODO: check compatibility with nested debugger calls.
// TODO: What side-effects??
error.state.debugTraces.push_front(DebugTrace{
.pos = error.state.positions[expr.getPos()],
.expr = expr,
.env = env,
.hint = HintFmt("Fake frame for debugging purposes"),
.isError = true});
return *this;
}
template<class T>
EvalErrorBuilder<T> & EvalErrorBuilder<T>::addTrace(PosIdx pos, HintFmt hint, bool frame)
{
error.addTrace(error.state.positions[pos], hint, frame);
return *this;
}
template<class T>
template<typename... Args>
EvalErrorBuilder<T> &
EvalErrorBuilder<T>::addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs)
{
addTrace(error.state.positions[pos], HintFmt(std::string(formatString), formatArgs...));
return *this;
}
template<class T>
void EvalErrorBuilder<T>::debugThrow()
{
if (error.state.debugRepl && !error.state.debugTraces.empty()) {
const DebugTrace & last = error.state.debugTraces.front();
const Env * env = &last.env;
const Expr * expr = &last.expr;
error.state.runDebugRepl(&error, *env, *expr);
}
// `EvalState` is the only class that can construct an `EvalErrorBuilder`,
// and it does so in dynamic storage. This is the final method called on
// any such instance and must delete itself before throwing the underlying
// error.
auto error = std::move(this->error);
delete this;
throw error;
}
template class EvalErrorBuilder<EvalError>;
template class EvalErrorBuilder<AssertionError>;
template class EvalErrorBuilder<ThrownError>;
template class EvalErrorBuilder<Abort>;
template class EvalErrorBuilder<TypeError>;
template class EvalErrorBuilder<UndefinedVarError>;
template class EvalErrorBuilder<MissingArgumentError>;
template class EvalErrorBuilder<InfiniteRecursionError>;
template class EvalErrorBuilder<CachedEvalError>;
template class EvalErrorBuilder<InvalidPathError>;
}
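The explicit instantiations above are needed because EvalErrorBuilder's member functions are defined out of line in this file; each error class the evaluator can throw therefore needs both its MakeError declaration in eval-error.hh and an instantiation here. For a hypothetical new error type that would mean adding:

    template class EvalErrorBuilder<MyNewError>;  // MyNewError is illustrative only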
104
src/libexpr/eval-error.hh Normal file
@ -0,0 +1,104 @@
#pragma once
#include <algorithm>
#include "error.hh"
#include "pos-idx.hh"
namespace nix {
struct Env;
struct Expr;
struct Value;
class EvalState;
template<class T>
class EvalErrorBuilder;
class EvalError : public Error
{
template<class T>
friend class EvalErrorBuilder;
public:
EvalState & state;
EvalError(EvalState & state, ErrorInfo && errorInfo)
: Error(errorInfo)
, state(state)
{
}
template<typename... Args>
explicit EvalError(EvalState & state, const std::string & formatString, const Args &... formatArgs)
: Error(formatString, formatArgs...)
, state(state)
{
}
};
MakeError(ParseError, Error);
MakeError(AssertionError, EvalError);
MakeError(ThrownError, AssertionError);
MakeError(Abort, EvalError);
MakeError(TypeError, EvalError);
MakeError(UndefinedVarError, EvalError);
MakeError(MissingArgumentError, EvalError);
MakeError(CachedEvalError, EvalError);
MakeError(InfiniteRecursionError, EvalError);
struct InvalidPathError : public EvalError
{
public:
Path path;
InvalidPathError(EvalState & state, const Path & path)
: EvalError(state, "path '%s' is not valid", path)
{
}
};
/**
* `EvalErrorBuilder`s may only be constructed by `EvalState`. The `debugThrow`
* method must be the final method in any such `EvalErrorBuilder` usage, and it
* handles deleting the object.
*/
template<class T>
class EvalErrorBuilder final
{
friend class EvalState;
template<typename... Args>
explicit EvalErrorBuilder(EvalState & state, const Args &... args)
: error(T(state, args...))
{
}
public:
T error;
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withExitStatus(unsigned int exitStatus);
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & atPos(PosIdx pos);
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & atPos(Value & value, PosIdx fallback = noPos);
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withTrace(PosIdx pos, const std::string_view text);
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withFrameTrace(PosIdx pos, const std::string_view text);
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withSuggestions(Suggestions & s);
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withFrame(const Env & e, const Expr & ex);
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & addTrace(PosIdx pos, HintFmt hint, bool frame = false);
template<typename... Args>
[[nodiscard, gnu::noinline]] EvalErrorBuilder<T> &
addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs);
/**
* Delete the `EvalErrorBuilder` and throw the underlying exception.
*/
[[gnu::noinline, gnu::noreturn]] void debugThrow();
};
}
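Call sites elsewhere in this commit show the intended shape: EvalState::error<T>() returns a builder, the chaining methods refine it, and debugThrow() both frees the builder and throws T. A representative chain, with placeholder message, value and position:

    state.error<TypeError>("expected a set but found %1%", showType(v))
        .withTrace(pos, "while evaluating the argument")
        .debugThrow();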
@@ -1,7 +1,9 @@
 #pragma once
 ///@file
+#include "print.hh"
 #include "eval.hh"
+#include "eval-error.hh"
 namespace nix {
@@ -73,8 +75,6 @@ Env & EvalState::allocEnv(size_t size)
 #endif
     env = (Env *) allocBytes(sizeof(Env) + size * sizeof(Value *));
-    env->type = Env::Plain;
     /* We assume that env->values has been cleared by the allocator; maybeThunk() and lookupVar fromWith expect this. */
     return *env;
@@ -83,13 +83,6 @@ Env & EvalState::allocEnv(size_t size)
 [[gnu::always_inline]]
 void EvalState::forceValue(Value & v, const PosIdx pos)
-{
-    forceValue(v, [&]() { return pos; });
-}
-template<typename Callable>
-void EvalState::forceValue(Value & v, Callable getPos)
 {
     if (v.isThunk()) {
         Env * env = v.thunk.env;
@@ -100,15 +93,12 @@ void EvalState::forceValue(Value & v, Callable getPos)
             expr->eval(*this, *env, v);
         } catch (...) {
             v.mkThunk(env, expr);
+            tryFixupBlackHolePos(v, pos);
             throw;
         }
     }
-    else if (v.isApp()) {
+    else if (v.isApp())
-        PosIdx pos = getPos();
         callFunction(*v.app.left, *v.app.right, v, pos);
-    }
-    else if (v.isBlackhole())
-        error("infinite recursion encountered").atPos(getPos()).template debugThrow<EvalError>();
 }
@@ -126,7 +116,11 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e
     PosIdx pos = getPos();
     forceValue(v, pos);
     if (v.type() != nAttrs) {
-        error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
+        error<TypeError>(
+            "expected a set but found %1%: %2%",
+            showType(v),
+            ValuePrinter(*this, v, errorPrintOptions)
+        ).withTrace(pos, errorCtx).debugThrow();
     }
 }
@@ -136,7 +130,11 @@ inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view e
 {
     forceValue(v, pos);
     if (!v.isList()) {
-        error("value is %1% while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
+        error<TypeError>(
+            "expected a list but found %1%: %2%",
+            showType(v),
+            ValuePrinter(*this, v, errorPrintOptions)
+        ).withTrace(pos, errorCtx).debugThrow();
     }
 }
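Note that the `Callable getPos` overload of `forceValue` is gone, so callers that previously deferred position computation with a lambda now pass a `PosIdx` eagerly. The before/after below is taken verbatim from the `get-drvs.cc` hunk later in this diff:

    // Before: position supplied lazily via a callable
    state.forceValue(v, [&]() { return v.determinePos(noPos); });

    // After: position supplied directly as a PosIdx
    state.forceValue(v, v.determinePos(noPos));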


@@ -124,6 +124,9 @@ struct EvalSettings : Config
     Setting<bool> traceVerbose{this, false, "trace-verbose",
         "Whether `builtins.traceVerbose` should trace its first argument when evaluated."};
+    Setting<unsigned int> maxCallDepth{this, 10000, "max-call-depth",
+        "The maximum function call depth to allow before erroring."};
 };
 extern EvalSettings evalSettings;
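The evaluator code that consumes `maxCallDepth` lives in the file whose diff is suppressed below, so it is not visible here. Purely as a hypothetical sketch (not the verbatim implementation) of how such a limit is typically wired to the `callDepth` counter declared in `eval.hh` further down:

    // Hypothetical sketch: an RAII guard bumps the per-EvalState call depth,
    // and the call path errors out once evalSettings.maxCallDepth is exceeded.
    struct CallDepthGuard
    {
        size_t & depth;
        CallDepthGuard(size_t & depth) : depth(depth) { ++depth; }
        ~CallDepthGuard() { --depth; }
    };

    // ...inside the function-call path, before recursing (sketch):
    // if (callDepth > evalSettings.maxCallDepth)
    //     error<EvalError>("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow();
    // CallDepthGuard guard(callDepth);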

(File diff suppressed because it is too large.)


@@ -2,6 +2,7 @@
 ///@file
 #include "attr-set.hh"
+#include "eval-error.hh"
 #include "types.hh"
 #include "value.hh"
 #include "nixexpr.hh"
@@ -84,6 +85,8 @@ struct PrimOp
     void check();
 };
+std::ostream & operator<<(std::ostream & output, PrimOp & primOp);
 /**
  * Info about a constant
  */
@@ -116,11 +119,6 @@ struct Constant
 struct Env
 {
     Env * up;
-    /**
-     * Number of of levels up to next `with` environment
-     */
-    unsigned short prevWith:14;
-    enum { Plain = 0, HasWithExpr, HasWithAttrs } type:2;
     Value * values[0];
 };
@@ -132,7 +130,7 @@ std::unique_ptr<ValMap> mapStaticEnvBindings(const SymbolTable & st, const Stati
 void copyContext(const Value & v, NixStringContext & context);
-std::string printValue(const EvalState & state, const Value & v);
+std::string printValue(EvalState & state, Value & v);
 std::ostream & operator << (std::ostream & os, const ValueType t);
@@ -147,52 +145,13 @@ struct RegexCache;
 std::shared_ptr<RegexCache> makeRegexCache();
 struct DebugTrace {
-    std::shared_ptr<AbstractPos> pos;
+    std::shared_ptr<Pos> pos;
     const Expr & expr;
     const Env & env;
-    hintformat hint;
+    HintFmt hint;
     bool isError;
 };
-void debugError(Error * e, Env & env, Expr & expr);
-class ErrorBuilder
-{
-private:
-    EvalState & state;
-    ErrorInfo info;
-    ErrorBuilder(EvalState & s, ErrorInfo && i): state(s), info(i) { }
-public:
-    template<typename... Args>
-    [[nodiscard, gnu::noinline]]
-    static ErrorBuilder * create(EvalState & s, const Args & ... args)
-    {
-        return new ErrorBuilder(s, ErrorInfo { .msg = hintfmt(args...) });
-    }
-    [[nodiscard, gnu::noinline]]
-    ErrorBuilder & atPos(PosIdx pos);
-    [[nodiscard, gnu::noinline]]
-    ErrorBuilder & withTrace(PosIdx pos, const std::string_view text);
-    [[nodiscard, gnu::noinline]]
-    ErrorBuilder & withFrameTrace(PosIdx pos, const std::string_view text);
-    [[nodiscard, gnu::noinline]]
-    ErrorBuilder & withSuggestions(Suggestions & s);
-    [[nodiscard, gnu::noinline]]
-    ErrorBuilder & withFrame(const Env & e, const Expr & ex);
-    template<class ErrorType>
-    [[gnu::noinline, gnu::noreturn]]
-    void debugThrow();
-};
 class EvalState : public std::enable_shared_from_this<EvalState>
 {
 public:
@@ -210,6 +169,8 @@ public:
         sPrefix,
         sOutputSpecified;
+    const Expr::AstSymbols exprSymbols;
     /**
      * If set, force copying files to the Nix store even if they
      * already exist there.
@@ -218,6 +179,11 @@ public:
     Bindings emptyBindings;
+    /**
+     * Empty list constant.
+     */
+    Value vEmptyList;
     /**
      * The accessor for the root filesystem.
      */
@@ -270,39 +236,11 @@ public:
     void runDebugRepl(const Error * error, const Env & env, const Expr & expr);
-    template<class E>
+    template<class T, typename... Args>
-    [[gnu::noinline, gnu::noreturn]]
-    void debugThrowLastTrace(E && error)
-    {
-        debugThrow(error, nullptr, nullptr);
-    }
-    template<class E>
-    [[gnu::noinline, gnu::noreturn]]
-    void debugThrow(E && error, const Env * env, const Expr * expr)
-    {
-        if (debugRepl && ((env && expr) || !debugTraces.empty())) {
-            if (!env || !expr) {
-                const DebugTrace & last = debugTraces.front();
-                env = &last.env;
-                expr = &last.expr;
-            }
-            runDebugRepl(&error, *env, *expr);
-        }
-        throw std::move(error);
-    }
-    // This is dangerous, but gets in line with the idea that error creation and
-    // throwing should not allocate on the stack of hot functions.
-    // as long as errors are immediately thrown, it works.
-    ErrorBuilder * errorBuilder;
-    template<typename... Args>
     [[nodiscard, gnu::noinline]]
-    ErrorBuilder & error(const Args & ... args) {
+    EvalErrorBuilder<T> & error(const Args & ... args) {
-        errorBuilder = ErrorBuilder::create(*this, args...);
+        // `EvalErrorBuilder::debugThrow` performs the corresponding `delete`.
-        return *errorBuilder;
+        return *new EvalErrorBuilder<T>(*this, args...);
     }
 private:
@@ -368,6 +306,11 @@ public:
      */
     SourcePath rootPath(CanonPath path);
+    /**
+     * Variant which accepts relative paths too.
+     */
+    SourcePath rootPath(PathView path);
     /**
      * Allow access to a path.
      */
@@ -460,8 +403,7 @@ public:
      */
     inline void forceValue(Value & v, const PosIdx pos);
-    template <typename Callable>
+    void tryFixupBlackHolePos(Value & v, PosIdx pos);
-    inline void forceValue(Value & v, Callable getPos);
     /**
      * Force a value, then recursively force list elements and
@@ -623,6 +565,11 @@ private:
         const SourcePath & basePath,
         std::shared_ptr<StaticEnv> & staticEnv);
+    /**
+     * Current Nix call stack depth, used with `max-call-depth` setting to throw stack overflow hopefully before we run out of system stack.
+     */
+    size_t callDepth = 0;
 public:
     /**
@@ -837,22 +784,6 @@ SourcePath resolveExprPath(SourcePath path);
  */
 bool isAllowedURI(std::string_view uri, const Strings & allowedPaths);
-struct InvalidPathError : EvalError
-{
-    Path path;
-    InvalidPathError(const Path & path);
-#ifdef EXCEPTION_NEEDS_THROW_SPEC
-    ~InvalidPathError() throw () { };
-#endif
-};
-template<class ErrorType>
-void ErrorBuilder::debugThrow()
-{
-    // NOTE: We always use the -LastTrace version as we push the new trace in withFrame()
-    state.debugThrowLastTrace(ErrorType(info));
-}
 }
 #include "eval-inline.hh"


@@ -147,15 +147,15 @@ static FlakeInput parseFlakeInput(EvalState & state,
                 NixStringContext emptyContext = {};
                 attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, emptyContext).dump());
             } else
-                throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
+                state.error<TypeError>("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
-                    state.symbols[attr.name], showType(*attr.value));
+                    state.symbols[attr.name], showType(*attr.value)).debugThrow();
             }
 #pragma GCC diagnostic pop
         }
     } catch (Error & e) {
         e.addTrace(
             state.positions[attr.pos],
-            hintfmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
+            HintFmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
         throw;
     }
 }
@@ -164,7 +164,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
         try {
             input.ref = FlakeRef::fromAttrs(attrs);
         } catch (Error & e) {
-            e.addTrace(state.positions[pos], hintfmt("while evaluating flake input"));
+            e.addTrace(state.positions[pos], HintFmt("while evaluating flake input"));
             throw;
         }
     else {
@@ -295,15 +295,15 @@ static Flake getFlake(
                 std::vector<std::string> ss;
                 for (auto elem : setting.value->listItems()) {
                     if (elem->type() != nString)
-                        throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected",
+                        state.error<TypeError>("list element in flake configuration setting '%s' is %s while a string is expected",
-                            state.symbols[setting.name], showType(*setting.value));
+                            state.symbols[setting.name], showType(*setting.value)).debugThrow();
                     ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos, ""));
                 }
                 flake.config.settings.emplace(state.symbols[setting.name], ss);
             }
             else
-                throw TypeError("flake configuration setting '%s' is %s",
+                state.error<TypeError>("flake configuration setting '%s' is %s",
-                    state.symbols[setting.name], showType(*setting.value));
+                    state.symbols[setting.name], showType(*setting.value)).debugThrow();
         }
     }
@@ -865,11 +865,11 @@ static void prim_flakeRefToString(
             attrs.emplace(state.symbols[attr.name],
                 std::string(attr.value->string_view()));
         } else {
-            state.error(
+            state.error<EvalError>(
                 "flake reference attribute sets may only contain integers, Booleans, "
                 "and strings, but attribute '%s' is %s",
                 state.symbols[attr.name],
-                showType(*attr.value)).debugThrow<EvalError>();
+                showType(*attr.value)).debugThrow();
         }
     }
     auto flakeRef = FlakeRef::fromAttrs(attrs);
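The pattern in this file is representative of the whole migration: free-standing `throw SomeError(...)` becomes `state.error<SomeError>(...).debugThrow()`, so the failure is built through `EvalErrorBuilder` (and can stop in the debugger via `debugThrow`) rather than bypassing it. A condensed before/after, taken from the hunks above:

    // Before:
    throw TypeError("flake configuration setting '%s' is %s",
        state.symbols[setting.name], showType(*setting.value));

    // After:
    state.error<TypeError>("flake configuration setting '%s' is %s",
        state.symbols[setting.name], showType(*setting.value)).debugThrow();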


@@ -4,7 +4,7 @@
 namespace nix {
-static const std::string attributeNamePattern("[a-z0-9_-]+");
+static const std::string attributeNamePattern("[a-zA-Z0-9_-]+");
 static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")(\\^.*)?");
 static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+");
 static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")");
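The only change here is that attribute names may now contain uppercase letters, matching what `pathSegmentPattern` already allowed. A small standalone check (hypothetical test code, not part of the diff) illustrating the difference:

    #include <cassert>
    #include <regex>
    #include <string>

    int main()
    {
        // Old and new patterns, copied from the hunk above.
        std::regex oldAttr("[a-z0-9_-]+");
        std::regex newAttr("[a-zA-Z0-9_-]+");

        std::string name = "nixosConfigurations";   // mixed-case attribute name
        assert(!std::regex_match(name, oldAttr));   // rejected by the old pattern
        assert(std::regex_match(name, newAttr));    // accepted by the new one
        return 0;
    }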


@@ -11,13 +11,13 @@
 namespace nix {
-DrvInfo::DrvInfo(EvalState & state, std::string attrPath, Bindings * attrs)
+PackageInfo::PackageInfo(EvalState & state, std::string attrPath, Bindings * attrs)
     : state(&state), attrs(attrs), attrPath(std::move(attrPath))
 {
 }
-DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs)
+PackageInfo::PackageInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs)
     : state(&state), attrs(nullptr), attrPath("")
 {
     auto [drvPath, selectedOutputs] = parsePathWithOutputs(*store, drvPathWithOutputs);
@@ -45,18 +45,18 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat
 }
-std::string DrvInfo::queryName() const
+std::string PackageInfo::queryName() const
 {
     if (name == "" && attrs) {
         auto i = attrs->find(state->sName);
-        if (i == attrs->end()) throw TypeError("derivation name missing");
+        if (i == attrs->end()) state->error<TypeError>("derivation name missing").debugThrow();
         name = state->forceStringNoCtx(*i->value, noPos, "while evaluating the 'name' attribute of a derivation");
     }
     return name;
 }
-std::string DrvInfo::querySystem() const
+std::string PackageInfo::querySystem() const
 {
     if (system == "" && attrs) {
         auto i = attrs->find(state->sSystem);
@@ -66,7 +66,7 @@ std::string DrvInfo::querySystem() const
 }
-std::optional<StorePath> DrvInfo::queryDrvPath() const
+std::optional<StorePath> PackageInfo::queryDrvPath() const
 {
     if (!drvPath && attrs) {
         Bindings::iterator i = attrs->find(state->sDrvPath);
@@ -80,7 +80,7 @@ std::optional<StorePath> DrvInfo::queryDrvPath() const
 }
-StorePath DrvInfo::requireDrvPath() const
+StorePath PackageInfo::requireDrvPath() const
 {
     if (auto drvPath = queryDrvPath())
         return *drvPath;
@@ -88,7 +88,7 @@ StorePath DrvInfo::requireDrvPath() const
 }
-StorePath DrvInfo::queryOutPath() const
+StorePath PackageInfo::queryOutPath() const
 {
     if (!outPath && attrs) {
         Bindings::iterator i = attrs->find(state->sOutPath);
@@ -102,7 +102,7 @@ StorePath DrvInfo::queryOutPath() const
 }
-DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall)
+PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall)
 {
     if (outputs.empty()) {
         /* Get the outputs list. */
@@ -164,7 +164,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
 }
-std::string DrvInfo::queryOutputName() const
+std::string PackageInfo::queryOutputName() const
 {
     if (outputName == "" && attrs) {
         Bindings::iterator i = attrs->find(state->sOutputName);
@@ -174,7 +174,7 @@ std::string DrvInfo::queryOutputName() const
 }
-Bindings * DrvInfo::getMeta()
+Bindings * PackageInfo::getMeta()
 {
     if (meta) return meta;
     if (!attrs) return 0;
@@ -186,7 +186,7 @@ Bindings * DrvInfo::getMeta()
 }
-StringSet DrvInfo::queryMetaNames()
+StringSet PackageInfo::queryMetaNames()
 {
     StringSet res;
     if (!getMeta()) return res;
@@ -196,9 +196,9 @@ StringSet DrvInfo::queryMetaNames()
 }
-bool DrvInfo::checkMeta(Value & v)
+bool PackageInfo::checkMeta(Value & v)
 {
-    state->forceValue(v, [&]() { return v.determinePos(noPos); });
+    state->forceValue(v, v.determinePos(noPos));
     if (v.type() == nList) {
         for (auto elem : v.listItems())
             if (!checkMeta(*elem)) return false;
@@ -216,7 +216,7 @@ bool DrvInfo::checkMeta(Value & v)
 }
-Value * DrvInfo::queryMeta(const std::string & name)
+Value * PackageInfo::queryMeta(const std::string & name)
 {
     if (!getMeta()) return 0;
     Bindings::iterator a = meta->find(state->symbols.create(name));
@@ -225,7 +225,7 @@ Value * DrvInfo::queryMeta(const std::string & name)
 }
-std::string DrvInfo::queryMetaString(const std::string & name)
+std::string PackageInfo::queryMetaString(const std::string & name)
 {
     Value * v = queryMeta(name);
     if (!v || v->type() != nString) return "";
@@ -233,7 +233,7 @@ std::string DrvInfo::queryMetaString(const std::string & name)
 }
-NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def)
+NixInt PackageInfo::queryMetaInt(const std::string & name, NixInt def)
 {
     Value * v = queryMeta(name);
     if (!v) return def;
@@ -247,7 +247,7 @@ NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def)
     return def;
 }
-NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def)
+NixFloat PackageInfo::queryMetaFloat(const std::string & name, NixFloat def)
 {
     Value * v = queryMeta(name);
     if (!v) return def;
@@ -262,7 +262,7 @@ NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def)
 }
-bool DrvInfo::queryMetaBool(const std::string & name, bool def)
+bool PackageInfo::queryMetaBool(const std::string & name, bool def)
 {
     Value * v = queryMeta(name);
     if (!v) return def;
@@ -277,7 +277,7 @@ bool DrvInfo::queryMetaBool(const std::string & name, bool def)
 }
-void DrvInfo::setMeta(const std::string & name, Value * v)
+void PackageInfo::setMeta(const std::string & name, Value * v)
 {
     getMeta();
     auto attrs = state->buildBindings(1 + (meta ? meta->size() : 0));
@@ -300,18 +300,18 @@ typedef std::set<Bindings *> Done;
    The result boolean indicates whether it makes sense
    for the caller to recursively search for derivations in `v'. */
 static bool getDerivation(EvalState & state, Value & v,
-    const std::string & attrPath, DrvInfos & drvs, Done & done,
+    const std::string & attrPath, PackageInfos & drvs, Done & done,
     bool ignoreAssertionFailures)
 {
     try {
-        state.forceValue(v, [&]() { return v.determinePos(noPos); });
+        state.forceValue(v, v.determinePos(noPos));
         if (!state.isDerivation(v)) return true;
         /* Remove spurious duplicates (e.g., a set like `rec { x =
           derivation {...}; y = x;}'. */
         if (!done.insert(v.attrs).second) return false;
-        DrvInfo drv(state, attrPath, v.attrs);
+        PackageInfo drv(state, attrPath, v.attrs);
         drv.queryName();
@@ -326,11 +326,11 @@ static bool getDerivation(EvalState & state, Value & v,
 }
-std::optional<DrvInfo> getDerivation(EvalState & state, Value & v,
+std::optional<PackageInfo> getDerivation(EvalState & state, Value & v,
     bool ignoreAssertionFailures)
 {
     Done done;
-    DrvInfos drvs;
+    PackageInfos drvs;
     getDerivation(state, v, "", drvs, done, ignoreAssertionFailures);
     if (drvs.size() != 1) return {};
     return std::move(drvs.front());
@@ -348,7 +348,7 @@ static std::regex attrRegex("[A-Za-z_][A-Za-z0-9-_+]*");
 static void getDerivations(EvalState & state, Value & vIn,
     const std::string & pathPrefix, Bindings & autoArgs,
-    DrvInfos & drvs, Done & done,
+    PackageInfos & drvs, Done & done,
     bool ignoreAssertionFailures)
 {
     Value v;
@@ -396,12 +396,13 @@ static void getDerivations(EvalState & state, Value & vIn,
         }
     }
-    else throw TypeError("expression does not evaluate to a derivation (or a set or list of those)");
+    else
+        state.error<TypeError>("expression does not evaluate to a derivation (or a set or list of those)").debugThrow();
 }
 void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix,
-    Bindings & autoArgs, DrvInfos & drvs, bool ignoreAssertionFailures)
+    Bindings & autoArgs, PackageInfos & drvs, bool ignoreAssertionFailures)
 {
     Done done;
     getDerivations(state, v, pathPrefix, autoArgs, drvs, done, ignoreAssertionFailures);


@@ -10,8 +10,10 @@
 namespace nix {
-struct DrvInfo
+/**
+ * A "parsed" package attribute set.
+ */
+struct PackageInfo
 {
 public:
     typedef std::map<std::string, std::optional<StorePath>> Outputs;
@@ -43,9 +45,9 @@ public:
      */
     std::string attrPath;
-    DrvInfo(EvalState & state) : state(&state) { };
+    PackageInfo(EvalState & state) : state(&state) { };
-    DrvInfo(EvalState & state, std::string attrPath, Bindings * attrs);
+    PackageInfo(EvalState & state, std::string attrPath, Bindings * attrs);
-    DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs);
+    PackageInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs);
     std::string queryName() const;
     std::string querySystem() const;
@@ -82,21 +84,21 @@ public:
 #if HAVE_BOEHMGC
-typedef std::list<DrvInfo, traceable_allocator<DrvInfo>> DrvInfos;
+typedef std::list<PackageInfo, traceable_allocator<PackageInfo>> PackageInfos;
 #else
-typedef std::list<DrvInfo> DrvInfos;
+typedef std::list<PackageInfo> PackageInfos;
 #endif
 /**
- * If value `v` denotes a derivation, return a DrvInfo object
+ * If value `v` denotes a derivation, return a PackageInfo object
  * describing it. Otherwise return nothing.
  */
-std::optional<DrvInfo> getDerivation(EvalState & state,
+std::optional<PackageInfo> getDerivation(EvalState & state,
     Value & v, bool ignoreAssertionFailures);
 void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix,
-    Bindings & autoArgs, DrvInfos & drvs,
+    Bindings & autoArgs, PackageInfos & drvs,
     bool ignoreAssertionFailures);
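For downstream code the rename is mechanical: `DrvInfo`/`DrvInfos` become `PackageInfo`/`PackageInfos` with the same member functions. A hypothetical fragment of an updated call site (sketch only, assuming an `EvalState state`, an evaluated root value `vRoot`, a `Bindings & autoArgs`, and `<iostream>` are available):

    PackageInfos pkgs;                      // was: DrvInfos
    getDerivations(state, vRoot, "", autoArgs, pkgs,
        /* ignoreAssertionFailures */ false);

    for (auto & pkg : pkgs)                 // elements were DrvInfo, now PackageInfo
        std::cout << pkg.queryName() << "\n";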

(Some files were not shown because too many files have changed in this diff.)