mirror of https://github.com/privatevoid-net/nix-super.git
synced 2024-11-10 08:16:15 +02:00

commit 1fce12ec32: Merge remote-tracking branch 'origin/master' into tarball-cache

364 changed files with 5262 additions and 3101 deletions
.github/labeler.yml (vendored, 31 changes)

@@ -1,23 +1,30 @@
 "documentation":
-  - doc/manual/*
-  - src/nix/**/*.md
+  - changed-files:
+    - any-glob-to-any-file: "doc/manual/*"
+    - any-glob-to-any-file: "src/nix/**/*.md"
 
 "store":
-  - src/libstore/store-api.*
-  - src/libstore/*-store.*
+  - changed-files:
+    - any-glob-to-any-file: "src/libstore/store-api.*"
+    - any-glob-to-any-file: "src/libstore/*-store.*"
 
 "fetching":
-  - src/libfetchers/**/*
+  - changed-files:
+    - any-glob-to-any-file: "src/libfetchers/**/*"
 
 "repl":
-  - src/libcmd/repl.*
-  - src/nix/repl.*
+  - changed-files:
+    - any-glob-to-any-file: "src/libcmd/repl.*"
+    - any-glob-to-any-file: "src/nix/repl.*"
 
 "new-cli":
-  - src/nix/**/*
+  - changed-files:
+    - any-glob-to-any-file: "src/nix/**/*"
 
 "with-tests":
-  # Unit tests
-  - src/*/tests/**/*
-  # Functional and integration tests
-  - tests/functional/**/*
+  - changed-files:
+    # Unit tests
+    - any-glob-to-any-file: "src/*/tests/**/*"
+    # Functional and integration tests
+    - any-glob-to-any-file: "tests/functional/**/*"
.github/workflows/backport.yml (vendored, 2 changes)

@@ -21,7 +21,7 @@ jobs:
         fetch-depth: 0
     - name: Create backport PRs
       # should be kept in sync with `version`
-      uses: zeebe-io/backport-action@v2.1.1
+      uses: zeebe-io/backport-action@v2.2.0
       with:
         # Config README: https://github.com/zeebe-io/backport-action#backport-action
         github_token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/ci.yml (vendored, 14 changes)

@@ -20,12 +20,12 @@ jobs:
     - uses: actions/checkout@v4
       with:
         fetch-depth: 0
-    - uses: cachix/install-nix-action@v23
+    - uses: cachix/install-nix-action@v24
      with:
        # The sandbox would otherwise be disabled by default on Darwin
        extra_nix_config: "sandbox = true"
    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/cachix-action@v12
+    - uses: cachix/cachix-action@v13
      if: needs.check_secrets.outputs.cachix == 'true'
      with:
        name: '${{ env.CACHIX_NAME }}'

@@ -62,10 +62,10 @@ jobs:
      with:
        fetch-depth: 0
    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/install-nix-action@v23
+    - uses: cachix/install-nix-action@v24
      with:
        install_url: https://releases.nixos.org/nix/nix-2.13.3/install
-    - uses: cachix/cachix-action@v12
+    - uses: cachix/cachix-action@v13
      with:
        name: '${{ env.CACHIX_NAME }}'
        signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'

@@ -84,7 +84,7 @@ jobs:
    steps:
    - uses: actions/checkout@v4
    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/install-nix-action@v23
+    - uses: cachix/install-nix-action@v24
      with:
        install_url: '${{needs.installer.outputs.installerURL}}'
        install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"

@@ -114,12 +114,12 @@ jobs:
    - uses: actions/checkout@v4
      with:
        fetch-depth: 0
-    - uses: cachix/install-nix-action@v23
+    - uses: cachix/install-nix-action@v24
      with:
        install_url: https://releases.nixos.org/nix/nix-2.13.3/install
    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
    - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
-    - uses: cachix/cachix-action@v12
+    - uses: cachix/cachix-action@v13
      if: needs.check_secrets.outputs.cachix == 'true'
      with:
        name: '${{ env.CACHIX_NAME }}'
.github/workflows/labels.yml (vendored, 2 changes)

@@ -18,7 +18,7 @@ jobs:
    runs-on: ubuntu-latest
    if: github.repository_owner == 'NixOS'
    steps:
-    - uses: actions/labeler@v4
+    - uses: actions/labeler@v5
      with:
        repo-token: ${{ secrets.GITHUB_TOKEN }}
        sync-labels: false
.gitignore (vendored, 10 changes)

@@ -22,6 +22,8 @@ perl/Makefile.config
 /doc/manual/xp-features.json
 /doc/manual/src/SUMMARY.md
 /doc/manual/src/SUMMARY-rl-next.md
+/doc/manual/src/store/types/*
+!/doc/manual/src/store/types/index.md.in
 /doc/manual/src/command-ref/new-cli
 /doc/manual/src/command-ref/conf-file.md
 /doc/manual/src/command-ref/experimental-features-shortlist.md

@@ -43,18 +45,18 @@ perl/Makefile.config
 /src/libexpr/parser-tab.hh
 /src/libexpr/parser-tab.output
 /src/libexpr/nix.tbl
-/src/libexpr/tests/libnixexpr-tests
+/tests/unit/libexpr/libnixexpr-tests
 
 # /src/libstore/
 *.gen.*
-/src/libstore/tests/libnixstore-tests
+/tests/unit/libstore/libnixstore-tests
 
 # /src/libutil/
-/src/libutil/tests/libnixutil-tests
+/tests/unit/libutil/libnixutil-tests
 
 /src/nix/nix
 
-/src/nix/doc
+/src/nix/generated-doc
 
 # /src/nix-env/
 /src/nix-env/nix-env
Makefile (29 changes)

@@ -1,5 +1,7 @@
--include Makefile.config
-clean-files += Makefile.config
+include mk/build-dir.mk
+
+-include $(buildprefix)Makefile.config
+clean-files += $(buildprefix)Makefile.config
 
 ifeq ($(ENABLE_BUILD), yes)
 makefiles = \

@@ -19,17 +21,17 @@ makefiles = \
   misc/zsh/local.mk \
   misc/systemd/local.mk \
   misc/launchd/local.mk \
-  misc/upstart/local.mk \
-  doc/manual/local.mk \
-  doc/internal-api/local.mk
+  misc/upstart/local.mk
 endif
 
 ifeq ($(ENABLE_BUILD)_$(ENABLE_TESTS), yes_yes)
+UNIT_TEST_ENV = _NIX_TEST_UNIT_DATA=unit-test-data
 makefiles += \
-  src/libutil/tests/local.mk \
-  src/libstore/tests/local.mk \
-  src/libexpr/tests/local.mk
+  tests/unit/libutil/local.mk \
+  tests/unit/libutil-support/local.mk \
+  tests/unit/libstore/local.mk \
+  tests/unit/libstore-support/local.mk \
+  tests/unit/libexpr/local.mk \
+  tests/unit/libexpr-support/local.mk
 endif
 
 ifeq ($(ENABLE_TESTS), yes)

@@ -55,4 +57,11 @@ endif
 
 include mk/lib.mk
 
-GLOBAL_CXXFLAGS += -g -Wall -include config.h -std=c++2a -I src
+# Must be included after `mk/lib.mk` so rules refer to variables defined
+# by the library. Rules are not "lazy" like variables, unfortunately.
+ifeq ($(ENABLE_BUILD), yes)
+$(eval $(call include-sub-makefile, doc/manual/local.mk))
+endif
+$(eval $(call include-sub-makefile, doc/internal-api/local.mk))
+
+GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src
boehmgc-traceable_allocator-public.diff (new file, 12 lines)

@@ -0,0 +1,12 @@
diff --git a/include/gc_allocator.h b/include/gc_allocator.h
index 597c7f13..587286be 100644
--- a/include/gc_allocator.h
+++ b/include/gc_allocator.h
@@ -312,6 +312,7 @@ public:
 
 template<>
 class traceable_allocator<void> {
+public:
   typedef size_t size_type;
   typedef ptrdiff_t difference_type;
   typedef void* pointer;
configure.ac (52 changes)

@@ -122,7 +122,6 @@ AC_PATH_PROG(flex, flex, false)
 AC_PATH_PROG(bison, bison, false)
 AC_PATH_PROG(dot, dot)
 AC_PATH_PROG(lsof, lsof, lsof)
-NEED_PROG(jq, jq)
 
 
 AC_SUBST(coreutils, [$(dirname $(type -p cat))])

@@ -133,6 +132,30 @@ AC_ARG_WITH(store-dir, AS_HELP_STRING([--with-store-dir=PATH],[path of the Nix s
 AC_SUBST(storedir)
 
 
+# Running the functional tests without building Nix is useful for testing
+# different pre-built versions of Nix against each other.
+AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]),
+  ENABLE_BUILD=$enableval, ENABLE_BUILD=yes)
+AC_SUBST(ENABLE_BUILD)
+
+# Building without tests is useful for bootstrapping with a smaller footprint
+# or running the tests in a separate derivation. Otherwise, we do compile and
+# run them.
+AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]),
+  ENABLE_TESTS=$enableval, ENABLE_TESTS=yes)
+AC_SUBST(ENABLE_TESTS)
+
+# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
+AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
+  internal_api_docs=$enableval, internal_api_docs=no)
+AC_SUBST(internal_api_docs)
+
+AS_IF(
+  [test "$ENABLE_BUILD" == "yes" || test "$ENABLE_TEST" == "yes"],
+  [NEED_PROG(jq, jq)])
+
+AS_IF([test "$ENABLE_BUILD" == "yes"],[
+
 # Look for boost, a required dependency.
 # Note that AX_BOOST_BASE only exports *CPP* BOOST_CPPFLAGS, no CXX flags,
 # and CPPFLAGS are not passed to the C++ compiler automatically.

@@ -155,18 +178,6 @@ if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then
 LDFLAGS="-latomic $LDFLAGS"
 fi
 
-# Running the functional tests without building Nix is useful for testing
-# different pre-built versions of Nix against each other.
-AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]),
-  ENABLE_BUILD=$enableval, ENABLE_BUILD=yes)
-AC_SUBST(ENABLE_BUILD)
-# Building without tests is useful for bootstrapping with a smaller footprint
-# or running the tests in a separate derivation. Otherwise, we do compile and
-# run them.
-AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]),
-  ENABLE_TESTS=$enableval, ENABLE_TESTS=yes)
-AC_SUBST(ENABLE_TESTS)
-
 AC_ARG_ENABLE(install-unit-tests, AS_HELP_STRING([--enable-install-unit-tests],[Install the unit tests for running later (default no)]),
   INSTALL_UNIT_TESTS=$enableval, INSTALL_UNIT_TESTS=no)
 AC_SUBST(INSTALL_UNIT_TESTS)

@@ -179,11 +190,6 @@ AC_ARG_WITH(check-lib-dir, AS_HELP_STRING([--with-check-lib-dir=PATH],[path to i
 checklibdir=$withval, checklibdir=$libdir)
 AC_SUBST(checklibdir)
 
-# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
-AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
-  internal_api_docs=$enableval, internal_api_docs=no)
-AC_SUBST(internal_api_docs)
-
 # LTO is currently broken with clang for unknown reasons; ld segfaults in the llvm plugin
 AC_ARG_ENABLE(lto, AS_HELP_STRING([--enable-lto],[Enable LTO (only supported with GCC) [default=no]]),
   lto=$enableval, lto=no)

@@ -282,6 +288,8 @@ case "$host_os" in
 esac
 AC_SUBST(HAVE_SECCOMP, [$have_seccomp])
 
+# Optional dependencies for better normalizing file system data
+AC_CHECK_HEADERS[sys/xattr.h]
+
 # Look for aws-cpp-sdk-s3.
 AC_LANG_PUSH(C++)

@@ -308,8 +316,7 @@ if test "$gc" = yes; then
 AC_DEFINE(HAVE_BOEHMGC, 1, [Whether to use the Boehm garbage collector.])
 fi
 
-if test "$ENABLE_TESTS" = yes; then
+AS_IF([test "$ENABLE_TESTS" == "yes"],[
 
 # Look for gtest.
 PKG_CHECK_MODULES([GTEST], [gtest_main])

@@ -336,12 +343,11 @@ AC_LINK_IFELSE([
 [AC_MSG_ERROR([librapidcheck is not found.])])
 AC_LANG_POP(C++)
 
-fi
+])
 
 # Look for nlohmann/json.
 PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])
 
-
 # documentation generation switch
 AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
   doc_generate=$enableval, doc_generate=yes)

@@ -386,6 +392,8 @@ if test "$embedded_sandbox_shell" = yes; then
 AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.])
 fi
 
+])
+
 # Expand all variables in config.status.
 test "$prefix" = NONE && prefix=$ac_default_prefix
@@ -39,17 +39,21 @@ INPUT = \
   src/libcmd \
   src/libexpr \
   src/libexpr/flake \
-  src/libexpr/tests \
-  src/libexpr/tests/value \
+  tests/unit/libexpr \
+  tests/unit/libexpr/value \
+  tests/unit/libexpr/test \
+  tests/unit/libexpr/test/value \
   src/libexpr/value \
   src/libfetchers \
   src/libmain \
   src/libstore \
   src/libstore/build \
   src/libstore/builtins \
-  src/libstore/tests \
+  tests/unit/libstore \
+  tests/unit/libstore/test \
   src/libutil \
-  src/libutil/tests \
+  tests/unit/libutil \
+  tests/unit/libutil/test \
   src/nix \
   src/nix-env \
   src/nix-store
@@ -13,18 +13,28 @@
 # conventions:
 # - always force (<CODE>!) since this allows re-using file names
 # - group related paths to ease readability
-# - always append new redirects to the end of the file
+# - keep in alphabetical/wildcards-last order, which will reduce version control conflicts
 # - redirects that should have been there but are missing can be inserted where they belong
 
+/advanced-topics/advanced-topics /advanced-topics 301!
+
+/command-ref/command-ref /command-ref 301!
+
+/contributing/contributing /contributing 301!
+
 /expressions/expression-language /language/ 301!
-/expressions/language-values /language/values 301!
 /expressions/language-constructs /language/constructs 301!
 /expressions/language-operators /language/operators 301!
+/expressions/language-values /language/values 301!
 /expressions/* /language/:splat 301!
 
+/installation/installation /installation 301!
+
 /package-management/basic-package-mgmt /command-ref/nix-env 301!
-/package-management/channels* /command-ref/nix-channel 301!
-/package-management/s3-substituter* /command-ref/new-cli/nix3-help-stores#s3-binary-cache-store 301!
+/package-management/channels /command-ref/nix-channel 301!
+/package-management/package-management /package-management 301!
+/package-management/s3-substituter /store/types/s3-binary-cache-store 301!
 
+/protocols/protocols /protocols 301!
+
+/release-notes/release-notes /release-notes 301!
@@ -1,6 +1,6 @@
 let
   inherit (builtins) concatStringsSep attrValues mapAttrs;
-  inherit (import ./utils.nix) optionalString squash;
+  inherit (import <nix/utils.nix>) optionalString squash;
 in
 
 builtinsInfo:
@@ -1,6 +1,6 @@
 let
   inherit (builtins) concatStringsSep attrValues mapAttrs;
-  inherit (import ./utils.nix) optionalString squash;
+  inherit (import <nix/utils.nix>) optionalString squash;
 in
 
 builtinsInfo:

@@ -8,7 +8,15 @@ let
   showBuiltin = name: { doc, args, arity, experimental-feature }:
     let
       experimentalNotice = optionalString (experimental-feature != null) ''
-        This function is only available if the [${experimental-feature}](@docroot@/contributing/experimental-features.md#xp-feature-${experimental-feature}) experimental feature is enabled.
+        > **Note**
+        >
+        > This function is only available if the [`${experimental-feature}` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-${experimental-feature}) is enabled.
+        >
+        > For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md):
+        >
+        > ```
+        > extra-experimental-features = ${experimental-feature}
+        > ```
       '';
     in
     squash ''

@@ -17,10 +25,9 @@ let
       </dt>
       <dd>
 
-      ${doc}
-
       ${experimentalNotice}
 
+      ${doc}
       </dd>
     '';
   listArgs = args: concatStringsSep " " (map (s: "<var>${s}</var>") args);
@@ -1,9 +1,29 @@
 let
   inherit (builtins)
-    attrNames attrValues fromJSON listToAttrs mapAttrs groupBy
-    concatStringsSep concatMap length lessThan replaceStrings sort;
-  inherit (import <nix/utils.nix>) attrsToList concatStrings optionalString filterAttrs trim squash unique;
-  showStoreDocs = import ./generate-store-info.nix;
+    attrNames
+    attrValues
+    concatMap
+    concatStringsSep
+    fromJSON
+    groupBy
+    length
+    lessThan
+    listToAttrs
+    mapAttrs
+    match
+    replaceStrings
+    sort
+    ;
+  inherit (import <nix/utils.nix>)
+    attrsToList
+    concatStrings
+    filterAttrs
+    optionalString
+    squash
+    trim
+    unique
+    ;
+  showStoreDocs = import <nix/generate-store-info.nix>;
 in
 
 inlineHTML: commandDump:

@@ -31,7 +51,7 @@ let
 
       ${maybeSubcommands}
 
-      ${maybeStoreDocs}
+      ${maybeProse}
 
       ${maybeOptions}
     '';

@@ -71,25 +91,56 @@ let
       * [`${command} ${name}`](./${appendName filename name}.md) - ${subcmd.description}
     '';
 
-    # FIXME: this is a hack.
-    # store parameters should not be part of command documentation to begin
-    # with, but instead be rendered on separate pages.
-    maybeStoreDocs = optionalString (details ? doc)
-      (replaceStrings [ "@stores@" ] [ (showStoreDocs inlineHTML commandInfo.stores) ] details.doc);
-
-    maybeOptions = let
-      allVisibleOptions = filterAttrs
-        (_: o: ! o.hiddenCategory)
-        (details.flags // toplevel.flags);
-    in optionalString (allVisibleOptions != {}) ''
-      # Options
-
-      ${showOptions inlineHTML allVisibleOptions}
-
-      > **Note**
-      >
-      > See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags.
-    '';
+    maybeProse =
+      # FIXME: this is a horrible hack to keep `nix help-stores` working.
+      # the correct answer to this is to remove that command and replace it
+      # by statically generated manpages or the output of something like `nix
+      # store info <store type>`.
+      let
+        help-stores = ''
+          ${index}
+
+          ${allStores}
+        '';
+        index = replaceStrings
+          [ "@store-types@" "./local-store.md" "./local-daemon-store.md" ]
+          [ storesOverview "#local-store" "#local-daemon-store" ]
+          details.doc;
+        storesOverview =
+          let
+            showEntry = store:
+              "- [${store.name}](#${store.slug})";
+          in
+          concatStringsSep "\n" (map showEntry storesList) + "\n";
+        allStores = concatStringsSep "\n" (attrValues storePages);
+        storePages = listToAttrs
+          (map (s: { name = s.filename; value = s.page; }) storesList);
+        storesList = showStoreDocs {
+          storeInfo = commandInfo.stores;
+          inherit inlineHTML;
+        };
+      in
+      optionalString (details ? doc) (
+        if match "@store-types@" details.doc != [ ]
+        then help-stores
+        else details.doc
+      );
+
+    maybeOptions =
+      let
+        allVisibleOptions = filterAttrs
+          (_: o: ! o.hiddenCategory)
+          (details.flags // toplevel.flags);
+      in
+      optionalString (allVisibleOptions != { }) ''
+        # Options
+
+        ${showOptions inlineHTML allVisibleOptions}
+
+        > **Note**
+        >
+        > See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags.
+      '';

@@ -97,7 +148,7 @@ let
       ${optionalString (cat != "") "## ${cat}"}
 
       ${concatStringsSep "\n" (attrValues (mapAttrs showOption opts))}
     '';
   showOption = name: option:
     let
       result = trim ''
@@ -1,6 +1,6 @@
 let
   inherit (builtins) attrValues concatStringsSep isAttrs isBool mapAttrs;
-  inherit (import ./utils.nix) concatStrings indent optionalString squash;
+  inherit (import <nix/utils.nix>) concatStrings indent optionalString squash;
 in
 
 # `inlineHTML` is a hack to accommodate inconsistent output from `lowdown`

@@ -20,10 +20,10 @@ let
     else "`${setting}`";
   # separate body to cleanly handle indentation
   body = ''
-    ${description}
-
     ${experimentalFeatureNote}
 
+    ${description}
+
     **Default:** ${showDefault documentDefault defaultValue}
 
     ${showAliases aliases}

@@ -31,18 +31,19 @@ let
 
   experimentalFeatureNote = optionalString (experimentalFeature != null) ''
     > **Warning**
+    >
     > This setting is part of an
     > [experimental feature](@docroot@/contributing/experimental-features.md).
+    >
-    To change this setting, you need to make sure the corresponding experimental feature,
-    [`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}),
-    is enabled.
-    For example, include the following in [`nix.conf`](#):
+    > To change this setting, make sure the
+    > [`${experimentalFeature}` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature})
+    > is enabled.
+    > For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md):
+    >
-    ```
-    extra-experimental-features = ${experimentalFeature}
-    ${setting} = ...
-    ```
+    > ```
+    > extra-experimental-features = ${experimentalFeature}
+    > ${setting} = ...
+    > ```
   '';
 
   showDefault = documentDefault: defaultValue:
@@ -1,45 +1,57 @@
 let
-  inherit (builtins) attrValues mapAttrs;
-  inherit (import ./utils.nix) concatStrings optionalString;
-  showSettings = import ./generate-settings.nix;
+  inherit (builtins) attrNames listToAttrs concatStringsSep readFile replaceStrings;
+  inherit (import <nix/utils.nix>) optionalString filterAttrs trim squash toLower unique indent;
+  showSettings = import <nix/generate-settings.nix>;
 in
 
-inlineHTML: storesInfo:
+{
+  # data structure describing all stores and their parameters
+  storeInfo,
+  # whether to add inline HTML tags
+  # `lowdown` does not eat those for one of the output modes
+  inlineHTML,
+}:
 
 let
 
-  showStore = name: { settings, doc, experimentalFeature }:
+  showStore = { name, slug }: { settings, doc, experimentalFeature }:
     let
-
-      result = ''
-        ## ${name}
+      result = squash ''
+        # ${name}
 
-        ${doc}
+        ${experimentalFeatureNote}
 
-        ${experimentalFeatureNote}
+        ${doc}
 
-        ### Settings
+        ## Settings
 
         ${showSettings { prefix = "store-${slug}"; inherit inlineHTML; } settings}
       '';
 
-      # markdown doesn't like spaces in URLs
-      slug = builtins.replaceStrings [ " " ] [ "-" ] name;
-
       experimentalFeatureNote = optionalString (experimentalFeature != null) ''
         > **Warning**
+        >
         > This store is part of an
         > [experimental feature](@docroot@/contributing/experimental-features.md).
-
-        To use this store, you need to make sure the corresponding experimental feature,
-        [`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}),
-        is enabled.
-        For example, include the following in [`nix.conf`](#):
-
-        ```
-        extra-experimental-features = ${experimentalFeature}
-        ```
+        >
+        > To use this store, make sure the
+        > [`${experimentalFeature}` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature})
+        > is enabled.
+        > For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md):
+        >
+        > ```
+        > extra-experimental-features = ${experimentalFeature}
+        > ```
       '';
     in result;
 
-in concatStrings (attrValues (mapAttrs showStore storesInfo))
+  storesList = map
+    (name: rec {
+      inherit name;
+      slug = replaceStrings [ " " ] [ "-" ] (toLower name);
+      filename = "${slug}.md";
+      page = showStore { inherit name slug; } storeInfo.${name};
+    })
+    (attrNames storeInfo);
+
+in storesList
doc/manual/generate-store-types.nix (new file, 39 lines)

@@ -0,0 +1,39 @@
let
  inherit (builtins) attrNames listToAttrs concatStringsSep readFile replaceStrings;
  showSettings = import <nix/generate-settings.nix>;
  showStoreDocs = import <nix/generate-store-info.nix>;
in

storeInfo:

let
  storesList = showStoreDocs {
    inherit storeInfo;
    inlineHTML = true;
  };

  index =
    let
      showEntry = store:
        "- [${store.name}](./${store.filename})";
    in
    concatStringsSep "\n" (map showEntry storesList);

  "index.md" = replaceStrings
    [ "@store-types@" ] [ index ]
    (readFile ./src/store/types/index.md.in);

  tableOfContents =
    let
      showEntry = store:
        "  - [${store.name}](store/types/${store.filename})";
    in
    concatStringsSep "\n" (map showEntry storesList) + "\n";

  "SUMMARY.md" = tableOfContents;

  storePages = listToAttrs
    (map (s: { name = s.filename; value = s.page; }) storesList);

in
storePages // { inherit "index.md" "SUMMARY.md"; }
@@ -1,5 +1,5 @@
 with builtins;
-with import ./utils.nix;
+with import <nix/utils.nix>;
 
 let
   showExperimentalFeature = name: doc:
@@ -1,5 +1,5 @@
 with builtins;
-with import ./utils.nix;
+with import <nix/utils.nix>;
 
 let
   showExperimentalFeature = name: doc:

@@ -8,4 +8,6 @@ let
 
       ${doc}
     '';
-in xps: (concatStringsSep "\n" (attrValues (mapAttrs showExperimentalFeature xps)))
+in
+
+xps: (concatStringsSep "\n" (attrValues (mapAttrs showExperimentalFeature xps)))
@@ -1,5 +1,10 @@
 ifeq ($(doc_generate),yes)
 
+# The version of Nix used to generate the doc. Can also be
+# `$(nix_INSTALL_PATH)` or just `nix` (to grab ambient from the `PATH`),
+# if one prefers.
+doc_nix = $(nix_PATH)
+
 MANUAL_SRCS := \
   $(call rwildcard, $(d)/src, *.md) \
   $(call rwildcard, $(d)/src, */*.md)

@@ -32,7 +37,7 @@ dummy-env = env -i \
   NIX_STATE_DIR=/dummy \
   NIX_CONFIG='cores = 0'
 
-nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -I nix=doc/manual --store dummy:// --impure --raw
+nix-eval = $(dummy-env) $(doc_nix) eval --experimental-features nix-command -I nix=doc/manual --store dummy:// --impure --raw
 
 # re-implement mdBook's include directive to make it usable for terminal output and for proper @docroot@ substitution
 define process-includes

@@ -92,56 +97,63 @@ $(d)/nix-profiles.5: $(d)/src/command-ref/files/profiles.md
   $(trace-gen) lowdown -sT man --nroff-nolinks -M section=5 $^.tmp -o $@
   @rm $^.tmp
 
-$(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/SUMMARY-rl-next.md $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md
+$(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/SUMMARY-rl-next.md $(d)/src/store/types $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md
   @cp $< $@
   @$(call process-includes,$@,$@)
 
+$(d)/src/store/types: $(d)/nix.json $(d)/utils.nix $(d)/generate-store-info.nix $(d)/generate-store-types.nix $(d)/src/store/types/index.md.in $(doc_nix)
+  @# FIXME: build out of tree!
+  @rm -rf $@.tmp
+  $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-store-types.nix (builtins.fromJSON (builtins.readFile $<)).stores'
+  @# do not destroy existing contents
+  @mv $@.tmp/* $@/
+
-$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(d)/generate-settings.nix $(d)/generate-store-info.nix $(bindir)/nix
+$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(d)/generate-settings.nix $(d)/generate-store-info.nix $(doc_nix)
   @rm -rf $@ $@.tmp
   $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix true (builtins.readFile $<)'
   @mv $@.tmp $@
 
-$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/generate-settings.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(bindir)/nix
+$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/generate-settings.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(doc_nix)
   @cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
   $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-settings.nix { prefix = "conf"; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp;
   @mv $@.tmp $@
 
-$(d)/nix.json: $(bindir)/nix
-  $(trace-gen) $(dummy-env) $(bindir)/nix __dump-cli > $@.tmp
+$(d)/nix.json: $(doc_nix)
+  $(trace-gen) $(dummy-env) $(doc_nix) __dump-cli > $@.tmp
   @mv $@.tmp $@
 
-$(d)/conf-file.json: $(bindir)/nix
-  $(trace-gen) $(dummy-env) $(bindir)/nix config show --json --experimental-features nix-command > $@.tmp
+$(d)/conf-file.json: $(doc_nix)
+  $(trace-gen) $(dummy-env) $(doc_nix) config show --json --experimental-features nix-command > $@.tmp
   @mv $@.tmp $@
 
-$(d)/src/contributing/experimental-feature-descriptions.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features.nix $(bindir)/nix
+$(d)/src/contributing/experimental-feature-descriptions.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features.nix $(doc_nix)
   @rm -rf $@ $@.tmp
   $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-xp-features.nix (builtins.fromJSON (builtins.readFile $<))'
   @mv $@.tmp $@
 
-$(d)/src/command-ref/experimental-features-shortlist.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features-shortlist.nix $(bindir)/nix
+$(d)/src/command-ref/experimental-features-shortlist.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features-shortlist.nix $(doc_nix)
   @rm -rf $@ $@.tmp
   $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-xp-features-shortlist.nix (builtins.fromJSON (builtins.readFile $<))'
   @mv $@.tmp $@
 
-$(d)/xp-features.json: $(bindir)/nix
-  $(trace-gen) $(dummy-env) $(bindir)/nix __dump-xp-features > $@.tmp
+$(d)/xp-features.json: $(doc_nix)
+  $(trace-gen) $(dummy-env) $(doc_nix) __dump-xp-features > $@.tmp
  @mv $@.tmp $@
 
-$(d)/src/language/builtins.md: $(d)/language.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix
+$(d)/src/language/builtins.md: $(d)/language.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(doc_nix)
   @cat doc/manual/src/language/builtins-prefix.md > $@.tmp
   $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<)).builtins' >> $@.tmp;
   @cat doc/manual/src/language/builtins-suffix.md >> $@.tmp
   @mv $@.tmp $@
 
-$(d)/src/language/builtin-constants.md: $(d)/language.json $(d)/generate-builtin-constants.nix $(d)/src/language/builtin-constants-prefix.md $(bindir)/nix
+$(d)/src/language/builtin-constants.md: $(d)/language.json $(d)/generate-builtin-constants.nix $(d)/src/language/builtin-constants-prefix.md $(doc_nix)
   @cat doc/manual/src/language/builtin-constants-prefix.md > $@.tmp
   $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtin-constants.nix (builtins.fromJSON (builtins.readFile $<)).constants' >> $@.tmp;
   @cat doc/manual/src/language/builtin-constants-suffix.md >> $@.tmp
   @mv $@.tmp $@
 
-$(d)/language.json: $(bindir)/nix
-  $(trace-gen) $(dummy-env) $(bindir)/nix __dump-language > $@.tmp
+$(d)/language.json: $(doc_nix)
  $(trace-gen) $(dummy-env) $(doc_nix) __dump-language > $@.tmp
   @mv $@.tmp $@
 
 # Generate "Upcoming release" notes (or clear it and remove from menu)

@@ -195,7 +207,7 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
 # `@docroot@` is to be preserved for documenting the mechanism
 # FIXME: maybe contributing guides should live right next to the code
 # instead of in the manual
-$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md $(d)/src/language/builtin-constants.md $(d)/src/release-notes/rl-next.md
+$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/store/types $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md $(d)/src/language/builtin-constants.md $(d)/src/release-notes/rl-next.md
   $(trace-gen) \
     tmp="$$(mktemp -d)"; \
     cp -r doc/manual "$$tmp"; \
@@ -0,0 +1,7 @@
---
synopsis: Option `allowed-uris` can now match whole schemes in URIs without slashes
prs: 9547
---

If a scheme, such as `github:`, is specified in the `allowed-uris` option, all URIs starting with `github:` are allowed.
Previously this only worked for schemes whose URIs used the `://` syntax.
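
For illustration, a `nix.conf` entry exercising the new behaviour might look like this (the specific scheme and URL prefix are examples, not part of this change):

```
allowed-uris = github: https://github.com/NixOS/
```

With such an entry, a slash-less URI like `github:NixOS/nixpkgs` is now accepted, while the `https://` entry keeps working as an ordinary prefix match.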
|
8
doc/manual/rl-next/cgroup-stats.md
Normal file
8
doc/manual/rl-next/cgroup-stats.md
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
---
|
||||||
|
synopsis: Include cgroup stats when building through the daemon
|
||||||
|
prs: 9598
|
||||||
|
---
|
||||||
|
|
||||||
|
Nix now also reports cgroup statistics when building through the nix daemon and when doing remote builds using ssh-ng,
|
||||||
|
if both sides of the connection are this version of Nix or newer.
|
||||||
|
|
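
As a sketch of a setup this affects (the store URL and installable are illustrative), a remote build driven through the `ssh-ng` store:

```
$ nix build --store ssh-ng://builder.example.org nixpkgs#hello
```

When both ends of the connection run a new enough Nix, the build statistics reported back may now include cgroup data.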
doc/manual/rl-next/hash-format-nix32.md (new file, 23 lines)

@@ -0,0 +1,23 @@
---
synopsis: Rename hash format `base32` to `nix32`
prs: 9452
---

Hash format `base32` was renamed to `nix32` since it used a special Nix-specific character set for
[Base32](https://en.wikipedia.org/wiki/Base32).

## Deprecation: Use `nix32` instead of `base32` as `toHashFormat`

For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from`
parameters of the `nix hash convert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value
remains as a deprecated alias for `"nix32"`. Please convert your code from:

```nix
builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";}
```

to

```nix
builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";}
```
@@ -1,9 +1,8 @@
+---
 synopsis: Mounted SSH Store
-issues: #7890
-prs: #7912
-description: {
+issues: 7890
+prs: 7912
+---
 
 Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md).
 This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem.
-
-}
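
As a hypothetical usage sketch (user, host, and installable invented for illustration), the new store is addressed like any other store URL:

```
$ nix build --store mounted-ssh-ng://alice@builder.example.org nixpkgs#hello
```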
@@ -1,8 +1,7 @@
-synopsis: `nix config show`
-issues: #7672
-prs: #9477
-description: {
+---
+synopsis: Rename to `nix config show`
+issues: 7672
+prs: 9477
+---
 
-`nix show-config` was renamed to `nix config show` to be more consistent with the rest of the command-line interface.
-
-}
+`nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface.
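
The renamed commands map one-to-one onto the old ones:

```
$ nix config show    # formerly: nix show-config
$ nix config check   # formerly: nix doctor
```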
@@ -1,9 +1,6 @@
+---
 synopsis: Fix `nix-env --query --drv-path --json`
-prs: #9257
-description: {
+prs: 9257
+---
 
 Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set.
-
-}
-
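
A sketch of an invocation exercising the fixed flag combination (the package name is illustrative):

```
$ nix-env --query --available --json --drv-path hello
```

With the fix, each entry in the JSON output should again carry its derivation path.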
doc/manual/rl-next/nix-hash-convert.md (new file, 47 lines)

@@ -0,0 +1,47 @@
---
synopsis: Add `nix hash convert`
prs: 9452
---

New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) sub command with a fast track
to stabilization! Examples:

- Convert the hash to `nix32`.

  ```bash
  $ nix hash convert --algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df"
  vw46m23bizj4n8afrc0fj19wrp7mj3c0
  ```

  `nix32` is a base32 encoding with a Nix-specific character set.
  Explicitly specify the hashing algorithm (optional with SRI hashes) but detect hash format by the length of the input
  hash.

- Convert the hash to the `sri` format that includes an algorithm specification:

  ```bash
  nix hash convert --algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df"
  sha1-gA1Zz808BekAy04hS+SPa4hqCN8=
  ```

  or with an explicit `--to` format:

  ```bash
  nix hash convert --algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df"
  sha1-gA1Zz808BekAy04hS+SPa4hqCN8=
  ```

- Assert the input format of the hash:

  ```bash
  nix hash convert --algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
  error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32'
  nix hash convert --algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"
  sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=
  ```

The `--to`/`--from`/`--algo` parameters have context-sensitive auto-completion.

## Related Deprecations

The following commands are still available but will emit a deprecation warning. Please convert your code to
`nix hash convert`:

- `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead.
- `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead.
- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead.
- `nix hash to-sri $hash1 $hash2`: Use `nix hash convert --to sri $hash1 $hash2`
  or even just `nix hash convert $hash1 $hash2` instead.
doc/manual/rl-next/source-positions-in-errors.md (new file, 42 lines)

@@ -0,0 +1,42 @@
---
synopsis: Source locations are printed more consistently in errors
issues: 561
prs: 9555
---

Source location information is now included in error messages more
consistently. Given this code:

```nix
let
  attr = {foo = "bar";};
  key = {};
in
  attr.${key}
```

Previously, Nix would show this unhelpful message when attempting to evaluate
it:

```
error:
       … while evaluating an attribute name

       error: value is a set while a string was expected
```

Now, the error message displays where the problematic value was found:

```
error:
       … while evaluating an attribute name

         at bad.nix:4:11:

            3|   key = {};
            4| in attr.${key}
             |           ^
            5|

       error: value is a set while a string was expected
```
@@ -2,7 +2,7 @@
 
 - [Introduction](introduction.md)
 - [Quick Start](quick-start.md)
-- [Installation](installation/installation.md)
+- [Installation](installation/index.md)
   - [Supported Platforms](installation/supported-platforms.md)
   - [Installing a Binary Distribution](installation/installing-binary.md)
   - [Installing Nix from Source](installation/installing-source.md)

@@ -20,6 +20,8 @@
   - [File System Object](store/file-system-object.md)
   - [Store Object](store/store-object.md)
   - [Store Path](store/store-path.md)
+  - [Store Types](store/types/index.md)
+{{#include ./store/types/SUMMARY.md}}
 - [Nix Language](language/index.md)
   - [Data Types](language/values.md)
   - [Language Constructs](language/constructs.md)

@@ -31,11 +33,11 @@
   - [Import From Derivation](language/import-from-derivation.md)
   - [Built-in Constants](language/builtin-constants.md)
   - [Built-in Functions](language/builtins.md)
-- [Package Management](package-management/package-management.md)
+- [Package Management](package-management/index.md)
   - [Profiles](package-management/profiles.md)
   - [Garbage Collection](package-management/garbage-collection.md)
   - [Garbage Collector Roots](package-management/garbage-collector-roots.md)
-- [Advanced Topics](advanced-topics/advanced-topics.md)
+- [Advanced Topics](advanced-topics/index.md)
   - [Sharing Packages Between Machines](package-management/sharing-packages.md)
   - [Serving a Nix store via HTTP](package-management/binary-cache-substituter.md)
   - [Copying Closures via SSH](package-management/copy-closure.md)

@@ -45,7 +47,7 @@
   - [Tuning Cores and Jobs](advanced-topics/cores-vs-jobs.md)
   - [Verifying Build Reproducibility](advanced-topics/diff-hook.md)
   - [Using the `post-build-hook`](advanced-topics/post-build-hook.md)
-- [Command Reference](command-ref/command-ref.md)
+- [Command Reference](command-ref/index.md)
   - [Common Options](command-ref/opt-common.md)
   - [Common Environment Variables](command-ref/env-common.md)
   - [Main Commands](command-ref/main-commands.md)

@@ -102,18 +104,18 @@
   - [Channels](command-ref/files/channels.md)
   - [Default Nix expression](command-ref/files/default-nix-expression.md)
 - [Architecture and Design](architecture/architecture.md)
-- [Protocols](protocols/protocols.md)
+- [Protocols](protocols/index.md)
   - [Serving Tarball Flakes](protocols/tarball-fetcher.md)
   - [Derivation "ATerm" file format](protocols/derivation-aterm.md)
 - [Glossary](glossary.md)
-- [Contributing](contributing/contributing.md)
+- [Contributing](contributing/index.md)
   - [Hacking](contributing/hacking.md)
   - [Testing](contributing/testing.md)
   - [Documentation](contributing/documentation.md)
   - [Experimental Features](contributing/experimental-features.md)
   - [CLI guideline](contributing/cli-guideline.md)
   - [C++ style guide](contributing/cxx.md)
-- [Release Notes](release-notes/release-notes.md)
+- [Release Notes](release-notes/index.md)
 {{#include ./SUMMARY-rl-next.md}}
   - [Release 2.19 (2023-11-17)](release-notes/rl-2.19.md)
   - [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md)
@ -52,7 +52,7 @@ The following [concept map] shows its main components (rectangles), the objects
|
||||||
'---------------'
|
'---------------'
|
||||||
```
|
```
|
||||||
|
|
||||||
At the top is the [command line interface](../command-ref/command-ref.md) that drives the underlying layers.
|
At the top is the [command line interface](../command-ref/index.md) that drives the underlying layers.
|
||||||
|
|
||||||
The [Nix language](../language/index.md) evaluator transforms Nix expressions into self-contained *build plans*, which are used to derive *build results* from referenced *build inputs*.
|
The [Nix language](../language/index.md) evaluator transforms Nix expressions into self-contained *build plans*, which are used to derive *build results* from referenced *build inputs*.
|
||||||
|
|
||||||
|
|
|
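To make the notion of a *build plan* concrete, here is a minimal sketch (hypothetical, with an assumed `system` value) of an expression the evaluator turns into a derivation, which the store layer can then realise into a build result:

```nix
# sketch: evaluating this expression yields a derivation (a build plan);
# nix-instantiate would write it to the store as a .drv file
derivation {
  name = "hello-plan";
  system = "x86_64-linux";             # assumed platform
  builder = "/bin/sh";                 # a build input referenced by the plan
  args = [ "-c" "echo hello > $out" ];
}
```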
@ -87,7 +87,7 @@ impacted the most by bad user experience.
|
||||||
and [aligning of text](#text-alignment).
|
and [aligning of text](#text-alignment).
|
||||||
- [Autocomplete](#shell-completion) of options.
|
- [Autocomplete](#shell-completion) of options.
|
||||||
|
|
||||||
Examples of such commands: `nix doctor`, `nix edit`, `nix eval`, ...
|
Examples of such commands: `nix edit`, `nix eval`, ...
|
||||||
|
|
||||||
- **Utility and scripting commands**
|
- **Utility and scripting commands**
|
||||||
|
|
||||||
|
@ -426,7 +426,7 @@ This leads to the following guidelines:
|
||||||
### Examples
|
### Examples
|
||||||
|
|
||||||
|
|
||||||
This is bad, because all keys must be assumed to be store implementations:
|
This is bad, because all keys must be assumed to be store types:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
|
|
|
@ -10,7 +10,7 @@ $ cd nix
|
||||||
|
|
||||||
The following instructions assume you already have some version of Nix installed locally, so that you can use it to set up the development environment. If you don't have it installed, follow the [installation instructions].
|
The following instructions assume you already have some version of Nix installed locally, so that you can use it to set up the development environment. If you don't have it installed, follow the [installation instructions].
|
||||||
|
|
||||||
[installation instructions]: ../installation/installation.md
|
[installation instructions]: ../installation/index.md
|
||||||
|
|
||||||
## Building Nix with flakes
|
## Building Nix with flakes
|
||||||
|
|
||||||
|
@ -146,6 +146,31 @@ $ nix build .#packages.aarch64-linux.default
|
||||||
Cross-compiled builds are available for ARMv6 (`armv6l-linux`) and ARMv7 (`armv7l-linux`).
|
Cross-compiled builds are available for ARMv6 (`armv6l-linux`) and ARMv7 (`armv7l-linux`).
|
||||||
Add more [system types](#system-type) to `crossSystems` in `flake.nix` to bootstrap Nix on unsupported platforms.
|
Add more [system types](#system-type) to `crossSystems` in `flake.nix` to bootstrap Nix on unsupported platforms.
|
||||||
|
|
||||||
|
### Building for multiple platforms at once
|
||||||
|
|
||||||
|
It is useful to perform multiple cross and native builds on the same source tree,
|
||||||
|
for example to ensure that better support for one platform doesn't break the build for another.
|
||||||
|
In order to facilitate this, Nix has some support for being built out of tree – that is, placing build artefacts in a different directory than the source code:
|
||||||
|
|
||||||
|
1. Create a directory for the build, e.g.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mkdir build
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Run the configure script from that directory, e.g.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd build
|
||||||
|
../configure <configure flags>
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Run make from the source directory, but with the build directory specified, e.g.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make builddir=build <make flags>
|
||||||
|
```
|
||||||
|
|
||||||
## System type
|
## System type
|
||||||
|
|
||||||
Nix uses a string with the following format to identify the *system type* or *platform* it runs on:
|
Nix uses a string with the following format to identify the *system type* or *platform* it runs on:
|
||||||
|
@ -232,17 +257,16 @@ User-visible changes should come with a release note.
|
||||||
Here's what a complete entry looks like. The file name is not incorporated in the document.
|
Here's what a complete entry looks like. The file name is not incorporated in the document.
|
||||||
|
|
||||||
```
|
```
|
||||||
|
---
|
||||||
synopsis: Basically a title
|
synopsis: Basically a title
|
||||||
issues: #1234
|
issues: 1234
|
||||||
prs: #1238
|
prs: 1238
|
||||||
description: {
|
---
|
||||||
|
|
||||||
Here's one or more paragraphs that describe the change.
|
Here's one or more paragraphs that describe the change.
|
||||||
|
|
||||||
- It's markdown
|
- It's markdown
|
||||||
- Add references to the manual using @docroot@
|
- Add references to the manual using @docroot@
|
||||||
|
|
||||||
}
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Significant changes should add the following header, which moves them to the top.
|
Significant changes should add the following header, which moves them to the top.
|
||||||
|
@ -258,3 +282,45 @@ See also the [format documentation](https://github.com/haskell/cabal/blob/master
|
||||||
|
|
||||||
Releases have a precomputed `rl-MAJOR.MINOR.md`, and no `rl-next.md`.
|
Releases have a precomputed `rl-MAJOR.MINOR.md`, and no `rl-next.md`.
|
||||||
Set `buildUnreleasedNotes = true;` in `flake.nix` to build the release notes on the fly.
|
Set `buildUnreleasedNotes = true;` in `flake.nix` to build the release notes on the fly.
|
||||||
|
|
||||||
|
## Branches
|
||||||
|
|
||||||
|
- [`master`](https://github.com/NixOS/nix/commits/master)
|
||||||
|
|
||||||
|
The main development branch. All changes are approved and merged here.
|
||||||
|
When developing a change, create a branch based on the latest `master`.
|
||||||
|
|
||||||
|
Maintainers try to [keep it in a release-worthy state](#reverting).
|
||||||
|
|
||||||
|
- [`maintenance-*.*`](https://github.com/NixOS/nix/branches/all?query=maintenance)
|
||||||
|
|
||||||
|
These branches are the subject of backports only, and are
|
||||||
|
also [kept](#reverting) in a release-worthy state.
|
||||||
|
|
||||||
|
See [`maintainers/backporting.md`](https://github.com/NixOS/nix/blob/master/maintainers/backporting.md)
|
||||||
|
|
||||||
|
- [`latest-release`](https://github.com/NixOS/nix/tree/latest-release)
|
||||||
|
|
||||||
|
The latest patch release of the latest minor version.
|
||||||
|
|
||||||
|
See [`maintainers/release-process.md`](https://github.com/NixOS/nix/blob/master/maintainers/release-process.md)
|
||||||
|
|
||||||
|
- [`backport-*-to-*`](https://github.com/NixOS/nix/branches/all?query=backport)
|
||||||
|
|
||||||
|
Generally branches created by the backport action.
|
||||||
|
|
||||||
|
See [`maintainers/backporting.md`](https://github.com/NixOS/nix/blob/master/maintainers/backporting.md)
|
||||||
|
|
||||||
|
- [_other_](https://github.com/NixOS/nix/branches/all)
|
||||||
|
|
||||||
|
Branches that do not conform to the above patterns should be feature branches.
|
||||||
|
|
||||||
|
## Reverting
|
||||||
|
|
||||||
|
If a change turns out to be merged by mistake, or to contain a regression, it may be reverted.
|
||||||
|
A revert is not a rejection of the contribution, but merely part of an effective development process.
|
||||||
|
It makes sure that development keeps running smoothly, with minimal uncertainty, and less overhead.
|
||||||
|
If maintainers had to worry too much about avoiding reverts, they would not be able to merge as much.
|
||||||
|
By embracing reverts as a good part of the development process, everyone wins.
|
||||||
|
|
||||||
|
However, taking a step back may be frustrating, so maintainers will be extra supportive on the next try.
|
||||||
|
|
|
@ -20,6 +20,7 @@ The unit tests are defined using the [googletest] and [rapidcheck] frameworks.
|
||||||
|
|
||||||
[googletest]: https://google.github.io/googletest/
|
[googletest]: https://google.github.io/googletest/
|
||||||
[rapidcheck]: https://github.com/emil-e/rapidcheck
|
[rapidcheck]: https://github.com/emil-e/rapidcheck
|
||||||
|
[property testing]: https://en.wikipedia.org/wiki/Property_testing
|
||||||
|
|
||||||
### Source and header layout
|
### Source and header layout
|
||||||
|
|
||||||
|
@ -28,34 +29,50 @@ The unit tests are defined using the [googletest] and [rapidcheck] frameworks.
|
||||||
> ```
|
> ```
|
||||||
> src
|
> src
|
||||||
> ├── libexpr
|
> ├── libexpr
|
||||||
|
> │ ├── local.mk
|
||||||
> │ ├── value/context.hh
|
> │ ├── value/context.hh
|
||||||
> │ ├── value/context.cc
|
> │ ├── value/context.cc
|
||||||
|
> │ …
|
||||||
|
> │
|
||||||
|
> ├── tests
|
||||||
> │ │
|
> │ │
|
||||||
> │ …
|
> │ …
|
||||||
> └── tests
|
> │ └── unit
|
||||||
> │ ├── value/context.hh
|
> │ ├── libutil
|
||||||
> │ ├── value/context.cc
|
> │ │ ├── local.mk
|
||||||
|
> │ │ …
|
||||||
|
> │ │ └── data
|
||||||
|
> │ │ ├── git/tree.txt
|
||||||
|
> │ │ …
|
||||||
> │ │
|
> │ │
|
||||||
> │ …
|
> │ ├── libexpr-support
|
||||||
> │
|
> │ │ ├── local.mk
|
||||||
> ├── unit-test-data
|
> │ │ └── tests
|
||||||
> │ ├── libstore
|
> │ │ ├── value/context.hh
|
||||||
> │ │ ├── worker-protocol/content-address.bin
|
> │ │ ├── value/context.cc
|
||||||
> │ │ …
|
> │ │ …
|
||||||
> │ …
|
> │ │
|
||||||
|
> │ ├── libexpr
|
||||||
|
> │ … ├── local.mk
|
||||||
|
> │ ├── value/context.cc
|
||||||
|
> │ …
|
||||||
> …
|
> …
|
||||||
> ```
|
> ```
|
||||||
|
|
||||||
The unit tests for each Nix library (`libnixexpr`, `libnixstore`, etc.) live inside a directory `src/${library_shortname}/tests` within the directory for the library (`src/${library_shortname}`).
|
The tests for each Nix library (`libnixexpr`, `libnixstore`, etc.) live inside a directory `tests/unit/${library_name_without-nix}`.
|
||||||
|
Given an interface (header) and implementation pair in the original library, say, `src/libexpr/value/context.{hh,cc}`, we write tests for it in `tests/unit/libexpr/tests/value/context.cc`, and (possibly) declare/define additional interfaces for testing purposes in `tests/unit/libexpr-support/tests/value/context.{hh,cc}`.
|
||||||
|
|
||||||
The data is in `unit-test-data`, with one subdir per library, with the same name as where the code goes.
|
Data for unit tests is stored in a `data` subdir of the directory for each unit test executable.
|
||||||
For example, `libnixstore` code is in `src/libstore`, and its test data is in `unit-test-data/libstore`.
|
For example, `libnixstore` code is in `src/libstore`, and its test data is in `tests/unit/libstore/data`.
|
||||||
The path to the `unit-test-data` directory is passed to the unit test executable with the environment variable `_NIX_TEST_UNIT_DATA`.
|
The path to the `tests/unit/data` directory is passed to the unit test executable with the environment variable `_NIX_TEST_UNIT_DATA`.
|
||||||
|
Note that each executable only gets the data for its tests.
|
||||||
|
|
||||||
> **Note**
|
The unit test libraries are in `tests/unit/${library_name_without-nix}-lib`.
|
||||||
> Due to the way googletest works, downstream unit test executables will actually include and re-run upstream library tests.
|
All headers are in a `tests` subdirectory so they are included with `#include "tests/"`.
|
||||||
> Therefore it is important that the same value for `_NIX_TEST_UNIT_DATA` be used with the tests for each library.
|
|
||||||
> That is why we have the test data nested within a single `unit-test-data` directory.
|
The use of all these separate directories for the unit tests might seem inconvenient, as for example the tests are not "right next to" the part of the code they are testing.
|
||||||
|
But organizing the tests this way has one big benefit:
|
||||||
|
there is no risk of any build-system wildcards for the library accidentally picking up test code that should not be built and installed as part of the library.
|
||||||
|
|
||||||
### Running tests
|
### Running tests
|
||||||
|
|
||||||
|
@ -69,7 +86,7 @@ See [functional characterisation testing](#characterisation-testing-functional)
|
||||||
Like with the functional characterisation, `_NIX_TEST_ACCEPT=1` is also used.
|
Like with the functional characterisation, `_NIX_TEST_ACCEPT=1` is also used.
|
||||||
For example:
|
For example:
|
||||||
```shell-session
|
```shell-session
|
||||||
$ _NIX_TEST_ACCEPT=1 make libstore-tests-exe_RUN
|
$ _NIX_TEST_ACCEPT=1 make libstore-tests_RUN
|
||||||
...
|
...
|
||||||
[ SKIPPED ] WorkerProtoTest.string_read
|
[ SKIPPED ] WorkerProtoTest.string_read
|
||||||
[ SKIPPED ] WorkerProtoTest.string_write
|
[ SKIPPED ] WorkerProtoTest.string_write
|
||||||
|
@ -80,6 +97,18 @@ $ _NIX_TEST_ACCEPT=1 make libstore-tests-exe_RUN
|
||||||
will regenerate the "golden master" expected result for the `libnixstore` characterisation tests.
|
will regenerate the "golden master" expected result for the `libnixstore` characterisation tests.
|
||||||
The characterisation tests will mark themselves "skipped" since they regenerated the expected result instead of actually testing anything.
|
The characterisation tests will mark themselves "skipped" since they regenerated the expected result instead of actually testing anything.
|
||||||
|
|
||||||
|
### Unit test support libraries
|
||||||
|
|
||||||
|
There are headers and code which are not just used to test the library in question, but also downstream libraries.
|
||||||
|
For example, we do [property testing] with the [rapidcheck] library.
|
||||||
|
This requires writing `Arbitrary` "instances", which are used to describe how to generate values of a given type for the sake of running property tests.
|
||||||
|
Because types contain other types, `Arbitrary` "instances" for some type are not just useful for testing that type, but also any other type that contains it.
|
||||||
|
Downstream types frequently contain upstream types, so it is very important that we share arbitrary instances so that downstream libraries' property tests can also use them.
|
||||||
|
|
||||||
|
It is important that these testing libraries don't contain any actual tests themselves.
|
||||||
|
On some platforms they would be run as part of every test executable that uses them, which is redundant.
|
||||||
|
On other platforms they wouldn't be run at all.
|
||||||
|
|
||||||
## Functional tests
|
## Functional tests
|
||||||
|
|
||||||
The functional tests reside under the `tests/functional` directory and are listed in `tests/functional/local.mk`.
|
The functional tests reside under the `tests/functional` directory and are listed in `tests/functional/local.mk`.
|
||||||
|
|
|
@ -257,29 +257,18 @@ Derivations can declare some infrequently used optional attributes.
|
||||||
of the environment (typically, a few hundred kilobyte).
|
of the environment (typically, a few hundred kilobyte).
|
||||||
|
|
||||||
- [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\
|
- [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\
|
||||||
If this attribute is set to `true` and [distributed building is
|
If this attribute is set to `true` and [distributed building is enabled](../advanced-topics/distributed-builds.md), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine.
|
||||||
enabled](../advanced-topics/distributed-builds.md), then, if
|
This is useful for derivations that are cheapest to build locally.
|
||||||
possible, the derivation will be built locally instead of forwarded
|
|
||||||
to a remote machine. This is appropriate for trivial builders
|
|
||||||
where the cost of doing a download or remote build would exceed
|
|
||||||
the cost of building locally.
|
|
||||||
|
|
||||||
- [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\
|
- [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\
|
||||||
If this attribute is set to `false`, then Nix will always build this
|
If this attribute is set to `false`, then Nix will always build this derivation (locally or remotely); it will not try to substitute its outputs.
|
||||||
derivation; it will not try to substitute its outputs. This is
|
This is useful for derivations that are cheaper to build than to substitute.
|
||||||
useful for very trivial derivations (such as `writeText` in Nixpkgs)
|
|
||||||
that are cheaper to build than to substitute from a binary cache.
|
|
||||||
|
|
||||||
You may disable the effects of this attribute by enabling the
|
This attribute can be ignored by setting [`always-allow-substitutes`](@docroot@/command-ref/conf-file.md#conf-always-allow-substitutes) to `true`.
|
||||||
`always-allow-substitutes` configuration option in Nix.
|
|
||||||
|
|
||||||
> **Note**
|
> **Note**
|
||||||
>
|
>
|
||||||
> You need to have a builder configured which satisfies the
|
> If set to `false`, the [`builder`](./derivations.md#attr-builder) should be able to run on the system type specified in the [`system` attribute](./derivations.md#attr-system), since the derivation cannot be substituted.
|
||||||
> derivation’s `system` attribute, since the derivation cannot be
|
|
||||||
> substituted. Thus it is usually a good idea to align `system` with
|
|
||||||
> `builtins.currentSystem` when setting `allowSubstitutes` to
|
|
||||||
> `false`. For most trivial derivations this should be the case.
|
|
||||||
|
|
||||||
- [`__structuredAttrs`]{#adv-attr-structuredAttrs}\
|
- [`__structuredAttrs`]{#adv-attr-structuredAttrs}\
|
||||||
If the special attribute `__structuredAttrs` is set to `true`, the other derivation
|
If the special attribute `__structuredAttrs` is set to `true`, the other derivation
|
||||||
|
|
|
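A sketch of how `preferLocalBuild` and `allowSubstitutes` combine on a trivial derivation (a hypothetical example; the name and builder are assumptions):

```nix
# sketch: a trivial derivation that is cheaper to build than to download,
# so it opts out of both remote building and substitution
derivation {
  name = "greeting";
  system = builtins.currentSystem;
  builder = "/bin/sh";
  args = [ "-c" "echo hello > $out" ];
  preferLocalBuild = true;    # build locally rather than on a remote machine
  allowSubstitutes = false;   # always build; never substitute from a cache
}
```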
@ -1,10 +1,9 @@
|
||||||
# Quick Start
|
# Quick Start
|
||||||
|
|
||||||
This chapter is for impatient people who don't like reading
|
This chapter is for impatient people who don't like reading documentation.
|
||||||
documentation. For more in-depth information you are kindly referred
|
For more in-depth information you are kindly referred to subsequent chapters.
|
||||||
to subsequent chapters.
|
|
||||||
|
|
||||||
1. Install Nix by running the following:
|
1. Install Nix:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ curl -L https://nixos.org/nix/install | sh
|
$ curl -L https://nixos.org/nix/install | sh
|
||||||
|
@ -13,87 +12,33 @@ to subsequent chapters.
|
||||||
The install script will use `sudo`, so make sure you have sufficient rights.
|
The install script will use `sudo`, so make sure you have sufficient rights.
|
||||||
On Linux, `--daemon` can be omitted for a single-user install.
|
On Linux, `--daemon` can be omitted for a single-user install.
|
||||||
|
|
||||||
For other installation methods, see [here](installation/installation.md).
|
For other installation methods, see the detailed [installation instructions](installation/index.md).
|
||||||
|
|
||||||
1. See what installable packages are currently available in the
|
1. Run software without installing it permanently:
|
||||||
channel:
|
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env --query --available --attr-path
|
$ nix-shell --packages cowsay lolcat
|
||||||
nixpkgs.docbook_xml_dtd_43 docbook-xml-4.3
|
|
||||||
nixpkgs.docbook_xml_dtd_45 docbook-xml-4.5
|
|
||||||
nixpkgs.firefox firefox-33.0.2
|
|
||||||
nixpkgs.hello hello-2.9
|
|
||||||
nixpkgs.libxslt libxslt-1.1.28
|
|
||||||
…
|
|
||||||
```
|
```
|
||||||
|
|
||||||
1. Install some packages from the channel:
|
This downloads the specified packages with all their dependencies, and drops you into a Bash shell where the commands provided by those packages are present.
|
||||||
|
This will not affect your normal environment:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env --install --attr nixpkgs.hello
|
[nix-shell:~]$ cowsay Hello, Nix! | lolcat
|
||||||
```
|
```
|
||||||
|
|
||||||
This should download pre-built packages; it should not build them
|
Exiting the shell will make the programs disappear again:
|
||||||
locally (if it does, something went wrong).
|
|
||||||
|
|
||||||
1. Test that they work:
|
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ which hello
|
|
||||||
/home/eelco/.nix-profile/bin/hello
|
|
||||||
$ hello
|
|
||||||
Hello, world!
|
|
||||||
```
|
|
||||||
|
|
||||||
1. Uninstall a package:
|
|
||||||
|
|
||||||
```console
|
|
||||||
$ nix-env --uninstall hello
|
|
||||||
```
|
|
||||||
|
|
||||||
1. You can also test a package without installing it:
|
|
||||||
|
|
||||||
```console
|
|
||||||
$ nix-shell --packages hello
|
|
||||||
```
|
|
||||||
|
|
||||||
This builds or downloads GNU Hello and its dependencies, then drops
|
|
||||||
you into a Bash shell where the `hello` command is present, all
|
|
||||||
without affecting your normal environment:
|
|
||||||
|
|
||||||
```console
|
|
||||||
[nix-shell:~]$ hello
|
|
||||||
Hello, world!
|
|
||||||
|
|
||||||
[nix-shell:~]$ exit
|
[nix-shell:~]$ exit
|
||||||
|
$ lolcat
|
||||||
$ hello
|
lolcat: command not found
|
||||||
hello: command not found
|
|
||||||
```
|
```
|
||||||
|
|
||||||
1. To keep up-to-date with the channel, do:
|
1. Search for more packages on [search.nixos.org](https://search.nixos.org) to try them out.
|
||||||
|
|
||||||
|
1. Free up storage space:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-channel --update nixpkgs
|
$ nix-collect-garbage
|
||||||
$ nix-env --upgrade '*'
|
|
||||||
```
|
|
||||||
|
|
||||||
The latter command will upgrade each installed package for which
|
|
||||||
there is a “newer” version (as determined by comparing the version
|
|
||||||
numbers).
|
|
||||||
|
|
||||||
1. If you're unhappy with the result of a `nix-env` action (e.g., an
|
|
||||||
upgraded package turned out not to work properly), you can go back:
|
|
||||||
|
|
||||||
```console
|
|
||||||
$ nix-env --rollback
|
|
||||||
```
|
|
||||||
|
|
||||||
1. You should periodically run the Nix garbage collector to get rid of
|
|
||||||
unused packages, since uninstalls or upgrades don't actually delete
|
|
||||||
them:
|
|
||||||
|
|
||||||
```console
|
|
||||||
$ nix-collect-garbage --delete-old
|
|
||||||
```
|
```
|
||||||
|
|
|
@ -18,7 +18,7 @@
|
||||||
- `nix-shell` shebang lines now support single-quoted arguments.
|
- `nix-shell` shebang lines now support single-quoted arguments.
|
||||||
|
|
||||||
- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/contributing/experimental-features.md#xp-fetch-tree).
|
- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/contributing/experimental-features.md#xp-fetch-tree).
|
||||||
As described in the documentation for that feature, this is because we anticipate polishing it and then stabilizing it before the rest of flakes.
|
This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/contributing/experimental-features.md#xp-flakes).
|
||||||
|
|
||||||
- The interface for creating and updating lock files has been overhauled:
|
- The interface for creating and updating lock files has been overhauled:
|
||||||
|
|
||||||
|
|
|
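For illustration, a minimal `builtins.fetchTree` call under the new `fetch-tree` feature might look as follows (a sketch; the fetched repository and ref are assumptions, not from the release notes):

```nix
# sketch: fetch a source tree from GitHub; requires the experimental
# fetch-tree feature to be enabled
builtins.fetchTree {
  type = "github";
  owner = "NixOS";
  repo = "nixpkgs";
  ref = "nixos-23.05";   # assumed branch, for illustration only
}
```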
@ -2,4 +2,4 @@
|
||||||
|
|
||||||
The *Nix store* is an abstraction to store immutable file system data (such as software packages) that can have dependencies on other such data.
|
The *Nix store* is an abstraction to store immutable file system data (such as software packages) that can have dependencies on other such data.
|
||||||
|
|
||||||
There are multiple implementations of Nix stores with different capabilities, such as the actual filesystem (`/nix/store`) or binary caches.
|
There are [multiple types of Nix stores](./types/index.md) with different capabilities, such as the default one on the [local filesystem](./types/local-store.md) (`/nix/store`) or [binary caches](./types/http-binary-cache-store.md).
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
R"(
|
Nix supports different types of stores:
|
||||||
|
|
||||||
Nix supports different types of stores. These are described below.
|
@store-types@
|
||||||
|
|
||||||
## Store URL format
|
## Store URL format
|
||||||
|
|
||||||
|
@ -29,18 +29,15 @@ supported settings for each store type are documented below.
|
||||||
The special store URL `auto` causes Nix to automatically select a
|
The special store URL `auto` causes Nix to automatically select a
|
||||||
store as follows:
|
store as follows:
|
||||||
|
|
||||||
* Use the [local store](#local-store) `/nix/store` if `/nix/var/nix`
|
* Use the [local store](./local-store.md) `/nix/store` if `/nix/var/nix`
|
||||||
is writable by the current user.
|
is writable by the current user.
|
||||||
|
|
||||||
* Otherwise, if `/nix/var/nix/daemon-socket/socket` exists, [connect
|
* Otherwise, if `/nix/var/nix/daemon-socket/socket` exists, [connect
|
||||||
to the Nix daemon listening on that socket](#local-daemon-store).
|
to the Nix daemon listening on that socket](./local-daemon-store.md).
|
||||||
|
|
||||||
* Otherwise, on Linux only, use the [local chroot store](#local-store)
|
* Otherwise, on Linux only, use the [local chroot store](./local-store.md)
|
||||||
`~/.local/share/nix/root`, which will be created automatically if it
|
`~/.local/share/nix/root`, which will be created automatically if it
|
||||||
does not exist.
|
does not exist.
|
||||||
|
|
||||||
* Otherwise, use the [local store](#local-store) `/nix/store`.
|
* Otherwise, use the [local store](./local-store.md) `/nix/store`.
|
||||||
|
|
||||||
@stores@
|
|
||||||
|
|
||||||
)"
|
|
|
@ -1,5 +1,11 @@
|
||||||
with builtins;
|
with builtins;
|
||||||
|
|
||||||
|
let
|
||||||
|
lowerChars = stringToCharacters "abcdefghijklmnopqrstuvwxyz";
|
||||||
|
upperChars = stringToCharacters "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
|
||||||
|
stringToCharacters = s: genList (p: substring p 1 s) (stringLength s);
|
||||||
|
in
|
||||||
|
|
||||||
rec {
|
rec {
|
||||||
splitLines = s: filter (x: !isList x) (split "\n" s);
|
splitLines = s: filter (x: !isList x) (split "\n" s);
|
||||||
|
|
||||||
|
@ -18,6 +24,8 @@ rec {
|
||||||
in
|
in
|
||||||
if replaced == string then string else replaceStringsRec from to replaced;
|
if replaced == string then string else replaceStringsRec from to replaced;
|
||||||
|
|
||||||
|
toLower = replaceStrings upperChars lowerChars;
|
||||||
|
|
||||||
squash = replaceStringsRec "\n\n\n" "\n\n";
|
squash = replaceStringsRec "\n\n\n" "\n\n";
|
||||||
|
|
||||||
trim = string:
|
trim = string:
|
||||||
|
|
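The helpers added above compose as follows; a self-contained sketch that repeats the definitions so it can be evaluated standalone, for example with `nix-instantiate --eval`:

```nix
# sketch: how stringToCharacters and toLower (defined above) behave
with builtins;
let
  stringToCharacters = s: genList (p: substring p 1 s) (stringLength s);
  lowerChars = stringToCharacters "abcdefghijklmnopqrstuvwxyz";
  upperChars = stringToCharacters "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
  toLower = replaceStrings upperChars lowerChars;
in
  toLower "Hello, Nix!"   # evaluates to "hello, nix!"
```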
25
flake.lock
25
flake.lock
|
@ -32,34 +32,18 @@
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"lowdown-src": {
|
|
||||||
"flake": false,
|
|
||||||
"locked": {
|
|
||||||
"lastModified": 1633514407,
|
|
||||||
"narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=",
|
|
||||||
"owner": "kristapsdz",
|
|
||||||
"repo": "lowdown",
|
|
||||||
"rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8",
|
|
||||||
"type": "github"
|
|
||||||
},
|
|
||||||
"original": {
|
|
||||||
"owner": "kristapsdz",
|
|
||||||
"repo": "lowdown",
|
|
||||||
"type": "github"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1700748986,
|
"lastModified": 1701355166,
|
||||||
"narHash": "sha256-/nqLrNU297h3PCw4QyDpZKZEUHmialJdZW2ceYFobds=",
|
"narHash": "sha256-4V7XMI0Gd+y0zsi++cEHd99u3GNL0xSTGRmiWKzGnUQ=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "9ba29e2346bc542e9909d1021e8fd7d4b3f64db0",
|
"rev": "36c4ac09e9bebcec1fa7b7539cddb0c9e837409c",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"ref": "nixos-23.05-small",
|
"ref": "staging-23.05",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
|
@ -84,7 +68,6 @@
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"flake-compat": "flake-compat",
|
"flake-compat": "flake-compat",
|
||||||
"libgit2": "libgit2",
|
"libgit2": "libgit2",
|
||||||
"lowdown-src": "lowdown-src",
|
|
||||||
"nixpkgs": "nixpkgs",
|
"nixpkgs": "nixpkgs",
|
||||||
"nixpkgs-regression": "nixpkgs-regression"
|
"nixpkgs-regression": "nixpkgs-regression"
|
||||||
}
|
}
|
||||||
|
|
720
flake.nix
720
flake.nix
|
@ -1,17 +1,34 @@
|
||||||
{
|
{
|
||||||
description = "The purely functional package manager";
|
description = "The purely functional package manager";
|
||||||
|
|
||||||
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small";
|
# TODO Go back to nixos-23.05-small once
|
||||||
|
# https://github.com/NixOS/nixpkgs/pull/271202 is merged.
|
||||||
|
#
|
||||||
|
# Also, do not grab arbitrary further staging commits. This PR was
|
||||||
|
# carefully made to be based on release-23.05 and just contain
|
||||||
|
# rebuild-causing changes to packages that Nix actually uses.
|
||||||
|
#
|
||||||
|
# Once this is updated to something containing
|
||||||
|
# https://github.com/NixOS/nixpkgs/pull/271423, don't forget
|
||||||
|
# to remove the `nix.checkAllErrors = false;` line in the tests.
|
||||||
|
inputs.nixpkgs.url = "github:NixOS/nixpkgs/staging-23.05";
|
||||||
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
||||||
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
|
|
||||||
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
|
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
|
||||||
inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; };
|
inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; };
|
||||||
|
|
||||||
outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src, flake-compat, libgit2 }:
|
outputs = { self, nixpkgs, nixpkgs-regression, libgit2, ... }:
|
||||||
|
|
||||||
let
|
let
|
||||||
inherit (nixpkgs) lib;
|
inherit (nixpkgs) lib;
|
||||||
|
|
||||||
|
# Experimental fileset library: https://github.com/NixOS/nixpkgs/pull/222981
|
||||||
|
# Not an "idiomatic" flake input because:
|
||||||
|
# - Propagation to dependent locks: https://github.com/NixOS/nix/issues/7730
|
||||||
|
# - Subflake would download redundant and huge parent flake
|
||||||
|
# - No git tree hash support: https://github.com/NixOS/nix/issues/6044
|
||||||
|
inherit (import (builtins.fetchTarball { url = "https://github.com/NixOS/nix/archive/1bdcd7fc8a6a40b2e805bad759b36e64e911036b.tar.gz"; sha256 = "sha256:14ljlpdsp4x7h1fkhbmc4bd3vsqnx8zdql4h3037wh09ad6a0893"; }))
|
||||||
|
fileset;
|
||||||
|
|
||||||
officialRelease = false;
|
officialRelease = false;
|
||||||
|
|
||||||
# Set to true to build the release notes for the next release.
|
# Set to true to build the release notes for the next release.
|
||||||
|
@ -30,11 +47,27 @@
|
||||||
systems = linuxSystems ++ darwinSystems;
|
systems = linuxSystems ++ darwinSystems;
|
||||||
|
|
||||||
crossSystems = [
|
crossSystems = [
|
||||||
"armv6l-linux" "armv7l-linux"
|
"armv6l-unknown-linux-gnueabihf"
|
||||||
"x86_64-freebsd13" "x86_64-netbsd"
|
"armv7l-unknown-linux-gnueabihf"
|
||||||
|
"x86_64-unknown-freebsd13"
|
||||||
|
"x86_64-unknown-netbsd"
|
||||||
];
|
];
|
||||||
|
|
||||||
stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ];
|
# Nix doesn't yet build on this platform, so we put it in a
|
||||||
|
# separate list. We just use this for `devShells` and
|
||||||
|
# `nixpkgsFor`, which this depends on.
|
||||||
|
shellCrossSystems = crossSystems ++ [
|
||||||
|
"x86_64-w64-mingw32"
|
||||||
|
];
|
||||||
|
|
||||||
|
stdenvs = [
|
||||||
|
"ccacheStdenv"
|
||||||
|
"clang11Stdenv"
|
||||||
|
"clangStdenv"
|
||||||
|
"gccStdenv"
|
||||||
|
"libcxxStdenv"
|
||||||
|
"stdenv"
|
||||||
|
];
|
||||||
|
|
||||||
forAllSystems = lib.genAttrs systems;
|
forAllSystems = lib.genAttrs systems;
|
||||||
|
|
||||||
|
@ -49,57 +82,6 @@
|
||||||
})
|
})
|
||||||
stdenvs);
|
stdenvs);
|
||||||
|
|
||||||
# Experimental fileset library: https://github.com/NixOS/nixpkgs/pull/222981
|
|
||||||
# Not an "idiomatic" flake input because:
|
|
||||||
# - Propagation to dependent locks: https://github.com/NixOS/nix/issues/7730
|
|
||||||
# - Subflake would download redundant and huge parent flake
|
|
||||||
# - No git tree hash support: https://github.com/NixOS/nix/issues/6044
|
|
||||||
inherit (import (builtins.fetchTarball { url = "https://github.com/NixOS/nix/archive/1bdcd7fc8a6a40b2e805bad759b36e64e911036b.tar.gz"; sha256 = "sha256:14ljlpdsp4x7h1fkhbmc4bd3vsqnx8zdql4h3037wh09ad6a0893"; }))
|
|
||||||
fileset;
|
|
||||||
|
|
||||||
baseFiles =
|
|
||||||
# .gitignore has already been processed, so any changes in it are irrelevant
|
|
||||||
# at this point. It is not represented verbatim for test purposes because
|
|
||||||
# that would interfere with repo semantics.
|
|
||||||
fileset.fileFilter (f: f.name != ".gitignore") ./.;
|
|
||||||
|
|
||||||
configureFiles = fileset.unions [
|
|
||||||
./.version
|
|
||||||
./configure.ac
|
|
||||||
./m4
|
|
||||||
# TODO: do we really need README.md? It doesn't seem used in the build.
|
|
||||||
./README.md
|
|
||||||
];
|
|
||||||
|
|
||||||
topLevelBuildFiles = fileset.unions [
|
|
||||||
./local.mk
|
|
||||||
./Makefile
|
|
||||||
./Makefile.config.in
|
|
||||||
./mk
|
|
||||||
];
|
|
||||||
|
|
||||||
functionalTestFiles = fileset.unions [
|
|
||||||
./tests/functional
|
|
||||||
(fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
|
|
||||||
];
|
|
||||||
|
|
||||||
nixSrc = fileset.toSource {
|
|
||||||
root = ./.;
|
|
||||||
fileset = fileset.intersect baseFiles (fileset.unions [
|
|
||||||
configureFiles
|
|
||||||
topLevelBuildFiles
|
|
||||||
./boehmgc-coroutine-sp-fallback.diff
|
|
||||||
./doc
|
|
||||||
./misc
|
|
||||||
./precompiled-headers.h
|
|
||||||
./src
|
|
||||||
./unit-test-data
|
|
||||||
./COPYING
|
|
||||||
./scripts/local.mk
|
|
||||||
functionalTestFiles
|
|
||||||
]);
|
|
||||||
};
|
|
||||||
|
|
||||||
# Memoize nixpkgs for different platforms for efficiency.
|
# Memoize nixpkgs for different platforms for efficiency.
|
||||||
nixpkgsFor = forAllSystems
|
nixpkgsFor = forAllSystems
|
||||||
(system: let
|
(system: let
|
||||||
|
@ -108,8 +90,8 @@
|
||||||
inherit system;
|
inherit system;
|
||||||
};
|
};
|
||||||
crossSystem = if crossSystem == null then null else {
|
crossSystem = if crossSystem == null then null else {
|
||||||
system = crossSystem;
|
config = crossSystem;
|
||||||
} // lib.optionalAttrs (crossSystem == "x86_64-freebsd13") {
|
} // lib.optionalAttrs (crossSystem == "x86_64-unknown-freebsd13") {
|
||||||
useLLVM = true;
|
useLLVM = true;
|
||||||
};
|
};
|
||||||
overlays = [
|
overlays = [
|
||||||
|
@ -121,414 +103,117 @@
|
||||||
in {
|
in {
|
||||||
inherit stdenvs native;
|
inherit stdenvs native;
|
||||||
static = native.pkgsStatic;
|
static = native.pkgsStatic;
|
||||||
cross = forAllCrossSystems (crossSystem: make-pkgs crossSystem "stdenv");
|
cross = lib.genAttrs shellCrossSystems (crossSystem: make-pkgs crossSystem "stdenv");
|
||||||
});
|
});
|
||||||
|
|
||||||
commonDeps =
|
installScriptFor = tarballs:
|
||||||
{ pkgs
|
nixpkgsFor.x86_64-linux.native.callPackage ./scripts/installer.nix {
|
||||||
, isStatic ? pkgs.stdenv.hostPlatform.isStatic
|
inherit tarballs;
|
||||||
}:
|
|
||||||
with pkgs; rec {
|
|
||||||
# Use "busybox-sandbox-shell" if present,
|
|
||||||
# if not (legacy) fallback and hope it's sufficient.
|
|
||||||
sh = pkgs.busybox-sandbox-shell or (busybox.override {
|
|
||||||
useMusl = true;
|
|
||||||
enableStatic = true;
|
|
||||||
enableMinimal = true;
|
|
||||||
extraConfig = ''
|
|
||||||
CONFIG_FEATURE_FANCY_ECHO y
|
|
||||||
CONFIG_FEATURE_SH_MATH y
|
|
||||||
CONFIG_FEATURE_SH_MATH_64 y
|
|
||||||
|
|
||||||
CONFIG_ASH y
|
|
||||||
CONFIG_ASH_OPTIMIZE_FOR_SIZE y
|
|
||||||
|
|
||||||
CONFIG_ASH_ALIAS y
|
|
||||||
CONFIG_ASH_BASH_COMPAT y
|
|
||||||
CONFIG_ASH_CMDCMD y
|
|
||||||
CONFIG_ASH_ECHO y
|
|
||||||
CONFIG_ASH_GETOPTS y
|
|
||||||
CONFIG_ASH_INTERNAL_GLOB y
|
|
||||||
CONFIG_ASH_JOB_CONTROL y
|
|
||||||
CONFIG_ASH_PRINTF y
|
|
||||||
CONFIG_ASH_TEST y
|
|
||||||
'';
|
|
||||||
});
|
|
||||||
|
|
||||||
configureFlags =
|
|
||||||
lib.optionals stdenv.isLinux [
|
|
||||||
"--with-boost=${boost-nix}/lib"
|
|
||||||
"--with-sandbox-shell=${sh}/bin/busybox"
|
|
||||||
]
|
|
||||||
++ lib.optionals (stdenv.isLinux && !(isStatic && stdenv.system == "aarch64-linux")) [
|
|
||||||
"LDFLAGS=-fuse-ld=gold"
|
|
||||||
];
|
|
||||||
|
|
||||||
testConfigureFlags = [
|
|
||||||
"RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include"
|
|
||||||
] ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
|
|
||||||
"--enable-install-unit-tests"
|
|
||||||
"--with-check-bin-dir=${builtins.placeholder "check"}/bin"
|
|
||||||
"--with-check-lib-dir=${builtins.placeholder "check"}/lib"
|
|
||||||
];
|
|
||||||
|
|
||||||
internalApiDocsConfigureFlags = [
|
|
||||||
"--enable-internal-api-docs"
|
|
||||||
];
|
|
||||||
|
|
||||||
changelog-d = pkgs.buildPackages.changelog-d;
|
|
||||||
|
|
||||||
nativeBuildDeps =
|
|
||||||
[
|
|
||||||
buildPackages.bison
|
|
||||||
buildPackages.flex
|
|
||||||
(lib.getBin buildPackages.lowdown-nix)
|
|
||||||
buildPackages.mdbook
|
|
||||||
buildPackages.mdbook-linkcheck
|
|
||||||
buildPackages.autoconf-archive
|
|
||||||
buildPackages.autoreconfHook
|
|
||||||
buildPackages.pkg-config
|
|
||||||
|
|
||||||
# Tests
|
|
||||||
buildPackages.git
|
|
||||||
buildPackages.mercurial # FIXME: remove? only needed for tests
|
|
||||||
buildPackages.jq # Also for custom mdBook preprocessor.
|
|
||||||
buildPackages.openssh # only needed for tests (ssh-keygen)
|
|
||||||
]
|
|
||||||
++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)]
|
|
||||||
# Official releases don't have rl-next, so we don't need to compile a changelog
|
|
||||||
++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d
|
|
||||||
;
|
|
||||||
|
|
||||||
buildDeps =
|
|
||||||
[ curl
|
|
||||||
bzip2 xz brotli editline
|
|
||||||
openssl sqlite
|
|
||||||
libarchive
|
|
||||||
(pkgs.libgit2.overrideAttrs (attrs: {
|
|
||||||
src = libgit2;
|
|
||||||
version = libgit2.lastModifiedDate;
|
|
||||||
cmakeFlags = (attrs.cmakeFlags or []) ++ ["-DUSE_SSH=exec"];
|
|
||||||
}))
|
|
||||||
boost-nix
|
|
||||||
lowdown-nix
|
|
||||||
libsodium
|
|
||||||
]
|
|
||||||
++ lib.optionals stdenv.isLinux [libseccomp]
|
|
||||||
++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
|
|
||||||
|
|
||||||
checkDeps = [
|
|
||||||
gtest
|
|
||||||
rapidcheck
|
|
||||||
];
|
|
||||||
|
|
||||||
internalApiDocsDeps = [
|
|
||||||
buildPackages.doxygen
|
|
||||||
];
|
|
||||||
|
|
||||||
awsDeps = lib.optional (stdenv.isLinux || stdenv.isDarwin)
|
|
||||||
(aws-sdk-cpp.override {
|
|
||||||
apis = ["s3" "transfer"];
|
|
||||||
customMemoryManagement = false;
|
|
||||||
});
|
|
||||||
|
|
||||||
propagatedDeps =
|
|
||||||
[ ((boehmgc.override {
|
|
||||||
enableLargeConfig = true;
|
|
||||||
}).overrideAttrs(o: {
|
|
||||||
patches = (o.patches or []) ++ [
|
|
||||||
./boehmgc-coroutine-sp-fallback.diff
|
|
||||||
];
|
|
||||||
})
|
|
||||||
)
|
|
||||||
nlohmann_json
|
|
||||||
];
|
|
||||||
};
|
|
||||||
|
|
||||||
installScriptFor = systems:
|
|
||||||
with nixpkgsFor.x86_64-linux.native;
|
|
||||||
runCommand "installer-script"
|
|
||||||
{ buildInputs = [ nix ];
|
|
||||||
}
|
|
||||||
''
|
|
||||||
mkdir -p $out/nix-support
|
|
||||||
|
|
||||||
# Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix.
|
|
||||||
tarballPath() {
|
|
||||||
# Remove the store prefix
|
|
||||||
local path=''${1#${builtins.storeDir}/}
|
|
||||||
# Get the path relative to the derivation root
|
|
||||||
local rest=''${path#*/}
|
|
||||||
# Get the derivation hash
|
|
||||||
local drvHash=''${path%%-*}
|
|
||||||
echo "$drvHash/$rest"
|
|
||||||
}
|
|
||||||
|
|
||||||
substitute ${./scripts/install.in} $out/install \
|
|
||||||
${pkgs.lib.concatMapStrings
|
|
||||||
(system: let
|
|
||||||
tarball = if builtins.elem system crossSystems then self.hydraJobs.binaryTarballCross.x86_64-linux.${system} else self.hydraJobs.binaryTarball.${system};
|
|
||||||
in '' \
|
|
||||||
--replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \
|
|
||||||
--replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \
|
|
||||||
''
|
|
||||||
)
|
|
||||||
systems
|
|
||||||
} --replace '@nixVersion@' ${version}
|
|
||||||
|
|
||||||
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
|
|
||||||
'';
|
|
||||||
|
|
||||||
testNixVersions = pkgs: client: daemon: with commonDeps { inherit pkgs; }; with pkgs.lib; pkgs.stdenv.mkDerivation {
|
|
||||||
NIX_DAEMON_PACKAGE = daemon;
|
|
||||||
NIX_CLIENT_PACKAGE = client;
|
|
||||||
name =
|
|
||||||
"nix-tests"
|
|
||||||
+ optionalString
|
|
||||||
(versionAtLeast daemon.version "2.4pre20211005" &&
|
|
||||||
versionAtLeast client.version "2.4pre20211005")
|
|
||||||
"-${client.version}-against-${daemon.version}";
|
|
||||||
inherit version;
|
|
||||||
|
|
||||||
src = fileset.toSource {
|
|
||||||
root = ./.;
|
|
||||||
fileset = fileset.intersect baseFiles (fileset.unions [
|
|
||||||
configureFiles
|
|
||||||
topLevelBuildFiles
|
|
||||||
functionalTestFiles
|
|
||||||
]);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
VERSION_SUFFIX = versionSuffix;
|
testNixVersions = pkgs: client: daemon:
|
||||||
|
pkgs.callPackage ./package.nix {
|
||||||
|
pname =
|
||||||
|
"nix-tests"
|
||||||
|
+ lib.optionalString
|
||||||
|
(lib.versionAtLeast daemon.version "2.4pre20211005" &&
|
||||||
|
lib.versionAtLeast client.version "2.4pre20211005")
|
||||||
|
"-${client.version}-against-${daemon.version}";
|
||||||
|
|
||||||
nativeBuildInputs = nativeBuildDeps;
|
inherit fileset;
|
||||||
buildInputs = buildDeps ++ awsDeps ++ checkDeps;
|
|
||||||
propagatedBuildInputs = propagatedDeps;
|
|
||||||
|
|
||||||
enableParallelBuilding = true;
|
test-client = client;
|
||||||
|
test-daemon = daemon;
|
||||||
|
|
||||||
configureFlags =
|
doBuild = false;
|
||||||
testConfigureFlags # otherwise configure fails
|
};
|
||||||
++ [ "--disable-build" ];
|
|
||||||
dontBuild = true;
|
|
||||||
doInstallCheck = true;
|
|
||||||
|
|
||||||
installPhase = ''
|
binaryTarball = nix: pkgs: pkgs.callPackage ./scripts/binary-tarball.nix {
|
||||||
mkdir -p $out
|
inherit nix;
|
||||||
'';
|
|
||||||
|
|
||||||
installCheckPhase = ''
|
|
||||||
mkdir -p src/nix-channel
|
|
||||||
make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES
|
|
||||||
'';
|
|
||||||
};
|
};
|
||||||
|
|
||||||
binaryTarball = nix: pkgs:
|
|
||||||
let
|
|
||||||
inherit (pkgs) buildPackages;
|
|
||||||
inherit (pkgs) cacert;
|
|
||||||
installerClosureInfo = buildPackages.closureInfo { rootPaths = [ nix cacert ]; };
|
|
||||||
in
|
|
||||||
|
|
||||||
buildPackages.runCommand "nix-binary-tarball-${version}"
|
|
||||||
{ #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
|
|
||||||
meta.description = "Distribution-independent Nix bootstrap binaries for ${pkgs.system}";
|
|
||||||
}
|
|
||||||
''
|
|
||||||
cp ${installerClosureInfo}/registration $TMPDIR/reginfo
|
|
||||||
cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
|
|
||||||
substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
|
|
||||||
--subst-var-by nix ${nix} \
|
|
||||||
--subst-var-by cacert ${cacert}
|
|
||||||
|
|
||||||
substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
|
|
||||||
--subst-var-by nix ${nix} \
|
|
||||||
--subst-var-by cacert ${cacert}
|
|
||||||
substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
|
|
||||||
--subst-var-by nix ${nix} \
|
|
||||||
--subst-var-by cacert ${cacert}
|
|
||||||
substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
|
|
||||||
--subst-var-by nix ${nix} \
|
|
||||||
--subst-var-by cacert ${cacert}
|
|
||||||
|
|
||||||
if type -p shellcheck; then
|
|
||||||
# SC1090: Don't worry about not being able to find
|
|
||||||
# $nix/etc/profile.d/nix.sh
|
|
||||||
shellcheck --exclude SC1090 $TMPDIR/install
|
|
||||||
shellcheck $TMPDIR/create-darwin-volume.sh
|
|
||||||
shellcheck $TMPDIR/install-darwin-multi-user.sh
|
|
||||||
shellcheck $TMPDIR/install-systemd-multi-user.sh
|
|
||||||
|
|
||||||
# SC1091: Don't panic about not being able to source
|
|
||||||
# /etc/profile
|
|
||||||
# SC2002: Ignore "useless cat" "error", when loading
|
|
||||||
# .reginfo, as the cat is a much cleaner
|
|
||||||
# implementation, even though it is "useless"
|
|
||||||
# SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
|
|
||||||
# root's home directory
|
|
||||||
shellcheck --external-sources \
|
|
||||||
--exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
|
|
||||||
fi
|
|
||||||
|
|
||||||
chmod +x $TMPDIR/install
|
|
||||||
chmod +x $TMPDIR/create-darwin-volume.sh
|
|
||||||
chmod +x $TMPDIR/install-darwin-multi-user.sh
|
|
||||||
chmod +x $TMPDIR/install-systemd-multi-user.sh
|
|
||||||
chmod +x $TMPDIR/install-multi-user
|
|
||||||
dir=nix-${version}-${pkgs.system}
|
|
||||||
fn=$out/$dir.tar.xz
|
|
||||||
mkdir -p $out/nix-support
|
|
||||||
echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
|
|
||||||
tar cvfJ $fn \
|
|
||||||
--owner=0 --group=0 --mode=u+rw,uga+r \
|
|
||||||
--mtime='1970-01-01' \
|
|
||||||
--absolute-names \
|
|
||||||
--hard-dereference \
|
|
||||||
--transform "s,$TMPDIR/install,$dir/install," \
|
|
||||||
--transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
|
|
||||||
--transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
|
|
||||||
--transform "s,$NIX_STORE,$dir/store,S" \
|
|
||||||
$TMPDIR/install \
|
|
||||||
$TMPDIR/create-darwin-volume.sh \
|
|
||||||
$TMPDIR/install-darwin-multi-user.sh \
|
|
||||||
$TMPDIR/install-systemd-multi-user.sh \
|
|
||||||
$TMPDIR/install-multi-user \
|
|
||||||
$TMPDIR/reginfo \
|
|
||||||
$(cat ${installerClosureInfo}/store-paths)
|
|
||||||
'';
|
|
||||||
|
|
||||||
overlayFor = getStdenv: final: prev:
|
overlayFor = getStdenv: final: prev:
|
||||||
let currentStdenv = getStdenv final; in
|
let
|
||||||
|
stdenv = getStdenv final;
|
||||||
|
in
|
||||||
{
|
{
|
||||||
nixStable = prev.nix;
|
nixStable = prev.nix;
|
||||||
|
|
||||||
# Forward from the previous stage as we don’t want it to pick the lowdown override
|
default-busybox-sandbox-shell = final.busybox.override {
|
||||||
nixUnstable = prev.nixUnstable;
|
useMusl = true;
|
||||||
|
enableStatic = true;
|
||||||
|
enableMinimal = true;
|
||||||
|
extraConfig = ''
|
||||||
|
CONFIG_FEATURE_FANCY_ECHO y
|
||||||
|
CONFIG_FEATURE_SH_MATH y
|
||||||
|
CONFIG_FEATURE_SH_MATH_64 y
|
||||||
|
|
||||||
nix =
|
CONFIG_ASH y
|
||||||
with final;
|
CONFIG_ASH_OPTIMIZE_FOR_SIZE y
|
||||||
with commonDeps {
|
|
||||||
inherit pkgs;
|
|
||||||
inherit (currentStdenv.hostPlatform) isStatic;
|
|
||||||
};
|
|
||||||
let
|
|
||||||
canRunInstalled = currentStdenv.buildPlatform.canExecute currentStdenv.hostPlatform;
|
|
||||||
in currentStdenv.mkDerivation (finalAttrs: {
|
|
||||||
name = "nix-${version}";
|
|
||||||
inherit version;
|
|
||||||
|
|
||||||
src = nixSrc;
|
CONFIG_ASH_ALIAS y
|
||||||
VERSION_SUFFIX = versionSuffix;
|
CONFIG_ASH_BASH_COMPAT y
|
||||||
|
CONFIG_ASH_CMDCMD y
|
||||||
outputs = [ "out" "dev" "doc" ]
|
CONFIG_ASH_ECHO y
|
||||||
++ lib.optional (currentStdenv.hostPlatform != currentStdenv.buildPlatform) "check";
|
CONFIG_ASH_GETOPTS y
|
||||||
|
CONFIG_ASH_INTERNAL_GLOB y
|
||||||
nativeBuildInputs = nativeBuildDeps;
|
CONFIG_ASH_JOB_CONTROL y
|
||||||
buildInputs = buildDeps
|
CONFIG_ASH_PRINTF y
|
||||||
# There have been issues building these dependencies
|
CONFIG_ASH_TEST y
|
||||||
++ lib.optionals (currentStdenv.hostPlatform == currentStdenv.buildPlatform) awsDeps
|
|
||||||
++ lib.optionals finalAttrs.doCheck checkDeps;
|
|
||||||
|
|
||||||
propagatedBuildInputs = propagatedDeps;
|
|
||||||
|
|
||||||
disallowedReferences = [ boost-nix ];
|
|
||||||
|
|
||||||
preConfigure = lib.optionalString (! currentStdenv.hostPlatform.isStatic)
|
|
||||||
''
|
|
||||||
# Copy libboost_context so we don't get all of Boost in our closure.
|
|
||||||
# https://github.com/NixOS/nixpkgs/issues/45462
|
|
||||||
mkdir -p $out/lib
|
|
||||||
cp -pd ${boost-nix}/lib/{libboost_context*,libboost_thread*,libboost_system*,libboost_regex*} $out/lib
|
|
||||||
rm -f $out/lib/*.a
|
|
||||||
${lib.optionalString currentStdenv.hostPlatform.isLinux ''
|
|
||||||
chmod u+w $out/lib/*.so.*
|
|
||||||
patchelf --set-rpath $out/lib:${currentStdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
|
|
||||||
''}
|
|
||||||
${lib.optionalString currentStdenv.hostPlatform.isDarwin ''
|
|
||||||
for LIB in $out/lib/*.dylib; do
|
|
||||||
chmod u+w $LIB
|
|
||||||
install_name_tool -id $LIB $LIB
|
|
||||||
install_name_tool -delete_rpath ${boost-nix}/lib/ $LIB || true
|
|
||||||
done
|
|
||||||
install_name_tool -change ${boost-nix}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib
|
|
||||||
''}
|
|
||||||
'';
|
|
||||||
|
|
||||||
configureFlags = configureFlags ++
|
|
||||||
[ "--sysconfdir=/etc" ] ++
|
|
||||||
lib.optional stdenv.hostPlatform.isStatic "--enable-embedded-sandbox-shell" ++
|
|
||||||
[ (lib.enableFeature finalAttrs.doCheck "tests") ] ++
|
|
||||||
lib.optionals finalAttrs.doCheck testConfigureFlags ++
|
|
||||||
lib.optional (!canRunInstalled) "--disable-doc-gen";
|
|
||||||
|
|
||||||
enableParallelBuilding = true;
|
|
||||||
|
|
||||||
makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
|
|
||||||
|
|
||||||
doCheck = true;
|
|
||||||
|
|
||||||
installFlags = "sysconfdir=$(out)/etc";
|
|
||||||
|
|
||||||
postInstall = ''
|
|
||||||
mkdir -p $doc/nix-support
|
|
||||||
echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
|
|
||||||
${lib.optionalString currentStdenv.hostPlatform.isStatic ''
|
|
||||||
mkdir -p $out/nix-support
|
|
||||||
echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
|
|
||||||
''}
|
|
||||||
${lib.optionalString currentStdenv.isDarwin ''
|
|
||||||
install_name_tool \
|
|
||||||
-change ${boost-nix}/lib/libboost_context.dylib \
|
|
||||||
$out/lib/libboost_context.dylib \
|
|
||||||
$out/lib/libnixutil.dylib
|
|
||||||
install_name_tool \
|
|
||||||
-change ${boost-nix}/lib/libboost_regex.dylib \
|
|
||||||
$out/lib/libboost_regex.dylib \
|
|
||||||
$out/lib/libnixexpr.dylib
|
|
||||||
''}
|
|
||||||
'';
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
doInstallCheck = finalAttrs.doCheck;
|
libgit2-nix = final.libgit2.overrideAttrs (attrs: {
|
||||||
installCheckFlags = "sysconfdir=$(out)/etc";
|
src = libgit2;
|
||||||
installCheckTarget = "installcheck"; # work around buggy detection in stdenv
|
version = libgit2.lastModifiedDate;
|
||||||
|
cmakeFlags = attrs.cmakeFlags or []
|
||||||
separateDebugInfo = !currentStdenv.hostPlatform.isStatic;
|
++ [ "-DUSE_SSH=exec" ];
|
||||||
|
|
||||||
strictDeps = true;
|
|
||||||
|
|
||||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
|
||||||
|
|
||||||
passthru.perl-bindings = final.callPackage ./perl {
|
|
||||||
inherit fileset;
|
|
||||||
stdenv = currentStdenv;
|
|
||||||
};
|
|
||||||
|
|
||||||
meta.platforms = lib.platforms.unix;
|
|
||||||
meta.mainProgram = "nix";
|
|
||||||
});
|
});
|
||||||
|
|
||||||
boost-nix = final.boost.override {
|
boehmgc-nix = (final.boehmgc.override {
|
||||||
enableIcu = false;
|
enableLargeConfig = true;
|
||||||
|
}).overrideAttrs(o: {
|
||||||
|
patches = (o.patches or []) ++ [
|
||||||
|
./boehmgc-coroutine-sp-fallback.diff
|
||||||
|
|
||||||
|
# https://github.com/ivmai/bdwgc/pull/586
|
||||||
|
./boehmgc-traceable_allocator-public.diff
|
||||||
|
];
|
||||||
|
});
|
||||||
|
|
||||||
|
changelog-d-nix = final.buildPackages.callPackage ./misc/changelog-d.nix { };
|
||||||
|
|
||||||
|
nix =
|
||||||
|
let
|
||||||
|
officialRelease = false;
|
||||||
|
versionSuffix =
|
||||||
|
if officialRelease
|
||||||
|
then ""
|
||||||
|
else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}";
|
||||||
|
|
||||||
|
in final.callPackage ./package.nix {
|
||||||
|
inherit
|
||||||
|
fileset
|
||||||
|
stdenv
|
||||||
|
versionSuffix
|
||||||
|
;
|
||||||
|
officialRelease = false;
|
||||||
|
boehmgc = final.boehmgc-nix;
|
||||||
|
libgit2 = final.libgit2-nix;
|
||||||
|
busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell;
|
||||||
|
changelog-d = final.changelog-d-nix;
|
||||||
|
} // {
|
||||||
|
# this is a proper separate downstream package, but put
|
||||||
|
# here also for back compat reasons.
|
||||||
|
perl-bindings = final.nix-perl-bindings;
|
||||||
|
};
|
||||||
|
|
||||||
|
nix-perl-bindings = final.callPackage ./perl {
|
||||||
|
inherit fileset stdenv;
|
||||||
|
};
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
-      lowdown-nix = with final; currentStdenv.mkDerivation rec {
-        name = "lowdown-0.9.0";
-
-        src = lowdown-src;
-
-        outputs = [ "out" "bin" "dev" ];
-
-        nativeBuildInputs = [ buildPackages.which ];
-
-        configurePhase = ''
-          ${if (currentStdenv.isDarwin && currentStdenv.isAarch64) then "echo \"HAVE_SANDBOX_INIT=false\" > configure.local" else ""}
-          ./configure \
-            PREFIX=${placeholder "dev"} \
-            BINDIR=${placeholder "bin"}/bin
-        '';
-      };
-    };
 
     in {
       # A Nixpkgs overlay that overrides the 'nix' and
       # 'nix.perl-bindings' packages.
@@ -539,6 +224,8 @@
       # Binary package for various platforms.
       build = forAllSystems (system: self.packages.${system}.nix);
 
+      shellInputs = forAllSystems (system: self.devShells.${system}.default.inputDerivation);
+
       buildStatic = lib.genAttrs linux64BitSystems (system: self.packages.${system}.nix-static);
 
       buildCross = forAllCrossSystems (crossSystem:
@@ -572,67 +259,41 @@
       # to https://nixos.org/nix/install. It downloads the binary
       # tarball for the user's system and calls the second half of the
       # installation script.
-      installerScript = installScriptFor [ "x86_64-linux" "i686-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" "armv6l-linux" "armv7l-linux" ];
-      installerScriptForGHA = installScriptFor [ "x86_64-linux" "x86_64-darwin" "armv6l-linux" "armv7l-linux"];
+      installerScript = installScriptFor [
+        # Native
+        self.hydraJobs.binaryTarball."x86_64-linux"
+        self.hydraJobs.binaryTarball."i686-linux"
+        self.hydraJobs.binaryTarball."aarch64-linux"
+        self.hydraJobs.binaryTarball."x86_64-darwin"
+        self.hydraJobs.binaryTarball."aarch64-darwin"
+        # Cross
+        self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-unknown-linux-gnueabihf"
+        self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-unknown-linux-gnueabihf"
+      ];
+      installerScriptForGHA = installScriptFor [
+        # Native
+        self.hydraJobs.binaryTarball."x86_64-linux"
+        self.hydraJobs.binaryTarball."x86_64-darwin"
+        # Cross
+        self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-unknown-linux-gnueabihf"
+        self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-unknown-linux-gnueabihf"
+      ];
 
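Note the changed contract here: `installScriptFor` now receives the binary-tarball derivations themselves rather than bare system strings. The installer template (see scripts/installer.nix later in this diff) recovers each target system from `tarball.stdenv.hostPlatform.system`, which is what lets the cross tarballs use full triples such as `armv6l-unknown-linux-gnueabihf`.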
       # docker image with Nix inside
       dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage);
 
       # Line coverage analysis.
-      coverage =
-        with nixpkgsFor.x86_64-linux.native;
-        with commonDeps { inherit pkgs; };
+      coverage = nixpkgsFor.x86_64-linux.native.nix.override {
+        pname = "nix-coverage";
+        withCoverageChecks = true;
+      };
-        releaseTools.coverageAnalysis {
-          name = "nix-coverage-${version}";
-
-          src = nixSrc;
-
-          configureFlags = testConfigureFlags;
-
-          enableParallelBuilding = true;
-
-          nativeBuildInputs = nativeBuildDeps;
-          buildInputs = buildDeps ++ propagatedDeps ++ awsDeps ++ checkDeps;
-
-          dontInstall = false;
-
-          doInstallCheck = true;
-          installCheckTarget = "installcheck"; # work around buggy detection in stdenv
-
-          lcovFilter = [ "*/boost/*" "*-tab.*" ];
-
-          hardeningDisable = ["fortify"];
-
-          NIX_CFLAGS_COMPILE = "-DCOVERAGE=1";
-        };
 
       # API docs for Nix's unstable internal C++ interfaces.
-      internal-api-docs =
-        with nixpkgsFor.x86_64-linux.native;
-        with commonDeps { inherit pkgs; };
-
-        stdenv.mkDerivation {
-          pname = "nix-internal-api-docs";
-          inherit version;
-
-          src = nixSrc;
-
-          configureFlags = testConfigureFlags ++ internalApiDocsConfigureFlags;
-
-          nativeBuildInputs = nativeBuildDeps;
-          buildInputs = buildDeps ++ propagatedDeps
-            ++ awsDeps ++ checkDeps ++ internalApiDocsDeps;
-
-          dontBuild = true;
-
-          installTargets = [ "internal-api-html" ];
-
-          postInstall = ''
-            mkdir -p $out/nix-support
-            echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> $out/nix-support/hydra-build-products
-          '';
-        };
+      internal-api-docs = nixpkgsFor.x86_64-linux.native.callPackage ./package.nix {
+        inherit fileset;
+        doBuild = false;
+        enableInternalAPIDocs = true;
+      };
 
       # System tests.
       tests = import ./tests/nixos { inherit lib nixpkgs nixpkgsFor; } // {
@@ -640,7 +301,9 @@
       # Make sure that nix-env still produces the exact same result
       # on a particular version of Nixpkgs.
       evalNixpkgs =
-        with nixpkgsFor.x86_64-linux.native;
+        let
+          inherit (nixpkgsFor.x86_64-linux.native) runCommand nix;
+        in
         runCommand "eval-nixos" { buildInputs = [ nix ]; }
           ''
             type -p nix-env
@@ -690,6 +353,11 @@
       perlBindings = self.hydraJobs.perlBindings.${system};
       installTests = self.hydraJobs.installTests.${system};
       nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
+      rl-next =
+        let pkgs = nixpkgsFor.${system}.native;
+        in pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } ''
+          LANG=C.UTF-8 ${pkgs.changelog-d-nix}/bin/changelog-d ${./doc/manual/rl-next} >$out
+        '';
     } // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
       dockerImage = self.hydraJobs.dockerImage.${system};
     });
@@ -727,47 +395,17 @@
           stdenvs)));
 
     devShells = let
-      makeShell = pkgs: stdenv:
-        let
-          canRunInstalled = stdenv.buildPlatform.canExecute stdenv.hostPlatform;
-        in
-        with commonDeps { inherit pkgs; };
-        stdenv.mkDerivation {
-          name = "nix";
-
-          outputs = [ "out" "dev" "doc" ]
-            ++ lib.optional (stdenv.hostPlatform != stdenv.buildPlatform) "check";
-
-          nativeBuildInputs = nativeBuildDeps
-            ++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear
-            ++ lib.optional
-              (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform)
-              pkgs.buildPackages.clang-tools
-            # We want changelog-d in the shell even if the current build doesn't need it
-            ++ lib.optional (officialRelease || ! buildUnreleasedNotes) changelog-d
-            ;
-
-          buildInputs = buildDeps ++ propagatedDeps
-            ++ awsDeps ++ checkDeps ++ internalApiDocsDeps;
-
-          configureFlags = configureFlags
-            ++ testConfigureFlags ++ internalApiDocsConfigureFlags
-            ++ lib.optional (!canRunInstalled) "--disable-doc-gen";
-
-          enableParallelBuilding = true;
-
-          installFlags = "sysconfdir=$(out)/etc";
-
-          shellHook =
-            ''
-              PATH=$prefix/bin:$PATH
-              unset PYTHONPATH
-              export MANPATH=$out/share/man:$MANPATH
-
-              # Make bash completion work.
-              XDG_DATA_DIRS+=:$out/share
-            '';
-        };
+      makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; }).overrideAttrs (_: {
+        installFlags = "sysconfdir=$(out)/etc";
+        shellHook = ''
+          PATH=$prefix/bin:$PATH
+          unset PYTHONPATH
+          export MANPATH=$out/share/man:$MANPATH
+
+          # Make bash completion work.
+          XDG_DATA_DIRS+=:$out/share
+        '';
+      });
     in
       forAllSystems (system:
         let
@@ -778,7 +416,7 @@
         in
         (makeShells "native" nixpkgsFor.${system}.native) //
         (makeShells "static" nixpkgsFor.${system}.static) //
-        (forAllCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) //
+        (lib.genAttrs shellCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) //
         {
          default = self.devShells.${system}.native-stdenvPackages;
         }
@@ -154,8 +154,8 @@ downloadFile("binaryTarball.x86_64-linux", "1");
 downloadFile("binaryTarball.aarch64-linux", "1");
 downloadFile("binaryTarball.x86_64-darwin", "1");
 downloadFile("binaryTarball.aarch64-darwin", "1");
-downloadFile("binaryTarballCross.x86_64-linux.armv6l-linux", "1");
-downloadFile("binaryTarballCross.x86_64-linux.armv7l-linux", "1");
+downloadFile("binaryTarballCross.x86_64-linux.armv6l-unknown-linux-gnueabihf", "1");
+downloadFile("binaryTarballCross.x86_64-linux.armv7l-unknown-linux-gnueabihf", "1");
 downloadFile("installerScript", "1");
 
 # Upload docker images to dockerhub.
misc/changelog-d.cabal.nix | 31 (new file)
@@ -0,0 +1,31 @@
+{ mkDerivation, aeson, base, bytestring, cabal-install-parsers
+, Cabal-syntax, containers, directory, filepath, frontmatter
+, generic-lens-lite, lib, mtl, optparse-applicative, parsec, pretty
+, regex-applicative, text, pkgs
+}:
+let rev = "f30f6969e9cd8b56242309639d58acea21c99d06";
+in
+mkDerivation {
+  pname = "changelog-d";
+  version = "0.1";
+  src = pkgs.fetchurl {
+    name = "changelog-d-${rev}.tar.gz";
+    url = "https://codeberg.org/roberth/changelog-d/archive/${rev}.tar.gz";
+    hash = "sha256-8a2+i5u7YoszAgd5OIEW0eYUcP8yfhtoOIhLJkylYJ4=";
+  } // { inherit rev; };
+  isLibrary = false;
+  isExecutable = true;
+  libraryHaskellDepends = [
+    aeson base bytestring cabal-install-parsers Cabal-syntax containers
+    directory filepath frontmatter generic-lens-lite mtl parsec pretty
+    regex-applicative text
+  ];
+  executableHaskellDepends = [
+    base bytestring Cabal-syntax directory filepath
+    optparse-applicative
+  ];
+  doHaddock = false;
+  description = "Concatenate changelog entries into a single one";
+  license = lib.licenses.gpl3Plus;
+  mainProgram = "changelog-d";
+}
misc/changelog-d.nix | 31 (new file)
@@ -0,0 +1,31 @@
+# Taken temporarily from <nixpkgs/pkgs/by-name/ch/changelog-d/package.nix>
+{
+  callPackage,
+  lib,
+  haskell,
+  haskellPackages,
+}:
+
+let
+  hsPkg = haskellPackages.callPackage ./changelog-d.cabal.nix { };
+
+  addCompletions = haskellPackages.generateOptparseApplicativeCompletions ["changelog-d"];
+
+  haskellModifications =
+    lib.flip lib.pipe [
+      addCompletions
+      haskell.lib.justStaticExecutables
+    ];
+
+  mkDerivationOverrides = finalAttrs: oldAttrs: {
+
+    version = oldAttrs.version + "-git-${lib.strings.substring 0 7 oldAttrs.src.rev}";
+
+    meta = oldAttrs.meta // {
+      homepage = "https://codeberg.org/roberth/changelog-d";
+      maintainers = [ lib.maintainers.roberth ];
+    };
+
+  };
+in
+  (haskellModifications hsPkg).overrideAttrs mkDerivationOverrides
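With the rev pinned in changelog-d.cabal.nix above, `lib.strings.substring 0 7` yields `f30f696`, so the package evaluates to version `0.1-git-f30f696`.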
mk/build-dir.mk | 10 (new file)
@@ -0,0 +1,10 @@
+# Initialise support for build directories.
+builddir ?=
+
+ifdef builddir
+  buildprefix = $(builddir)/
+  buildprefixrel = $(builddir)
+else
+  buildprefix =
+  buildprefixrel = .
+endif
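In effect, setting e.g. `builddir=build` on the make command line (an illustrative value) routes generated files through `$(buildprefix)` into `build/`, while leaving `builddir` empty keeps the historical in-tree layout, with `buildprefixrel` giving the same location as a relative directory.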
@@ -1,7 +1,7 @@
 # Remove overall test dir (at most one of the two should match) and
 # remove file extension.
 test_name=$(echo -n "$test" | sed \
-  -e "s|^unit-test-data/||" \
+  -e "s|^tests/unit/[^/]*/data/||" \
   -e "s|^tests/functional/||" \
   -e "s|\.sh$||" \
 )
mk/install-dirs.mk | 11 (new file)
@@ -0,0 +1,11 @@
+# Default installation paths.
+prefix ?= /usr/local
+libdir ?= $(prefix)/lib
+bindir ?= $(prefix)/bin
+libexecdir ?= $(prefix)/libexec
+datadir ?= $(prefix)/share
+localstatedir ?= $(prefix)/var
+sysconfdir ?= $(prefix)/etc
+mandir ?= $(prefix)/share/man
+
+DESTDIR ?=
mk/lib.mk | 25
@@ -43,27 +43,6 @@ define newline
 endef
 
 
-# Default installation paths.
-prefix ?= /usr/local
-libdir ?= $(prefix)/lib
-bindir ?= $(prefix)/bin
-libexecdir ?= $(prefix)/libexec
-datadir ?= $(prefix)/share
-localstatedir ?= $(prefix)/var
-sysconfdir ?= $(prefix)/etc
-mandir ?= $(prefix)/share/man
-
-
-# Initialise support for build directories.
-builddir ?=
-
-ifdef builddir
-  buildprefix = $(builddir)/
-else
-  buildprefix =
-endif
-
-
 # Pass -fPIC if we're building dynamic libraries.
 BUILD_SHARED_LIBS ?= 1
 
@@ -94,6 +73,8 @@ ifeq ($(BUILD_DEBUG), 1)
 endif
 
 
+include mk/build-dir.mk
+include mk/install-dirs.mk
 include mk/functions.mk
 include mk/tracing.mk
 include mk/clean.mk
@@ -112,7 +93,7 @@ define include-sub-makefile
 include $(1)
 endef
 
-$(foreach mf, $(makefiles), $(eval $(call include-sub-makefile, $(mf))))
+$(foreach mf, $(makefiles), $(eval $(call include-sub-makefile,$(mf))))
 
 
 # Instantiate stuff.
@@ -87,6 +87,6 @@ define build-program
 # Phony target to run this program (typically as a dependency of 'check').
 .PHONY: $(1)_RUN
 $(1)_RUN: $$($(1)_PATH)
-	$(trace-test) $$(UNIT_TEST_ENV) $$($(1)_PATH)
+	$(trace-test) $$($(1)_ENV) $$($(1)_PATH)
 
 endef
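Each program now reads its environment from a per-program `$(1)_ENV` variable instead of the single global `UNIT_TEST_ENV`, so an individual test binary can be given its own settings (e.g. a hypothetical `foo_ENV = NIX_REMOTE=local` next to the makefile that defines `foo`; the name is illustrative).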
@@ -10,10 +10,10 @@ endef
 
 ifneq ($(MAKECMDGOALS), clean)
 
-%.h: %.h.in
-	$(trace-gen) rm -f $@ && ./config.status --quiet --header=$@
+$(buildprefix)%.h: %.h.in
+	$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --header=$(@:$(buildprefix)%=%)
 
-%: %.in
-	$(trace-gen) rm -f $@ && ./config.status --quiet --file=$@
+$(buildprefix)%: %.in
+	$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --file=$(@:$(buildprefix)%=%)
 
 endif
package.nix | 385 (new file)
@@ -0,0 +1,385 @@
+{ lib
+, stdenv
+, releaseTools
+, autoconf-archive
+, autoreconfHook
+, aws-sdk-cpp
+, boehmgc
+, nlohmann_json
+, bison
+, boost
+, brotli
+, bzip2
+, changelog-d
+, curl
+, editline
+, fileset
+, flex
+, git
+, gtest
+, jq
+, doxygen
+, libarchive
+, libcpuid
+, libgit2
+, libseccomp
+, libsodium
+, lowdown
+, mdbook
+, mdbook-linkcheck
+, mercurial
+, openssh
+, openssl
+, pkg-config
+, rapidcheck
+, sqlite
+, util-linux
+, xz
+
+, busybox-sandbox-shell ? null
+
+# Configuration Options
+#:
+# This probably seems like too many degrees of freedom, but it
+# faithfully reflects how the underlying configure + make build system
+# work. The top-level flake.nix will choose useful combinations of these
+# options to CI.
+
+, pname ? "nix"
+
+, versionSuffix ? ""
+, officialRelease ? false
+
+# Whether to build Nix. Useful to skip for tasks like (a) just
+# generating API docs or (b) testing existing pre-built versions of Nix
+, doBuild ? true
+
+# Run the unit tests as part of the build. See `installUnitTests` for an
+# alternative to this.
+, doCheck ? __forDefaults.canRunInstalled
+
+# Run the functional tests as part of the build.
+, doInstallCheck ? test-client != null || __forDefaults.canRunInstalled
+
+# Check test coverage of Nix. Probably want to use with at least
+# one of `doCheck` or `doInstallCheck` enabled.
+, withCoverageChecks ? false
+
+# Whether to build the regular manual
+, enableManual ? __forDefaults.canRunInstalled
+
+# Whether to compile `rl-next.md`, the release notes for the next
+# not-yet-released version of Nix in the manual, from the individual
+# change log entries in the directory.
+, buildUnreleasedNotes ? false
+
+# Whether to build the internal API docs, can be done separately from
+# everything else.
+, enableInternalAPIDocs ? false
+
+# Whether to install unit tests. This is useful when cross compiling
+# since we cannot run them natively during the build, but can do so
+# later.
+, installUnitTests ? __forDefaults.canRunInstalled
+
+# For running the functional tests against a pre-built Nix. Probably
+# want to use in conjunction with `doBuild = false;`.
+, test-daemon ? null
+, test-client ? null
+
+# Not a real argument, just the only way to approximate let-binding some
+# stuff for argument defaults.
+, __forDefaults ? {
+    canRunInstalled = doBuild && stdenv.buildPlatform.canExecute stdenv.hostPlatform;
+  }
+}:
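The `__forDefaults` trick deserves a note: a Nix function cannot `let`-bind helpers for use in its own argument defaults, but argument defaults may refer to sibling arguments, so a pseudo-argument carries the shared values. A self-contained sketch of the idiom (names illustrative):

    { doBuild ? true
    , doCheck ? __forDefaults.canRun
    , __forDefaults ? {
        canRun = doBuild;   # defaults may refer to other arguments
      }
    }:
    doCheck   # calling this with { } yields true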
+
+let
+  version = lib.fileContents ./.version + versionSuffix;
+
+  # selected attributes with defaults, will be used to define some
+  # things which should instead be gotten via `finalAttrs` in order to
+  # work with overriding.
+  attrs = {
+    inherit doBuild doCheck doInstallCheck;
+  };
+
+  filesets = {
+    baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.;
+
+    configureFiles = fileset.unions [
+      ./.version
+      ./configure.ac
+      ./m4
+      # TODO: do we really need README.md? It doesn't seem used in the build.
+      ./README.md
+    ];
+
+    topLevelBuildFiles = fileset.unions [
+      ./local.mk
+      ./Makefile
+      ./Makefile.config.in
+      ./mk
+    ];
+
+    functionalTestFiles = fileset.unions [
+      ./tests/functional
+      (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
+    ];
+  };
+
+  mkDerivation =
+    if withCoverageChecks
+    then
+      # TODO support `finalAttrs` args function in
+      # `releaseTools.coverageAnalysis`.
+      argsFun:
+        releaseTools.coverageAnalysis (let args = argsFun args; in args)
+    else stdenv.mkDerivation;
+in
+
+mkDerivation (finalAttrs: let
+
+  inherit (finalAttrs)
+    doCheck
+    doInstallCheck
+    ;
+
+  doBuild = !finalAttrs.dontBuild;
+
+  # Either running the unit tests during the build, or installing them
+  # to be run later, requires the unit tests to be built.
+  buildUnitTests = doCheck || installUnitTests;
+
+  anySortOfTesting = buildUnitTests || doInstallCheck;
+
+in {
+  inherit pname version;
+
+  src =
+    let
+
+    in
+      fileset.toSource {
+        root = ./.;
+        fileset = fileset.intersect filesets.baseFiles (fileset.unions ([
+          filesets.configureFiles
+          filesets.topLevelBuildFiles
+          ./doc/internal-api
+        ] ++ lib.optionals doBuild [
+          ./boehmgc-coroutine-sp-fallback.diff
+          ./doc
+          ./misc
+          ./precompiled-headers.h
+          ./src
+          ./tests/unit
+          ./COPYING
+          ./scripts/local.mk
+        ] ++ lib.optionals anySortOfTesting [
+          filesets.functionalTestFiles
+        ]));
+      };
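Filesets compose set-algebraically, so `src` reads as "every tracked file except .gitignore, restricted to the pieces this configuration actually needs". A minimal standalone sketch of the same composition (paths illustrative; `fileset` is nixpkgs' `lib.fileset`):

    fileset.toSource {
      root = ./.;
      fileset = fileset.intersect
        (fileset.fileFilter (f: f.name != ".gitignore") ./.)
        (fileset.unions [ ./mk ./src ]);
    }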
+
+  VERSION_SUFFIX = versionSuffix;
+
+  outputs = [ "out" ]
+    ++ lib.optional doBuild "dev"
+    # If we are doing just build or just docs, the one thing will use
+    # "out". We only need additional outputs if we are doing both.
+    ++ lib.optional (doBuild && (enableManual || enableInternalAPIDocs)) "doc"
+    ++ lib.optional installUnitTests "check";
+
+  nativeBuildInputs = [
+    autoconf-archive
+    autoreconfHook
+    pkg-config
+  ] ++ lib.optionals doBuild [
+    bison
+    flex
+  ] ++ lib.optionals enableManual [
+    (lib.getBin lowdown)
+    mdbook
+    mdbook-linkcheck
+  ] ++ lib.optionals (doInstallCheck || enableManual) [
+    jq # Also for custom mdBook preprocessor.
+  ] ++ lib.optional stdenv.hostPlatform.isLinux util-linux
+    # Official releases don't have rl-next, so we don't need to compile a
+    # changelog
+    ++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d
+    ++ lib.optional enableInternalAPIDocs doxygen
+  ;
+
+  buildInputs = lib.optionals doBuild [
+    boost
+    brotli
+    bzip2
+    curl
+    libarchive
+    libgit2
+    libsodium
+    openssl
+    sqlite
+    xz
+  ] ++ lib.optionals (!stdenv.hostPlatform.isWindows) [
+    editline
+    lowdown
+  ] ++ lib.optional stdenv.isLinux libseccomp
+    ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid
+    # There have been issues building these dependencies
+    ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin))
+      (aws-sdk-cpp.override {
+        apis = ["s3" "transfer"];
+        customMemoryManagement = false;
+      })
+  ;
+
+  propagatedBuildInputs = [
+    boehmgc
+    nlohmann_json
+  ];
+
+  dontBuild = !attrs.doBuild;
+  doCheck = attrs.doCheck;
+
+  checkInputs = [
+    gtest
+    rapidcheck
+  ];
+
+  nativeCheckInputs = [
+    git
+    mercurial
+    openssh
+  ];
+
+  disallowedReferences = [ boost ];
+
+  preConfigure = lib.optionalString (doBuild && ! stdenv.hostPlatform.isStatic) (
+    ''
+      # Copy libboost_context so we don't get all of Boost in our closure.
+      # https://github.com/NixOS/nixpkgs/issues/45462
+      mkdir -p $out/lib
+      cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*,libboost_regex*} $out/lib
+      rm -f $out/lib/*.a
+    '' + lib.optionalString stdenv.hostPlatform.isLinux ''
+      chmod u+w $out/lib/*.so.*
+      patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
+    '' + lib.optionalString stdenv.hostPlatform.isDarwin ''
+      for LIB in $out/lib/*.dylib; do
+        chmod u+w $LIB
+        install_name_tool -id $LIB $LIB
+        install_name_tool -delete_rpath ${boost}/lib/ $LIB || true
+      done
+      install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib
+    ''
+  );
+
+  configureFlags = [
+    "--sysconfdir=/etc"
+    (lib.enableFeature doBuild "build")
+    (lib.enableFeature anySortOfTesting "tests")
+    (lib.enableFeature enableInternalAPIDocs "internal-api-docs")
+    (lib.enableFeature enableManual "doc-gen")
+    (lib.enableFeature installUnitTests "install-unit-tests")
+  ] ++ lib.optionals installUnitTests [
+    "--with-check-bin-dir=${builtins.placeholder "check"}/bin"
+    "--with-check-lib-dir=${builtins.placeholder "check"}/lib"
+  ] ++ lib.optionals (doBuild) [
+    "--with-boost=${boost}/lib"
+  ] ++ lib.optionals (doBuild && stdenv.isLinux) [
+    "--with-sandbox-shell=${busybox-sandbox-shell}/bin/busybox"
+  ] ++ lib.optional (doBuild && stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux"))
+    "LDFLAGS=-fuse-ld=gold"
+    ++ lib.optional (doBuild && stdenv.hostPlatform.isStatic) "--enable-embedded-sandbox-shell"
+    ++ lib.optional buildUnitTests "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include";
+
+  enableParallelBuilding = true;
+
+  makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
+
+  installTargets = lib.optional doBuild "install"
+    ++ lib.optional enableInternalAPIDocs "internal-api-html";
+
+  installFlags = "sysconfdir=$(out)/etc";
+
+  # In this case we are probably just running tests, and so there isn't
+  # anything to install, we just make an empty directory to signify tests
+  # succeeded.
+  installPhase = if finalAttrs.installTargets != [] then null else ''
+    mkdir -p $out
+  '';
+
+  postInstall = lib.optionalString doBuild (
+    ''
+      mkdir -p $doc/nix-support
+      echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
+    '' + lib.optionalString stdenv.hostPlatform.isStatic ''
+      mkdir -p $out/nix-support
+      echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
+    '' + lib.optionalString stdenv.isDarwin ''
+      install_name_tool \
+        -change ${boost}/lib/libboost_context.dylib \
+        $out/lib/libboost_context.dylib \
+        $out/lib/libnixutil.dylib
+    ''
+  ) + lib.optionalString enableInternalAPIDocs ''
+    mkdir -p ''${!outputDoc}/nix-support
+    echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> ''${!outputDoc}/nix-support/hydra-build-products
+  '';
+
+  doInstallCheck = attrs.doInstallCheck;
+
+  installCheckFlags = "sysconfdir=$(out)/etc";
+  # Work around buggy detection in stdenv.
+  installCheckTarget = "installcheck";
+
+  # Work around weird bug where it doesn't think there is a Makefile.
+  installCheckPhase = if (!doBuild && doInstallCheck) then ''
+    mkdir -p src/nix-channel
+    make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES
+  '' else null;
+
+  # Needed for tests if we are not doing a build, but testing existing
+  # built Nix.
+  preInstallCheck = lib.optionalString (! doBuild) ''
+    mkdir -p src/nix-channel
+  '';
+
+  separateDebugInfo = !stdenv.hostPlatform.isStatic;
+
+  # TODO `releaseTools.coverageAnalysis` in Nixpkgs needs to be updated
+  # to work with `strictDeps`.
+  strictDeps = !withCoverageChecks;
+
+  hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
+
+  meta = {
+    platforms = lib.platforms.unix ++ lib.platforms.windows;
+    mainProgram = "nix";
+    broken = !(lib.all (a: a) [
+      # We cannot run or install unit tests if we don't build them or
+      # Nix proper (which they depend on).
+      (installUnitTests -> doBuild)
+      (doCheck -> doBuild)
+      # We have to build the manual to build unreleased notes, as those
+      # are part of the manual
+      (buildUnreleasedNotes -> enableManual)
+      # The build process for the manual currently requires extracting
+      # data from the Nix executable we are trying to document.
+      (enableManual -> doBuild)
+    ]);
+  };
+
+} // lib.optionalAttrs withCoverageChecks {
+  lcovFilter = [ "*/boost/*" "*-tab.*" ];
+
+  hardeningDisable = ["fortify"];
+
+  NIX_CFLAGS_COMPILE = "-DCOVERAGE=1";
+
+  dontInstall = false;
+} // lib.optionalAttrs (test-daemon != null) {
+  NIX_DAEMON_PACKAGE = test-daemon;
+} // lib.optionalAttrs (test-client != null) {
+  NIX_CLIENT_PACKAGE = test-client;
+})
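The tail of the file composes conditional attribute sets with `//`, where `lib.optionalAttrs cond attrs` is `attrs` when the condition holds and `{ }` otherwise, and the right-hand side of `//` wins on key collisions. A minimal sketch mirroring the pattern above:

    # Only defines NIX_DAEMON_PACKAGE when a test daemon was supplied:
    { doCheck = true; } // lib.optionalAttrs (test-daemon != null) {
      NIX_DAEMON_PACKAGE = test-daemon;
    }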
@@ -9,6 +9,7 @@
 #undef do_close
 
 #include "derivations.hh"
+#include "realisation.hh"
 #include "globals.hh"
 #include "store-api.hh"
 #include "crypto.hh"
@@ -77,7 +78,7 @@ SV * queryReferences(char * path)
 SV * queryPathHash(char * path)
     PPCODE:
         try {
-            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Base32, true);
+            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
@@ -103,7 +104,7 @@ SV * queryPathInfo(char * path, int base32)
                 XPUSHs(&PL_sv_undef);
             else
                 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-            auto s = info->narHash.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, true);
+            auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
             mXPUSHi(info->registrationTime);
             mXPUSHi(info->narSize);
@@ -204,8 +205,8 @@ void importPaths(int fd, int dontCheckSigs)
 SV * hashPath(char * algo, int base32, char * path)
     PPCODE:
         try {
-            Hash h = hashPath(parseHashType(algo), path).first;
-            auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            Hash h = hashPath(parseHashAlgo(algo), path).first;
+            auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
@@ -215,8 +216,8 @@ SV * hashPath(char * algo, int base32, char * path)
 SV * hashFile(char * algo, int base32, char * path)
     PPCODE:
         try {
-            Hash h = hashFile(parseHashType(algo), path);
-            auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            Hash h = hashFile(parseHashAlgo(algo), path);
+            auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
@@ -226,8 +227,8 @@ SV * hashFile(char * algo, int base32, char * path)
 SV * hashString(char * algo, int base32, char * s)
     PPCODE:
         try {
-            Hash h = hashString(parseHashType(algo), s);
-            auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            Hash h = hashString(parseHashAlgo(algo), s);
+            auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
@@ -237,8 +238,8 @@ SV * hashString(char * algo, int base32, char * s)
 SV * convertHash(char * algo, char * s, int toBase32)
     PPCODE:
         try {
-            auto h = Hash::parseAny(s, parseHashType(algo));
-            auto s = h.to_string(toBase32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            auto h = Hash::parseAny(s, parseHashAlgo(algo));
+            auto s = h.to_string(toBase32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
@@ -280,7 +281,7 @@ SV * addToStore(char * srcPath, int recursive, char * algo)
     PPCODE:
         try {
             auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
-            auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashType(algo));
+            auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashAlgo(algo));
             XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
@@ -290,7 +291,7 @@ SV * addToStore(char * srcPath, int recursive, char * algo)
 SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
     PPCODE:
         try {
-            auto h = Hash::parseAny(hash, parseHashType(algo));
+            auto h = Hash::parseAny(hash, parseHashAlgo(algo));
             auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
             auto path = store()->makeFixedOutputPath(name, FixedOutputInfo {
                 .method = method,
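These bindings track a rename on the C++ side: Nix's custom base-32 alphabet (not RFC 4648 base32) is now called `HashFormat::Nix32`, and `parseHashType` becomes `parseHashAlgo`. The "nix32" name is also visible at the language level; a sketch, assuming a Nix new enough to have `builtins.convertHash`:

    builtins.convertHash {
      hash = "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=";
      toHashFormat = "nix32";   # same hash, re-encoded in Nix's base-32 alphabet
    }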
scripts/binary-tarball.nix | 84 (new file)
@@ -0,0 +1,84 @@
+{ runCommand
+, system
+, buildPackages
+, cacert
+, nix
+}:
+
+let
+
+  installerClosureInfo = buildPackages.closureInfo {
+    rootPaths = [ nix cacert ];
+  };
+
+  inherit (nix) version;
+
+  env = {
+    #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
+    meta.description = "Distribution-independent Nix bootstrap binaries for ${system}";
+  };
+
+in
+
+runCommand "nix-binary-tarball-${version}" env ''
+  cp ${installerClosureInfo}/registration $TMPDIR/reginfo
+  cp ${./create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
+  substitute ${./install-nix-from-closure.sh} $TMPDIR/install \
+    --subst-var-by nix ${nix} \
+    --subst-var-by cacert ${cacert}
+
+  substitute ${./install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
+    --subst-var-by nix ${nix} \
+    --subst-var-by cacert ${cacert}
+  substitute ${./install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
+    --subst-var-by nix ${nix} \
+    --subst-var-by cacert ${cacert}
+  substitute ${./install-multi-user.sh} $TMPDIR/install-multi-user \
+    --subst-var-by nix ${nix} \
+    --subst-var-by cacert ${cacert}
+
+  if type -p shellcheck; then
+    # SC1090: Don't worry about not being able to find
+    #         $nix/etc/profile.d/nix.sh
+    shellcheck --exclude SC1090 $TMPDIR/install
+    shellcheck $TMPDIR/create-darwin-volume.sh
+    shellcheck $TMPDIR/install-darwin-multi-user.sh
+    shellcheck $TMPDIR/install-systemd-multi-user.sh
+
+    # SC1091: Don't panic about not being able to source
+    #         /etc/profile
+    # SC2002: Ignore "useless cat" "error", when loading
+    #         .reginfo, as the cat is a much cleaner
+    #         implementation, even though it is "useless"
+    # SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
+    #         root's home directory
+    shellcheck --external-sources \
+      --exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
+  fi
+
+  chmod +x $TMPDIR/install
+  chmod +x $TMPDIR/create-darwin-volume.sh
+  chmod +x $TMPDIR/install-darwin-multi-user.sh
+  chmod +x $TMPDIR/install-systemd-multi-user.sh
+  chmod +x $TMPDIR/install-multi-user
+  dir=nix-${version}-${system}
+  fn=$out/$dir.tar.xz
+  mkdir -p $out/nix-support
+  echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
+  tar cvfJ $fn \
+    --owner=0 --group=0 --mode=u+rw,uga+r \
+    --mtime='1970-01-01' \
+    --absolute-names \
+    --hard-dereference \
+    --transform "s,$TMPDIR/install,$dir/install," \
+    --transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
+    --transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
+    --transform "s,$NIX_STORE,$dir/store,S" \
+    $TMPDIR/install \
+    $TMPDIR/create-darwin-volume.sh \
+    $TMPDIR/install-darwin-multi-user.sh \
+    $TMPDIR/install-systemd-multi-user.sh \
+    $TMPDIR/install-multi-user \
+    $TMPDIR/reginfo \
+    $(cat ${installerClosureInfo}/store-paths)
+''
scripts/installer.nix | 36 (new file)
@@ -0,0 +1,36 @@
+{ lib
+, runCommand
+, nix
+, tarballs
+}:
+
+runCommand "installer-script" {
+  buildInputs = [ nix ];
+} ''
+  mkdir -p $out/nix-support
+
+  # Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix.
+  tarballPath() {
+    # Remove the store prefix
+    local path=''${1#${builtins.storeDir}/}
+    # Get the path relative to the derivation root
+    local rest=''${path#*/}
+    # Get the derivation hash
+    local drvHash=''${path%%-*}
+    echo "$drvHash/$rest"
+  }
+
+  substitute ${./install.in} $out/install \
+    ${lib.concatMapStrings
+      (tarball: let
+          inherit (tarball.stdenv.hostPlatform) system;
+        in '' \
+        --replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \
+        --replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \
+        ''
+      )
+      tarballs
+    } --replace '@nixVersion@' ${nix.version}
+
+  echo "file installer $out/install" >> $out/nix-support/hydra-build-products
+''
@@ -80,7 +80,7 @@ SingleDerivedPath SingleBuiltPath::discardOutputPath() const
     );
 }
 
-nlohmann::json BuiltPath::Built::toJSON(const Store & store) const
+nlohmann::json BuiltPath::Built::toJSON(const StoreDirConfig & store) const
 {
     nlohmann::json res;
     res["drvPath"] = drvPath->toJSON(store);
@@ -90,7 +90,7 @@ nlohmann::json BuiltPath::Built::toJSON(const StoreDirConfig & store) const
     return res;
 }
 
-nlohmann::json SingleBuiltPath::Built::toJSON(const Store & store) const
+nlohmann::json SingleBuiltPath::Built::toJSON(const StoreDirConfig & store) const
 {
     nlohmann::json res;
     res["drvPath"] = drvPath->toJSON(store);
@@ -100,14 +100,14 @@ nlohmann::json SingleBuiltPath::Built::toJSON(const StoreDirConfig & store) const
     return res;
 }
 
-nlohmann::json SingleBuiltPath::toJSON(const Store & store) const
+nlohmann::json SingleBuiltPath::toJSON(const StoreDirConfig & store) const
 {
     return std::visit([&](const auto & buildable) {
         return buildable.toJSON(store);
     }, raw());
 }
 
-nlohmann::json BuiltPath::toJSON(const Store & store) const
+nlohmann::json BuiltPath::toJSON(const StoreDirConfig & store) const
 {
     return std::visit([&](const auto & buildable) {
         return buildable.toJSON(store);
@@ -14,9 +14,9 @@ struct SingleBuiltPathBuilt {
 
     SingleDerivedPathBuilt discardOutputPath() const;
 
-    std::string to_string(const Store & store) const;
-    static SingleBuiltPathBuilt parse(const Store & store, std::string_view, std::string_view);
-    nlohmann::json toJSON(const Store & store) const;
+    std::string to_string(const StoreDirConfig & store) const;
+    static SingleBuiltPathBuilt parse(const StoreDirConfig & store, std::string_view, std::string_view);
+    nlohmann::json toJSON(const StoreDirConfig & store) const;
 
     DECLARE_CMP(SingleBuiltPathBuilt);
 };
@@ -41,8 +41,8 @@ struct SingleBuiltPath : _SingleBuiltPathRaw {
 
     SingleDerivedPath discardOutputPath() const;
 
-    static SingleBuiltPath parse(const Store & store, std::string_view);
-    nlohmann::json toJSON(const Store & store) const;
+    static SingleBuiltPath parse(const StoreDirConfig & store, std::string_view);
+    nlohmann::json toJSON(const StoreDirConfig & store) const;
 };
 
 static inline ref<SingleBuiltPath> staticDrv(StorePath drvPath)
@@ -59,9 +59,9 @@ struct BuiltPathBuilt {
     ref<SingleBuiltPath> drvPath;
     std::map<std::string, StorePath> outputs;
 
-    std::string to_string(const Store & store) const;
-    static BuiltPathBuilt parse(const Store & store, std::string_view, std::string_view);
-    nlohmann::json toJSON(const Store & store) const;
+    std::string to_string(const StoreDirConfig & store) const;
+    static BuiltPathBuilt parse(const StoreDirConfig & store, std::string_view, std::string_view);
+    nlohmann::json toJSON(const StoreDirConfig & store) const;
 
     DECLARE_CMP(BuiltPathBuilt);
 };
@@ -89,7 +89,7 @@ struct BuiltPath : _BuiltPathRaw {
     StorePathSet outPaths() const;
     RealisedPath::Set toRealisedPaths(Store & store) const;
 
-    nlohmann::json toJSON(const Store & store) const;
+    nlohmann::json toJSON(const StoreDirConfig & store) const;
 };
 
 typedef std::vector<BuiltPath> BuiltPaths;
@@ -1,4 +1,5 @@
 #include "command.hh"
+#include "markdown.hh"
 #include "store-api.hh"
 #include "local-fs-store.hh"
 #include "derivations.hh"
@@ -34,6 +35,19 @@ nlohmann::json NixMultiCommand::toJSON()
     return MultiCommand::toJSON();
 }
 
+void NixMultiCommand::run()
+{
+    if (!command) {
+        std::set<std::string> subCommandTextLines;
+        for (auto & [name, _] : commands)
+            subCommandTextLines.insert(fmt("- `%s`", name));
+        std::string markdownError = fmt("`nix %s` requires a sub-command. Available sub-commands:\n\n%s\n",
+            commandName, concatStringsSep("\n", subCommandTextLines));
+        throw UsageError(renderMarkdownToTerminal(markdownError));
+    }
+    command->second->run();
+}
+
 StoreCommand::StoreCommand()
 {
 }
@@ -26,9 +26,13 @@ static constexpr Command::Category catNixInstallation = 102;
 
 static constexpr auto installablesCategory = "Options that change the interpretation of [installables](@docroot@/command-ref/new-cli/nix.md#installables)";
 
-struct NixMultiCommand : virtual MultiCommand, virtual Command
+struct NixMultiCommand : MultiCommand, virtual Command
 {
     nlohmann::json toJSON() override;
+
+    using MultiCommand::MultiCommand;
+
+    virtual void run() override;
 };
 
 // For the overloaded run methods
@@ -141,7 +141,7 @@ MixEvalArgs::MixEvalArgs()
         .longName = "eval-store",
         .description =
           R"(
-            The [URL of the Nix store](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format)
+            The [URL of the Nix store](@docroot@/store/types/index.md#store-url-format)
            to use for evaluation, i.e. to store derivations (`.drv` files) and inputs referenced by them.
          )",
         .category = category,
@@ -260,9 +260,10 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s
 
     evalSettings.pureEval = false;
     auto state = getEvalState();
-    Expr *e = state->parseExprFromFile(
-        resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
-    );
+    auto e =
+        state->parseExprFromFile(
+            resolveExprPath(
+                lookupFileArg(*state, *file)));
 
     Value root;
     state->eval(e, root);
@@ -12,4 +12,4 @@ libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) -pthread
 
 libcmd_LIBS = libstore libutil libexpr libmain libfetchers
 
-$(eval $(call install-file-in, $(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644))
+$(eval $(call install-file-in, $(buildprefix)$(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644))
@@ -43,7 +43,6 @@ extern "C" {
 #include "finally.hh"
 #include "markdown.hh"
 #include "local-fs-store.hh"
-#include "progress-bar.hh"
 #include "print.hh"
 
 #if HAVE_BOEHMGC
@@ -262,13 +261,11 @@ void NixRepl::mainLoop()
     rl_set_list_possib_func(listPossibleCallback);
 #endif
 
-    /* Stop the progress bar because it interferes with the display of
-       the repl. */
-    stopProgressBar();
-
     std::string input;
 
     while (true) {
+        // Hide the progress bar while waiting for user input, so that it won't interfere.
+        logger->pause();
         // When continuing input from previous lines, don't print a prompt, just align to the same
         // number of chars as the prompt.
         if (!getLine(input, input.empty() ? "nix-repl> " : "          ")) {
@@ -278,6 +275,7 @@ void NixRepl::mainLoop()
             logger->cout("");
             break;
         }
+        logger->resume();
         try {
             if (!removeWhitespace(input).empty() && !processLine(input)) return;
         } catch (ParseError & e) {
@@ -22,7 +22,7 @@ struct AttrDb
 {
     std::atomic_bool failed{false};
 
-    const Store & cfg;
+    const StoreDirConfig & cfg;
 
     struct State
     {
@@ -39,7 +39,7 @@ struct AttrDb
     SymbolTable & symbols;
 
     AttrDb(
-        const Store & cfg,
+        const StoreDirConfig & cfg,
         const Hash & fingerprint,
         SymbolTable & symbols)
         : cfg(cfg)
@@ -323,7 +323,7 @@ struct AttrDb
 };
 
 static std::shared_ptr<AttrDb> makeAttrDb(
-    const Store & cfg,
+    const StoreDirConfig & cfg,
     const Hash & fingerprint,
     SymbolTable & symbols)
 {
@@ -103,8 +103,10 @@ void EvalState::forceValue(Value & v, Callable getPos)
             throw;
         }
     }
-    else if (v.isApp())
-        callFunction(*v.app.left, *v.app.right, v, noPos);
+    else if (v.isApp()) {
+        PosIdx pos = getPos();
+        callFunction(*v.app.left, *v.app.right, v, pos);
+    }
     else if (v.isBlackhole())
         error("infinite recursion encountered").atPos(getPos()).template debugThrow<EvalError>();
 }
@@ -121,9 +123,9 @@ template <typename Callable>
 [[gnu::always_inline]]
 inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view errorCtx)
 {
-    forceValue(v, noPos);
+    PosIdx pos = getPos();
+    forceValue(v, pos);
     if (v.type() != nAttrs) {
-        PosIdx pos = getPos();
         error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
     }
 }
@@ -132,7 +134,7 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e
 [[gnu::always_inline]]
 inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view errorCtx)
 {
-    forceValue(v, noPos);
+    forceValue(v, pos);
     if (!v.isList()) {
         error("value is %1% while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
     }
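The practical effect of threading the caller's position through instead of `noPos` is that errors raised while forcing a value can point at the offending expression. An illustrative evaluation (error text taken from the message format above; the exact rendering may differ):

    let attrs = 1; in attrs.x
    # error: value is an integer while a set was expected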
@@ -68,6 +68,11 @@ struct EvalSettings : Config
           evaluation mode. For example, when set to
           `https://github.com/NixOS`, builtin functions such as `fetchGit` are
           allowed to access `https://github.com/NixOS/patchelf.git`.
+
+          Access is granted when
+          - the URI is equal to the prefix,
+          - or the URI is a subpath of the prefix,
+          - or the prefix is a URI scheme ended by a colon `:` and the URI has the same scheme.
         )"};
 
     Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",
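Putting the three rules together, a nix.conf sketch (values illustrative):

    # prefix match: allows https://github.com/NixOS and everything below it;
    # scheme-with-colon: allows any URI using the github: scheme.
    allowed-uris = https://github.com/NixOS github: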
@@ -14,8 +14,11 @@
 #include "profiles.hh"
 #include "print.hh"
 #include "fs-input-accessor.hh"
+#include "filtering-input-accessor.hh"
 #include "memory-input-accessor.hh"
 #include "signals.hh"
+#include "gc-small-vector.hh"
+#include "url.hh"
 
 #include <algorithm>
 #include <chrono>
@@ -31,6 +34,7 @@
 
 #include <sys/resource.h>
 #include <nlohmann/json.hpp>
+#include <boost/container/small_vector.hpp>
 
 #if HAVE_BOEHMGC
|
@ -342,7 +346,7 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env)
|
||||||
} else {
|
} else {
|
||||||
Value nameValue;
|
Value nameValue;
|
||||||
name.expr->eval(state, env, nameValue);
|
name.expr->eval(state, env, nameValue);
|
||||||
state.forceStringNoCtx(nameValue, noPos, "while evaluating an attribute name");
|
state.forceStringNoCtx(nameValue, name.expr->getPos(), "while evaluating an attribute name");
|
||||||
return state.symbols.create(nameValue.string_view());
|
return state.symbols.create(nameValue.string_view());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -507,7 +511,16 @@ EvalState::EvalState(
|
||||||
, sOutputSpecified(symbols.create("outputSpecified"))
|
, sOutputSpecified(symbols.create("outputSpecified"))
|
||||||
, repair(NoRepair)
|
, repair(NoRepair)
|
||||||
, emptyBindings(0)
|
, emptyBindings(0)
|
||||||
, rootFS(makeFSInputAccessor(CanonPath::root))
|
, rootFS(
|
||||||
|
evalSettings.restrictEval || evalSettings.pureEval
|
||||||
|
? ref<InputAccessor>(AllowListInputAccessor::create(makeFSInputAccessor(CanonPath::root), {},
|
||||||
|
[](const CanonPath & path) -> RestrictedPathError {
|
||||||
|
auto modeInformation = evalSettings.pureEval
|
||||||
|
? "in pure evaluation mode (use '--impure' to override)"
|
||||||
|
: "in restricted mode";
|
||||||
|
throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation);
|
||||||
|
}))
|
||||||
|
: makeFSInputAccessor(CanonPath::root))
|
||||||
, corepkgsFS(makeMemoryInputAccessor())
|
, corepkgsFS(makeMemoryInputAccessor())
|
||||||
, internalFS(makeMemoryInputAccessor())
|
, internalFS(makeMemoryInputAccessor())
|
||||||
, derivationInternal{corepkgsFS->addFile(
|
, derivationInternal{corepkgsFS->addFile(
|
||||||
|
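The AllowListInputAccessor introduced here is a decorator over the plain filesystem accessor: each access is checked against a set of granted paths, and a caller-supplied callback produces the error on denial. A schematic stand-in for the idea (these are not the actual Nix classes, which live in filtering-input-accessor.hh; the root path and non-read operations are omitted):

    #include <functional>
    #include <set>
    #include <stdexcept>
    #include <string>

    struct Accessor {
        virtual ~Accessor() = default;
        virtual std::string readFile(const std::string & path) = 0;
    };

    struct AllowListAccessor : Accessor {
        Accessor & next;                                   // the real filesystem accessor
        std::set<std::string> allowed;                     // paths granted so far
        std::function<void(const std::string &)> onDenied; // throws RestrictedPathError in Nix

        AllowListAccessor(Accessor & next, std::function<void(const std::string &)> onDenied)
            : next(next), onDenied(std::move(onDenied)) {}

        std::string readFile(const std::string & path) override
        {
            // Grant access if the path or one of its ancestors was allowed.
            for (std::string p = path; ; ) {
                if (allowed.count(p)) return next.readFile(path);
                auto slash = p.rfind('/');
                if (slash == std::string::npos || slash == 0) break;
                p.resize(slash);
            }
            onDenied(path);
            throw std::logic_error("onDenied must throw");
        }
    };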
@@ -549,28 +562,10 @@ EvalState::EvalState(
             searchPath.elements.emplace_back(SearchPath::Elem::parse(i));
     }

-    if (evalSettings.restrictEval || evalSettings.pureEval) {
-        allowedPaths = PathSet();
-
-        for (auto & i : searchPath.elements) {
-            auto r = resolveSearchPathPath(i.path);
-            if (!r) continue;
-
-            auto path = std::move(*r);
-
-            if (store->isInStore(path)) {
-                try {
-                    StorePathSet closure;
-                    store->computeFSClosure(store->toStorePath(path).first, closure);
-                    for (auto & path : closure)
-                        allowPath(path);
-                } catch (InvalidPath &) {
-                    allowPath(path);
-                }
-            } else
-                allowPath(path);
-        }
-    }
+    /* Allow access to all paths in the search path. */
+    if (rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
+        for (auto & i : searchPath.elements)
+            resolveSearchPathPath(i.path, true);

     corepkgsFS->addFile(
         CanonPath("fetchurl.nix"),

@@ -588,14 +583,14 @@ EvalState::~EvalState()

 void EvalState::allowPath(const Path & path)
 {
-    if (allowedPaths)
-        allowedPaths->insert(path);
+    if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
+        rootFS2->allowPath(CanonPath(path));
 }

 void EvalState::allowPath(const StorePath & storePath)
 {
-    if (allowedPaths)
-        allowedPaths->insert(store->toRealPath(storePath));
+    if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
+        rootFS2->allowPath(CanonPath(store->toRealPath(storePath)));
 }

 void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v)

@@ -605,79 +600,57 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value &
     mkStorePathString(storePath, v);
 }

-SourcePath EvalState::checkSourcePath(const SourcePath & path_)
+inline static bool isJustSchemePrefix(std::string_view prefix)
 {
-    // Don't check non-rootFS accessors, they're in a different namespace.
-    if (path_.accessor != ref<InputAccessor>(rootFS)) return path_;
-
-    if (!allowedPaths) return path_;
-
-    auto i = resolvedPaths.find(path_.path.abs());
-    if (i != resolvedPaths.end())
-        return i->second;
-
-    bool found = false;
-
-    /* First canonicalize the path without symlinks, so we make sure an
-     * attacker can't append ../../... to a path that would be in allowedPaths
-     * and thus leak symlink targets.
-     */
-    Path abspath = canonPath(path_.path.abs());
-
-    for (auto & i : *allowedPaths) {
-        if (isDirOrInDir(abspath, i)) {
-            found = true;
-            break;
-        }
-    }
-
-    if (!found) {
-        auto modeInformation = evalSettings.pureEval
-            ? "in pure eval mode (use '--impure' to override)"
-            : "in restricted mode";
-        throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", abspath, modeInformation);
-    }
-
-    /* Resolve symlinks. */
-    debug("checking access to '%s'", abspath);
-    SourcePath path = rootPath(CanonPath(canonPath(abspath, true)));
-
-    for (auto & i : *allowedPaths) {
-        if (isDirOrInDir(path.path.abs(), i)) {
-            resolvedPaths.insert_or_assign(path_.path.abs(), path);
-            return path;
-        }
-    }
-
-    throw RestrictedPathError("access to canonical path '%1%' is forbidden in restricted mode", path);
+    return
+        !prefix.empty()
+        && prefix[prefix.size() - 1] == ':'
+        && isValidSchemeName(prefix.substr(0, prefix.size() - 1));
+}
+
+bool isAllowedURI(std::string_view uri, const Strings & allowedUris)
+{
+    /* 'uri' should be equal to a prefix, or in a subdirectory of a
+       prefix. Thus, the prefix https://github.co does not permit
+       access to https://github.com. */
+    for (auto & prefix : allowedUris) {
+        if (uri == prefix
+            // Allow access to subdirectories of the prefix.
+            || (uri.size() > prefix.size()
+                && prefix.size() > 0
+                && hasPrefix(uri, prefix)
+                && (
+                    // Allow access to subdirectories of the prefix.
+                    prefix[prefix.size() - 1] == '/'
+                    || uri[prefix.size()] == '/'
+
+                    // Allow access to whole schemes
+                    || isJustSchemePrefix(prefix)
+                    )
+                ))
+            return true;
+    }
+
+    return false;
 }

 void EvalState::checkURI(const std::string & uri)
 {
     if (!evalSettings.restrictEval) return;

-    /* 'uri' should be equal to a prefix, or in a subdirectory of a
-       prefix. Thus, the prefix https://github.co does not permit
-       access to https://github.com. Note: this allows 'http://' and
-       'https://' as prefixes for any http/https URI. */
-    for (auto & prefix : evalSettings.allowedUris.get())
-        if (uri == prefix ||
-            (uri.size() > prefix.size()
-            && prefix.size() > 0
-            && hasPrefix(uri, prefix)
-            && (prefix[prefix.size() - 1] == '/' || uri[prefix.size()] == '/')))
-            return;
+    if (isAllowedURI(uri, evalSettings.allowedUris.get())) return;

     /* If the URI is a path, then check it against allowedPaths as
        well. */
     if (hasPrefix(uri, "/")) {
-        checkSourcePath(rootPath(CanonPath(uri)));
+        if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
+            rootFS2->checkAccess(CanonPath(uri));
         return;
     }

     if (hasPrefix(uri, "file://")) {
-        checkSourcePath(rootPath(CanonPath(std::string(uri, 7))));
+        if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
+            rootFS2->checkAccess(CanonPath(uri.substr(7)));
         return;
     }
@@ -1179,10 +1152,8 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env)
 }


-void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial)
+void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial)
 {
-    auto path = checkSourcePath(path_);
-
     FileEvalCache::iterator i;
     if ((i = fileEvalCache.find(path)) != fileEvalCache.end()) {
         v = i->second;

@@ -1203,7 +1174,7 @@ void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial
         e = j->second;

     if (!e)
-        e = parseExprFromFile(checkSourcePath(resolvedPath));
+        e = parseExprFromFile(resolvedPath);

     fileParseCache[resolvedPath] = e;

@@ -1512,7 +1483,7 @@ void ExprOpHasAttr::eval(EvalState & state, Env & env, Value & v)
     e->eval(state, env, vTmp);

     for (auto & i : attrPath) {
-        state.forceValue(*vAttrs, noPos);
+        state.forceValue(*vAttrs, getPos());
         Bindings::iterator j;
         auto name = getName(i, state, env);
         if (vAttrs->type() != nAttrs ||

@@ -1681,7 +1652,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
         if (countCalls) primOpCalls[name]++;

         try {
-            vCur.primOp->fun(*this, noPos, args, vCur);
+            vCur.primOp->fun(*this, vCur.determinePos(noPos), args, vCur);
         } catch (Error & e) {
             addErrorTrace(e, pos, "while calling the '%1%' builtin", name);
             throw;

@@ -1712,7 +1683,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
             /* We have all the arguments, so call the primop with
                the previous and new arguments. */

-            Value * vArgs[arity];
+            Value * vArgs[maxPrimOpArity];
             auto n = argsDone;
             for (Value * arg = &vCur; arg->isPrimOpApp(); arg = arg->primOpApp.left)
                 vArgs[--n] = arg->primOpApp.right;
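The vArgs change in the last hunk replaces a variable-length array -- a GCC/Clang extension whose stack consumption depends on runtime data -- with an array sized by the compile-time bound on primop arity. Schematically (the value of maxPrimOpArity below is assumed for illustration; the real constant is declared in eval.hh):

    #include <cstddef>

    struct Value;                                  // stand-in for Nix's Value
    constexpr std::size_t maxPrimOpArity = 8;      // assumed here; defined in eval.hh

    void sketch(std::size_t arity /* <= maxPrimOpArity */)
    {
        // Before: `Value * vArgs[arity];` -- a VLA whose size is only known
        // at runtime. After: a fixed upper bound, standard C++, bounded stack.
        Value * vArgs[maxPrimOpArity] = {};
        (void) arity; (void) vArgs;
    }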
@@ -1729,7 +1700,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
                     // 1. Unify this and above code. Heavily redundant.
                     // 2. Create a fake env (arg1, arg2, etc.) and a fake expr (arg1: arg2: etc: builtins.name arg1 arg2 etc)
                     // so the debugger allows to inspect the wrong parameters passed to the builtin.
-                    primOp->primOp->fun(*this, noPos, vArgs, vCur);
+                    primOp->primOp->fun(*this, vCur.determinePos(noPos), vArgs, vCur);
                 } catch (Error & e) {
                     addErrorTrace(e, pos, "while calling the '%1%' builtin", name);
                     throw;

@@ -1775,11 +1746,11 @@ void ExprCall::eval(EvalState & state, Env & env, Value & v)
     // 4: about 60
     // 5: under 10
     // This excluded attrset lambdas (`{...}:`). Contributions of mixed lambdas appears insignificant at ~150 total.
-    Value * vArgs[args.size()];
+    SmallValueVector<4> vArgs(args.size());
     for (size_t i = 0; i < args.size(); ++i)
         vArgs[i] = args[i]->maybeThunk(state, env);

-    state.callFunction(vFun, args.size(), vArgs, v, pos);
+    state.callFunction(vFun, args.size(), vArgs.data(), v, pos);
 }
@@ -1837,7 +1808,7 @@ https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbo
         }
     }

-    callFunction(fun, allocValue()->mkAttrs(attrs), res, noPos);
+    callFunction(fun, allocValue()->mkAttrs(attrs), res, pos);
 }


@@ -1873,7 +1844,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v)

 void ExprOpNot::eval(EvalState & state, Env & env, Value & v)
 {
-    v.mkBool(!state.evalBool(env, e, noPos, "in the argument of the not operator")); // XXX: FIXME: !
+    v.mkBool(!state.evalBool(env, e, getPos(), "in the argument of the not operator")); // XXX: FIXME: !
 }


@@ -2018,8 +1989,9 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
         return result;
     };

-    Value values[es->size()];
-    Value * vTmpP = values;
+    // List of returned strings. References to these Values must NOT be persisted.
+    SmallTemporaryValueVector<conservativeStackReservation> values(es->size());
+    Value * vTmpP = values.data();

     for (auto & [i_pos, i] : *es) {
         Value & vTmp = *vTmpP++;

@@ -2313,7 +2285,7 @@ BackedStringView EvalState::coerceToString(
             std::string result;
             for (auto [n, v2] : enumerate(v.listItems())) {
                 try {
-                    result += *coerceToString(noPos, *v2, context,
+                    result += *coerceToString(pos, *v2, context,
                         "while evaluating one element of the list",
                         coerceMore, copyToStore, canonicalizePath);
                 } catch (Error & e) {

@@ -2460,8 +2432,8 @@ SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value &

 bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_view errorCtx)
 {
-    forceValue(v1, noPos);
-    forceValue(v2, noPos);
+    forceValue(v1, pos);
+    forceValue(v2, pos);

     /* !!! Hack to support some old broken code that relies on pointer
        equality tests between sets. (Specifically, builderDefs calls
@@ -30,7 +30,6 @@ class EvalState;
 class StorePath;
 struct SingleDerivedPath;
 enum RepairFlag : bool;
-struct FSInputAccessor;
 struct MemoryInputAccessor;


@@ -217,18 +216,12 @@ public:
      */
     RepairFlag repair;

-    /**
-     * The allowed filesystem paths in restricted or pure evaluation
-     * mode.
-     */
-    std::optional<PathSet> allowedPaths;
-
     Bindings emptyBindings;

     /**
      * The accessor for the root filesystem.
      */
-    const ref<FSInputAccessor> rootFS;
+    const ref<InputAccessor> rootFS;

     /**
      * The in-memory filesystem for <nix/...> paths.

@@ -342,11 +335,6 @@ private:

     std::map<std::string, std::optional<std::string>> searchPathResolved;

-    /**
-     * Cache used by checkSourcePath().
-     */
-    std::unordered_map<Path, SourcePath> resolvedPaths;
-
     /**
      * Cache used by prim_match().
      */

@@ -396,12 +384,6 @@ public:
      */
     void allowAndSetStorePathString(const StorePath & storePath, Value & v);

-    /**
-     * Check whether access to a path is allowed and throw an error if
-     * not. Otherwise return the canonicalised path.
-     */
-    SourcePath checkSourcePath(const SourcePath & path);
-
     void checkURI(const std::string & uri);

     /**

@@ -445,13 +427,15 @@ public:
     SourcePath findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);

     /**
-     * Try to resolve a search path value (not the optional key part)
+     * Try to resolve a search path value (not the optional key part).
      *
      * If the specified search path element is a URI, download it.
      *
      * If it is not found, return `std::nullopt`
      */
-    std::optional<std::string> resolveSearchPathPath(const SearchPath::Path & path);
+    std::optional<std::string> resolveSearchPathPath(
+        const SearchPath::Path & elem,
+        bool initAccessControl = false);

     /**
      * Evaluate an expression to normal form

@@ -756,6 +740,13 @@ public:
      */
     [[nodiscard]] StringMap realiseContext(const NixStringContext & context);

+    /* Call the binary path filter predicate used builtins.path etc. */
+    bool callPathFilter(
+        Value * filterFun,
+        const SourcePath & path,
+        std::string_view pathArg,
+        PosIdx pos);
+
 private:

     /**

@@ -841,6 +832,11 @@ std::string showType(const Value & v);
  */
 SourcePath resolveExprPath(SourcePath path);

+/**
+ * Whether a URI is allowed, assuming restrictEval is enabled
+ */
+bool isAllowedURI(std::string_view uri, const Strings & allowedPaths);
+
 struct InvalidPathError : EvalError
 {
     Path path;
@@ -904,7 +904,7 @@ Fingerprint LockedFlake::getFingerprint() const
     // FIXME: as an optimization, if the flake contains a lock file
     // and we haven't changed it, then it's sufficient to use
     // flake.sourceInfo.storePath for the fingerprint.
-    return hashString(htSHA256,
+    return hashString(HashAlgorithm::SHA256,
         fmt("%s;%s;%d;%d;%s",
             flake.storePath.to_string(),
             flake.lockedRef.subdir,
@@ -90,7 +90,7 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
             fragment = percentDecode(url.substr(fragmentStart+1));
         }
         if (pathEnd != std::string::npos && fragmentStart != std::string::npos) {
-            query = decodeQuery(url.substr(pathEnd+1, fragmentStart));
+            query = decodeQuery(url.substr(pathEnd+1, fragmentStart-pathEnd-1));
         }

         if (baseDir) {
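The flakeref.cc change above fixes a classic substr misuse: std::string::substr takes (position, count), not (position, end index). A self-contained demonstration with an illustrative URL:

    #include <cassert>
    #include <string>

    int main()
    {
        std::string url = "path?query=1#frag";
        auto pathEnd = url.find('?');       // 4
        auto fragmentStart = url.find('#'); // 12

        // Old: the absolute index 12 is interpreted as a count of 12
        // characters, so the fragment leaks into the query string.
        auto wrong = url.substr(pathEnd + 1, fragmentStart);
        assert(wrong == "query=1#frag");

        // Fixed: the count is the distance between the two delimiters.
        auto right = url.substr(pathEnd + 1, fragmentStart - pathEnd - 1);
        assert(right == "query=1");
    }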
42 src/libexpr/gc-small-vector.hh Normal file

@@ -0,0 +1,42 @@
+#pragma once
+
+#include <boost/container/small_vector.hpp>
+
+#if HAVE_BOEHMGC
+
+#include <gc/gc.h>
+#include <gc/gc_cpp.h>
+#include <gc/gc_allocator.h>
+
+#endif
+
+namespace nix {
+
+struct Value;
+
+/**
+ * A GC compatible vector that may used a reserved portion of `nItems` on the stack instead of allocating on the heap.
+ */
+#if HAVE_BOEHMGC
+template <typename T, size_t nItems>
+using SmallVector = boost::container::small_vector<T, nItems, traceable_allocator<T>>;
+#else
+template <typename T, size_t nItems>
+using SmallVector = boost::container::small_vector<T, nItems>;
+#endif
+
+/**
+ * A vector of value pointers. See `SmallVector`.
+ */
+template <size_t nItems>
+using SmallValueVector = SmallVector<Value *, nItems>;
+
+/**
+ * A vector of values that must not be referenced after the vector is destroyed.
+ *
+ * See also `SmallValueVector`.
+ */
+template <size_t nItems>
+using SmallTemporaryValueVector = SmallVector<Value, nItems>;
+
+}
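These aliases lean on the small-buffer optimization of boost::container::small_vector: up to nItems elements live inline in the object (on the stack, for locals), and only larger sizes allocate -- through the GC-traceable allocator when Boehm GC is enabled. A quick illustration without the GC allocator, assuming Boost.Container is available:

    #include <boost/container/small_vector.hpp>
    #include <cassert>

    int main()
    {
        boost::container::small_vector<int, 4> v;
        for (int i = 0; i < 4; ++i) v.push_back(i); // fits in the inline buffer
        v.push_back(4);                             // fifth element spills to the heap
        assert(v.size() == 5 && v[4] == 4);
    }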
@@ -16,7 +16,7 @@ libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/lib

 libexpr_LIBS = libutil libstore libfetchers

-libexpr_LDFLAGS += -lboost_context -lboost_regex -pthread
+libexpr_LDFLAGS += -lboost_context -pthread
 ifdef HOST_LINUX
 libexpr_LDFLAGS += -ldl
 endif

@@ -36,7 +36,7 @@ $(d)/lexer-tab.cc $(d)/lexer-tab.hh: $(d)/lexer.l

 clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh

-$(eval $(call install-file-in, $(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))
+$(eval $(call install-file-in, $(buildprefix)$(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))

 $(foreach i, $(wildcard src/libexpr/value/*.hh), \
   $(eval $(call install-file-in, $(i), $(includedir)/nix/value, 0644)))

@@ -47,4 +47,4 @@ $(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh

 $(d)/eval.cc: $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh $(d)/flake/call-flake.nix.gen.hh

-src/libexpr/primops/fromTOML.o: ERROR_SWITCH_ENUM =
+$(buildprefix)src/libexpr/primops/fromTOML.o: ERROR_SWITCH_ENUM =
@@ -405,6 +405,7 @@ struct ExprOpNot : Expr
 {
     Expr * e;
     ExprOpNot(Expr * e) : e(e) { };
+    PosIdx getPos() const override { return e->getPos(); }
     COMMON_METHODS
 };
@@ -692,16 +692,17 @@ SourcePath resolveExprPath(SourcePath path)

     /* If `path' is a symlink, follow it. This is so that relative
        path references work. */
-    while (true) {
+    while (!path.path.isRoot()) {
         // Basic cycle/depth limit to avoid infinite loops.
         if (++followCount >= maxFollow)
             throw Error("too many symbolic links encountered while traversing the path '%s'", path);
-        if (path.lstat().type != InputAccessor::tSymlink) break;
-        path = {path.accessor, CanonPath(path.readLink(), path.path.parent().value_or(CanonPath::root))};
+        auto p = path.parent().resolveSymlinks() + path.baseName();
+        if (p.lstat().type != InputAccessor::tSymlink) break;
+        path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))};
     }

     /* If `path' refers to a directory, append `/default.nix'. */
-    if (path.lstat().type == InputAccessor::tDirectory)
+    if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory)
         return path + "default.nix";

     return path;
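The loop above now resolves symlinks in the parent directory before lstat'ing the final component. Its general shape -- symlink following with a depth cap -- looks roughly like this as standalone std::filesystem code (an approximation; Nix goes through its InputAccessor abstraction instead):

    #include <filesystem>
    #include <stdexcept>

    namespace fs = std::filesystem;

    fs::path followSymlinks(fs::path p, unsigned maxFollow = 1024)
    {
        unsigned followCount = 0;
        while (fs::is_symlink(p)) {
            if (++followCount >= maxFollow)
                throw std::runtime_error("too many symbolic links: " + p.string());
            // Resolve the target relative to the symlink's own directory;
            // an absolute target simply replaces the path.
            p = (p.parent_path() / fs::read_symlink(p)).lexically_normal();
        }
        return p;
    }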
@@ -716,7 +717,7 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path)

 Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
 {
-    auto buffer = path.readFile();
+    auto buffer = path.resolveSymlinks().readFile();
     // readFile hopefully have left some extra space for terminators
     buffer.append("\0\0", 2);
     return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);

@@ -783,7 +784,7 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_
 }


-std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0)
+std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl)
 {
     auto & value = value0.s;
     auto i = searchPathResolved.find(value);

@@ -800,7 +801,6 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
                 logWarning({
                     .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
                 });
-                res = std::nullopt;
             }
         }

@@ -814,6 +814,20 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa

     else {
         auto path = absPath(value);
+
+        /* Allow access to paths in the search path. */
+        if (initAccessControl) {
+            allowPath(path);
+            if (store->isInStore(path)) {
+                try {
+                    StorePathSet closure;
+                    store->computeFSClosure(store->toStorePath(path).first, closure);
+                    for (auto & p : closure)
+                        allowPath(p);
+                } catch (InvalidPath &) { }
+            }
+        }
+
         if (pathExists(path))
             res = { path };
         else {

@@ -829,7 +843,7 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
     else
         debug("failed to resolve search path element '%s'", value);

-    searchPathResolved[value] = res;
+    searchPathResolved.emplace(value, res);
     return res;
 }
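The final hunk's switch from searchPathResolved[value] = res to emplace also changes semantics: emplace inserts only when the key is absent, while operator[] assignment overwrites an existing entry. A small demonstration:

    #include <cassert>
    #include <map>
    #include <optional>
    #include <string>

    int main()
    {
        std::map<std::string, std::optional<std::string>> cache;

        cache.emplace("key", "first");
        cache.emplace("key", "second"); // no effect: the key is already cached
        assert(*cache.at("key") == "first");

        cache["key"] = "second";        // operator[] assignment overwrites
        assert(*cache.at("key") == "second");
    }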
@@ -4,6 +4,7 @@
 #include "eval-inline.hh"
 #include "eval.hh"
 #include "eval-settings.hh"
+#include "gc-small-vector.hh"
 #include "globals.hh"
 #include "json-to-value.hh"
 #include "names.hh"

@@ -14,9 +15,9 @@
 #include "value-to-json.hh"
 #include "value-to-xml.hh"
 #include "primops.hh"
+#include "fs-input-accessor.hh"

 #include <boost/container/small_vector.hpp>
-#include <boost/regex.hpp>
 #include <nlohmann/json.hpp>

 #include <sys/types.h>

@@ -25,6 +26,7 @@

 #include <algorithm>
 #include <cstring>
+#include <regex>
 #include <dlfcn.h>

 #include <cmath>
@@ -89,9 +91,8 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
         for (auto & [outputName, outputPath] : outputs) {
             /* Add the output of this derivations to the allowed
                paths. */
-            if (allowedPaths) {
-                allowPath(outputPath);
-            }
+            allowPath(store->toRealPath(outputPath));
             /* Get all the output paths corresponding to the placeholders we had */
             if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
                 res.insert_or_assign(

@@ -109,27 +110,19 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
     return res;
 }

-struct RealisePathFlags {
-    // Whether to check that the path is allowed in pure eval mode
-    bool checkForPureEval = true;
-};
-
-static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, const RealisePathFlags flags = {})
+static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, bool resolveSymlinks = true)
 {
     NixStringContext context;

     auto path = state.coerceToPath(noPos, v, context, "while realising the context of a path");

     try {
-        if (!context.empty()) {
+        if (!context.empty() && path.accessor == state.rootFS) {
             auto rewrites = state.realiseContext(context);
             auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context);
-            return {path.accessor, CanonPath(realPath)};
+            path = {path.accessor, CanonPath(realPath)};
         }
-
-        return flags.checkForPureEval
-            ? state.checkSourcePath(path)
-            : path;
+        return resolveSymlinks ? path.resolveSymlinks() : path;
     } catch (Error & e) {
         e.addTrace(state.positions[pos], "while realising the context of path '%s'", path);
         throw;
@@ -169,7 +162,7 @@ static void mkOutputString(
    argument. */
 static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * vScope, Value & v)
 {
-    auto path = realisePath(state, pos, vPath);
+    auto path = realisePath(state, pos, vPath, false);
     auto path2 = path.path.abs();

     // FIXME

@@ -1316,7 +1309,7 @@ drvName, Bindings * attrs, Value & v)
                 .errPos = state.positions[noPos]
             }));

-            auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo));
+            auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo));

             auto method = ingestionMethod.value_or(FileIngestionMethod::Flat);

@@ -1338,7 +1331,7 @@ drvName, Bindings * attrs, Value & v)
                 .errPos = state.positions[noPos]
             });

-            auto ht = parseHashTypeOpt(outputHashAlgo).value_or(htSHA256);
+            auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256);
             auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive);

             for (auto & i : outputs) {

@@ -1347,13 +1340,13 @@ drvName, Bindings * attrs, Value & v)
                 drv.outputs.insert_or_assign(i,
                     DerivationOutput::Impure {
                         .method = method,
-                        .hashType = ht,
+                        .hashAlgo = ha,
                     });
             else
                 drv.outputs.insert_or_assign(i,
                     DerivationOutput::CAFloating {
                         .method = method,
-                        .hashType = ht,
+                        .hashAlgo = ha,
                     });
         }
     }
@@ -1492,7 +1485,7 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args,
         }));

     NixStringContext context;
-    auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'")).path;
+    auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'").path;
     /* Resolve symlinks in ‘path’, unless ‘path’ itself is a symlink
        directly in the store. The latter condition is necessary so
        e.g. nix-push does the right thing. */

@@ -1532,29 +1525,19 @@ static RegisterPrimOp primop_storePath({

 static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
-    auto & arg = *args[0];
-
-    /* We don’t check the path right now, because we don’t want to
-       throw if the path isn’t allowed, but just return false (and we
-       can’t just catch the exception here because we still want to
-       throw if something in the evaluation of `arg` tries to
-       access an unauthorized path). */
-    auto path = realisePath(state, pos, arg, { .checkForPureEval = false });
-
-    /* SourcePath doesn't know about trailing slash. */
-    auto mustBeDir = arg.type() == nString
-        && (arg.string_view().ends_with("/")
-            || arg.string_view().ends_with("/."));
-
     try {
-        auto checked = state.checkSourcePath(path);
-        auto st = checked.maybeLstat();
+        auto & arg = *args[0];
+
+        auto path = realisePath(state, pos, arg);
+
+        /* SourcePath doesn't know about trailing slash. */
+        auto mustBeDir = arg.type() == nString
+            && (arg.string_view().ends_with("/")
+                || arg.string_view().ends_with("/."));
+
+        auto st = path.maybeLstat();
         auto exists = st && (!mustBeDir || st->type == SourceAccessor::tDirectory);
         v.mkBool(exists);
-    } catch (SysError & e) {
-        /* Don't give away info from errors while canonicalising
-           ‘path’ in restricted mode. */
-        v.mkBool(false);
     } catch (RestrictedPathError & e) {
         v.mkBool(false);
     }

@@ -1698,7 +1681,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V

     auto path = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.findFile");

-    v.mkPath(state.checkSourcePath(state.findFile(searchPath, path, pos)));
+    v.mkPath(state.findFile(searchPath, path, pos));
 }

 static RegisterPrimOp primop_findFile(PrimOp {
@@ -1753,17 +1736,17 @@ static RegisterPrimOp primop_findFile(PrimOp {
 /* Return the cryptographic hash of a file in base-16. */
 static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
-    auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
-    std::optional<HashType> ht = parseHashType(type);
-    if (!ht)
+    auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
+    std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
+    if (!ha)
         state.debugThrowLastTrace(Error({
-            .msg = hintfmt("unknown hash type '%1%'", type),
+            .msg = hintfmt("unknown hash algo '%1%'", algo),
             .errPos = state.positions[pos]
         }));

     auto path = realisePath(state, pos, *args[1]);

-    v.mkString(hashString(*ht, path.readFile()).to_string(HashFormat::Base16, false));
+    v.mkString(hashString(*ha, path.readFile()).to_string(HashFormat::Base16, false));
 }

 static RegisterPrimOp primop_hashFile({

@@ -1788,7 +1771,7 @@ static std::string_view fileTypeToString(InputAccessor::Type type)

 static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
-    auto path = realisePath(state, pos, *args[0]);
+    auto path = realisePath(state, pos, *args[0], false);
     /* Retrieve the directory entry type and stringize it. */
     v.mkString(fileTypeToString(path.lstat().type));
 }
@@ -2177,11 +2160,35 @@ static RegisterPrimOp primop_toFile({
     .fun = prim_toFile,
 });

+bool EvalState::callPathFilter(
+    Value * filterFun,
+    const SourcePath & path,
+    std::string_view pathArg,
+    PosIdx pos)
+{
+    auto st = path.lstat();
+
+    /* Call the filter function. The first argument is the path, the
+       second is a string indicating the type of the file. */
+    Value arg1;
+    arg1.mkString(pathArg);
+
+    Value arg2;
+    // assert that type is not "unknown"
+    arg2.mkString(fileTypeToString(st.type));
+
+    Value * args []{&arg1, &arg2};
+    Value res;
+    callFunction(*filterFun, 2, args, res, pos);
+
+    return forceBool(res, pos, "while evaluating the return value of the path filter function");
+}
+
 static void addPath(
     EvalState & state,
     const PosIdx pos,
     std::string_view name,
-    Path path,
+    SourcePath path,
     Value * filterFun,
     FileIngestionMethod method,
     const std::optional<Hash> expectedHash,
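The contract callPathFilter() implements is a two-argument predicate: a path string and a file-type string ("regular", "directory", "symlink", or "unknown"), returning a bool. A plain C++ analogue of such a predicate, outside the Value machinery (the function is illustrative, and std::string::ends_with requires C++20):

    #include <string>

    // Keep directories (so recursion continues) and C++ sources; drop the rest.
    bool keepSources(const std::string & path, const std::string & type)
    {
        return type == "directory"
            || path.ends_with(".cc")
            || path.ends_with(".hh");
    }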
@@ -2189,48 +2196,29 @@ static void addPath(
     const NixStringContext & context)
 {
     try {
-        // FIXME: handle CA derivation outputs (where path needs to
-        // be rewritten to the actual output).
-        auto rewrites = state.realiseContext(context);
-        path = state.toRealPath(rewriteStrings(path, rewrites), context);
-
         StorePathSet refs;

-        if (state.store->isInStore(path)) {
+        if (path.accessor == state.rootFS && state.store->isInStore(path.path.abs())) {
+            // FIXME: handle CA derivation outputs (where path needs to
+            // be rewritten to the actual output).
+            auto rewrites = state.realiseContext(context);
+            path = {state.rootFS, CanonPath(state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context))};
+
             try {
-                auto [storePath, subPath] = state.store->toStorePath(path);
+                auto [storePath, subPath] = state.store->toStorePath(path.path.abs());
                 // FIXME: we should scanForReferences on the path before adding it
                 refs = state.store->queryPathInfo(storePath)->references;
-                path = state.store->toRealPath(storePath) + subPath;
+                path = {state.rootFS, CanonPath(state.store->toRealPath(storePath) + subPath)};
             } catch (Error &) { // FIXME: should be InvalidPathError
             }
         }

-        path = evalSettings.pureEval && expectedHash
-            ? path
-            : state.checkSourcePath(state.rootPath(CanonPath(path))).path.abs();
-
-        PathFilter filter = filterFun ? ([&](const Path & path) {
-            auto st = lstat(path);
-
-            /* Call the filter function. The first argument is the path,
-               the second is a string indicating the type of the file. */
-            Value arg1;
-            arg1.mkString(path);
-
-            Value arg2;
-            arg2.mkString(
-                S_ISREG(st.st_mode) ? "regular" :
-                S_ISDIR(st.st_mode) ? "directory" :
-                S_ISLNK(st.st_mode) ? "symlink" :
-                "unknown" /* not supported, will fail! */);
-
-            Value * args []{&arg1, &arg2};
-            Value res;
-            state.callFunction(*filterFun, 2, args, res, pos);
-
-            return state.forceBool(res, pos, "while evaluating the return value of the path filter function");
-        }) : defaultPathFilter;
+        std::unique_ptr<PathFilter> filter;
+        if (filterFun)
+            filter = std::make_unique<PathFilter>([&](const Path & p) {
+                auto p2 = CanonPath(p);
+                return state.callPathFilter(filterFun, {path.accessor, p2}, p2.abs(), pos);
+            });

         std::optional<StorePath> expectedStorePath;
         if (expectedHash)

@@ -2241,7 +2229,7 @@ static void addPath(
         });

     if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
-        auto dstPath = state.rootPath(CanonPath(path)).fetchToStore(state.store, name, method, &filter, state.repair);
+        auto dstPath = path.fetchToStore(state.store, name, method, filter.get(), state.repair);
         if (expectedHash && expectedStorePath != dstPath)
             state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
         state.allowAndSetStorePathString(dstPath, v);
@@ -2260,7 +2248,8 @@ static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * arg
     auto path = state.coerceToPath(pos, *args[1], context,
         "while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'");
     state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource");
-    addPath(state, pos, path.baseName(), path.path.abs(), args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
+
+    addPath(state, pos, path.baseName(), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
 }

 static RegisterPrimOp primop_filterSource({

@@ -2340,7 +2329,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
         else if (n == "recursive")
             method = FileIngestionMethod { state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path") };
         else if (n == "sha256")
-            expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), htSHA256);
+            expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256);
         else
             state.debugThrowLastTrace(EvalError({
                 .msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]),

@@ -2355,7 +2344,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
     if (name.empty())
         name = path->baseName();

-    addPath(state, pos, name, path->path.abs(), filterFun, method, expectedHash, v, context);
+    addPath(state, pos, name, *path, filterFun, method, expectedHash, v, context);
 }

 static RegisterPrimOp primop_path({
@@ -2729,7 +2718,7 @@ static void prim_catAttrs(EvalState & state, const PosIdx pos, Value * * args, V
     auto attrName = state.symbols.create(state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.catAttrs"));
     state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.catAttrs");

-    Value * res[args[1]->listSize()];
+    SmallValueVector<nonRecursiveStackReservation> res(args[1]->listSize());
     size_t found = 0;

     for (auto v2 : args[1]->listItems()) {

@@ -3064,8 +3053,7 @@ static void prim_filter(EvalState & state, const PosIdx pos, Value * * args, Val

     state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filter");

-    // FIXME: putting this on the stack is risky.
-    Value * vs[args[1]->listSize()];
+    SmallValueVector<nonRecursiveStackReservation> vs(args[1]->listSize());
     size_t k = 0;

     bool same = true;

@@ -3461,7 +3449,8 @@ static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args,
     state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.concatMap");
     auto nrLists = args[1]->listSize();

-    Value lists[nrLists];
+    // List of returned lists before concatenation. References to these Values must NOT be persisted.
+    SmallTemporaryValueVector<conservativeStackReservation> lists(nrLists);
     size_t len = 0;

     for (unsigned int n = 0; n < nrLists; ++n) {
@@ -3765,18 +3754,18 @@ static RegisterPrimOp primop_stringLength({
 /* Return the cryptographic hash of a string in base-16. */
 static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
-    auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
-    std::optional<HashType> ht = parseHashType(type);
-    if (!ht)
+    auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
+    std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
+    if (!ha)
         state.debugThrowLastTrace(Error({
-            .msg = hintfmt("unknown hash type '%1%'", type),
+            .msg = hintfmt("unknown hash algo '%1%'", algo),
             .errPos = state.positions[pos]
         }));

     NixStringContext context; // discarded
     auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");

-    v.mkString(hashString(*ht, s).to_string(HashFormat::Base16, false));
+    v.mkString(hashString(*ha, s).to_string(HashFormat::Base16, false));
 }

 static RegisterPrimOp primop_hashString({

@@ -3799,15 +3788,15 @@ static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args
     auto hash = state.forceStringNoCtx(*iteratorHash->value, pos, "while evaluating the attribute 'hash'");

     Bindings::iterator iteratorHashAlgo = inputAttrs->find(state.symbols.create("hashAlgo"));
-    std::optional<HashType> ht = std::nullopt;
+    std::optional<HashAlgorithm> ha = std::nullopt;
     if (iteratorHashAlgo != inputAttrs->end()) {
-        ht = parseHashType(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
+        ha = parseHashAlgo(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
     }

     Bindings::iterator iteratorToHashFormat = getAttr(state, state.symbols.create("toHashFormat"), args[0]->attrs, "while locating the attribute 'toHashFormat'");
     HashFormat hf = parseHashFormat(state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'"));

-    v.mkString(Hash::parseAny(hash, ht).to_string(hf, hf == HashFormat::SRI));
+    v.mkString(Hash::parseAny(hash, ha).to_string(hf, hf == HashFormat::SRI));
 }

 static RegisterPrimOp primop_convertHash({
@@ -3836,7 +3825,8 @@ static RegisterPrimOp primop_convertHash({

         The format of the resulting hash. Must be one of
         - `"base16"`
-        - `"base32"`
+        - `"nix32"`
+        - `"base32"` (deprecated alias for `"nix32"`)
         - `"base64"`
         - `"sri"`
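Converting between the formats listed above comes down to the same two calls the primop makes, Hash::parseAny() and Hash::to_string(). A sketch, assuming the HashAlgorithm/HashFormat names introduced in this changeset (the input is the SHA-256 of the empty string):

    #include "hash.hh"

    using namespace nix;

    void example()
    {
        auto h = Hash::parseAny(
            "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
            HashAlgorithm::SHA256);

        auto base16 = h.to_string(HashFormat::Base16, false);
        auto nix32  = h.to_string(HashFormat::Nix32, false);
        auto sri    = h.to_string(HashFormat::SRI, true);
        // sri == "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="
    }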
@ -3885,30 +3875,19 @@ static RegisterPrimOp primop_convertHash({
|
||||||
.fun = prim_convertHash,
|
.fun = prim_convertHash,
|
||||||
});
|
});
|
||||||
|
|
||||||
// regex aliases, switch between boost and std
|
|
||||||
using regex = boost::regex;
|
|
||||||
using regex_error = boost::regex_error;
|
|
||||||
using cmatch = boost::cmatch;
|
|
||||||
using cregex_iterator = boost::cregex_iterator;
|
|
||||||
namespace regex_constants = boost::regex_constants;
|
|
||||||
// overloaded function alias
|
|
||||||
constexpr auto regex_match = [] (auto &&...args) {
|
|
||||||
return boost::regex_match(std::forward<decltype(args)>(args)...);
|
|
||||||
};
|
|
||||||
|
|
||||||
struct RegexCache
|
struct RegexCache
|
||||||
{
|
{
|
||||||
// TODO use C++20 transparent comparison when available
|
// TODO use C++20 transparent comparison when available
|
||||||
std::unordered_map<std::string_view, regex> cache;
|
std::unordered_map<std::string_view, std::regex> cache;
|
||||||
std::list<std::string> keys;
|
std::list<std::string> keys;
|
||||||
|
|
||||||
regex get(std::string_view re)
|
std::regex get(std::string_view re)
|
||||||
{
|
{
|
||||||
auto it = cache.find(re);
|
auto it = cache.find(re);
|
||||||
if (it != cache.end())
|
if (it != cache.end())
|
||||||
return it->second;
|
return it->second;
|
||||||
keys.emplace_back(re);
|
keys.emplace_back(re);
|
||||||
return cache.emplace(keys.back(), regex(keys.back(), regex::extended)).first->second;
|
return cache.emplace(keys.back(), std::regex(keys.back(), std::regex::extended)).first->second;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
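A note on the `RegexCache` design that this hunk preserves across the boost-to-std switch: the map is keyed by `std::string_view`, so the pattern text must live somewhere with a stable address. A `std::list` never relocates its nodes, so views into its strings stay valid as the cache grows. A self-contained sketch of the same pattern (a simplified mirror, not the Nix code itself):

```cpp
// The keys list owns the pattern text; the map's string_view keys point
// into it. Compiled regexes are reused across calls instead of being
// recompiled on every builtins.match / builtins.split evaluation.
#include <iostream>
#include <list>
#include <regex>
#include <string>
#include <string_view>
#include <unordered_map>

struct RegexCache
{
    std::unordered_map<std::string_view, std::regex> cache;
    std::list<std::string> keys; // stable storage for the key text

    const std::regex & get(std::string_view re)
    {
        auto it = cache.find(re);
        if (it != cache.end())
            return it->second;
        keys.emplace_back(re);
        return cache.emplace(keys.back(), std::regex(keys.back(), std::regex::extended)).first->second;
    }
};

int main()
{
    RegexCache cache;
    const auto & re = cache.get("[a-z]+");
    std::cout << std::regex_match("hello", re) << "\n";            // 1
    std::cout << (&re == &cache.get("[a-z]+")) << "\n";            // 1: cached object reused
}
```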
@@ -3928,8 +3907,8 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v)
     NixStringContext context;
     const auto str = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.match");
 
-    cmatch match;
-    if (!regex_match(str.begin(), str.end(), match, regex)) {
+    std::cmatch match;
+    if (!std::regex_match(str.begin(), str.end(), match, regex)) {
         v.mkNull();
         return;
     }

@@ -3944,8 +3923,8 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v)
         (v.listElems()[i] = state.allocValue())->mkString(match[i + 1].str());
     }
 
-    } catch (regex_error & e) {
-        if (e.code() == regex_constants::error_space) {
+    } catch (std::regex_error & e) {
+        if (e.code() == std::regex_constants::error_space) {
             // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
             state.debugThrowLastTrace(EvalError({
                 .msg = hintfmt("memory limit exceeded by regular expression '%s'", re),

@@ -4008,8 +3987,8 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v)
     NixStringContext context;
     const auto str = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.split");
 
-    auto begin = cregex_iterator(str.begin(), str.end(), regex);
-    auto end = cregex_iterator();
+    auto begin = std::cregex_iterator(str.begin(), str.end(), regex);
+    auto end = std::cregex_iterator();
 
     // Any matches results are surrounded by non-matching results.
     const size_t len = std::distance(begin, end);

@@ -4048,8 +4027,8 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 
     assert(idx == 2 * len + 1);
 
-    } catch (regex_error & e) {
-        if (e.code() == regex_constants::error_space) {
+    } catch (std::regex_error & e) {
+        if (e.code() == std::regex_constants::error_space) {
             // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
             state.debugThrowLastTrace(EvalError({
                 .msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
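The `prim_match`/`prim_split` hunks swap the boost aliases for the equivalent `std::regex` facilities: `regex_match` over a character range with a `cmatch`, `cregex_iterator` for enumerating matches, and `regex_error`/`error_space` for the out-of-memory case. A minimal standalone sketch of those three pieces (plain standard C++, not the Nix primops):

```cpp
#include <iostream>
#include <iterator>
#include <regex>
#include <string>

int main()
{
    // POSIX extended syntax, as the RegexCache above compiles patterns.
    std::regex re("([a-z]+)-([0-9]+)", std::regex::extended);
    std::string s = "abc-123";

    // Whole-string match over an iterator range, capture groups in cmatch.
    std::cmatch m;
    if (std::regex_match(s.data(), s.data() + s.size(), m, re))
        std::cout << "name=" << m[1].str() << " num=" << m[2].str() << "\n";

    // Enumerate all matches; the text between them is what split() keeps.
    std::string t = "a-1 b-2";
    auto begin = std::cregex_iterator(t.data(), t.data() + t.size(), re);
    auto end = std::cregex_iterator();
    std::cout << "matches: " << std::distance(begin, end) << "\n"; // 2

    try {
        std::cmatch m2;
        std::regex_match(t.data(), t.data() + t.size(), m2, re);
    } catch (std::regex_error & e) {
        // libstdc++ reports exceeding _GLIBCXX_REGEX_STATE_LIMIT as error_space.
        if (e.code() == std::regex_constants::error_space)
            std::cerr << "memory limit exceeded by regular expression\n";
        else
            throw;
    }
}
```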
@@ -4410,7 +4389,7 @@ void EvalState::createBaseEnv()
     addConstant("__currentSystem", v, {
         .type = nString,
         .doc = R"(
-          The value of the [`system` configuration option](@docroot@/command-ref/conf-file.md#conf-pure-eval).
+          The value of the [`system` configuration option](@docroot@/command-ref/conf-file.md#conf-system).
 
           It can be used to set the `system` attribute for [`builtins.derivation`](@docroot@/language/derivations.md) such that the resulting derivation can be built on the same system that evaluates the Nix expression:
 
@@ -4459,7 +4438,7 @@ void EvalState::createBaseEnv()
         .doc = R"(
           Logical file system location of the [Nix store](@docroot@/glossary.md#gloss-store) currently in use.
 
-          This value is determined by the `store` parameter in [Store URLs](@docroot@/command-ref/new-cli/nix3-help-stores.md):
+          This value is determined by the `store` parameter in [Store URLs](@docroot@/store/types/index.md#store-url-format):
 
           ```shell-session
           $ nix-instantiate --store 'dummy://?store=/blah' --eval --expr builtins.storeDir
@@ -1,5 +1,6 @@
 #include "primops.hh"
 #include "store-api.hh"
+#include "realisation.hh"
 #include "make-content-addressed.hh"
 #include "url.hh"
 
@@ -31,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
             // be both a revision or a branch/tag name.
             auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial");
             if (std::regex_match(value.begin(), value.end(), revRegex))
-                rev = Hash::parseAny(value, htSHA1);
+                rev = Hash::parseAny(value, HashAlgorithm::SHA1);
             else
                 ref = value;
         }
@@ -79,7 +79,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
         attrs2.alloc("branch").mkString(*input2.getRef());
         // Backward compatibility: set 'rev' to
         // 0000000000000000000000000000000000000000 for a dirty tree.
-        auto rev2 = input2.getRev().value_or(Hash(htSHA1));
+        auto rev2 = input2.getRev().value_or(Hash(HashAlgorithm::SHA1));
         attrs2.alloc("rev").mkString(rev2.gitRev());
         attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12));
         if (auto revCount = input2.getRevCount())

@@ -46,7 +46,7 @@ void emitTreeAttrs(
         attrs.alloc("shortRev").mkString(rev->gitShortRev());
     } else if (emptyRevFallback) {
         // Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
-        auto emptyHash = Hash(htSHA1);
+        auto emptyHash = Hash(HashAlgorithm::SHA1);
         attrs.alloc("rev").mkString(emptyHash.gitRev());
         attrs.alloc("shortRev").mkString(emptyHash.gitShortRev());
     }
@@ -187,45 +187,215 @@ static RegisterPrimOp primop_fetchTree({
     .name = "fetchTree",
     .args = {"input"},
     .doc = R"(
-      Fetch a source tree or a plain file using one of the supported backends.
-      *input* must be a [flake reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references), either in attribute set representation or in the URL-like syntax.
-      The input should be "locked", that is, it should contain a commit hash or content hash unless impure evaluation (`--impure`) is enabled.
+      Fetch a file system tree or a plain file using one of the supported backends and return an attribute set with:
 
-      > **Note**
-      >
-      > The URL-like syntax requires the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled.
+      - the resulting fixed-output [store path](@docroot@/glossary.md#gloss-store-path)
+      - the corresponding [NAR](@docroot@/glossary.md#gloss-nar) hash
+      - backend-specific metadata (currently not documented). <!-- TODO: document output attributes -->
+
+      *input* must be an attribute set with the following attributes:
+
+      - `type` (String, required)
+
+        One of the [supported source types](#source-types).
+        This determines other required and allowed input attributes.
+
+      - `narHash` (String, optional)
+
+        The `narHash` parameter can be used to substitute the source of the tree.
+        It also allows for verification of tree contents that may not be provided by the underlying transfer mechanism.
+        If `narHash` is set, the source is first looked up in the Nix store and [substituters](@docroot@/command-ref/conf-file.md#conf-substituters), and only fetched if not available.
+
+      A subset of the output attributes of `fetchTree` can be re-used for subsequent calls to `fetchTree` to produce the same result again.
+      That is, `fetchTree` is idempotent.
+
+      Downloads are cached in `$XDG_CACHE_HOME/nix`.
+      The remote source will be fetched from the network if both are true:
+
+      - A NAR hash is supplied and the corresponding store path is not [valid](@docroot@/glossary.md#gloss-validity), that is, not available in the store
+
+        > **Note**
+        >
+        > [Substituters](@docroot@/command-ref/conf-file.md#conf-substituters) are not used in fetching.
+
+      - There is no cache entry or the cache entry is older than [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl)
+
+      ## Source types
+
+      The following source types and associated input attributes are supported.
+
+      <!-- TODO: It would be soooo much more predictable to work with (and
+      document) if `fetchTree` was a curried call with the first parameter for
+      `type` or an attribute like `builtins.fetchTree.git`! -->
+
+      - `"file"`
+
+        Place a plain file into the Nix store.
+        This is similar to [`builtins.fetchurl`](@docroot@/language/builtins.md#builtins-fetchurl).
+
+        - `url` (String, required)
+
+          Supported protocols:
+
+          - `https`
+
+            > **Example**
+            >
+            > ```nix
+            > fetchTree {
+            >   type = "file";
+            >   url = "https://example.com/index.html";
+            > }
+            > ```
+
+          - `http`
+
+            Insecure HTTP transfer for legacy sources.
+
+            > **Warning**
+            >
+            > HTTP performs no encryption or authentication.
+            > Use a `narHash` known in advance to ensure the output has expected contents.
+
+          - `file`
+
+            A file on the local file system.
+
+            > **Example**
+            >
+            > ```nix
+            > fetchTree {
+            >   type = "file";
+            >   url = "file:///home/eelco/nix/README.md";
+            > }
+            > ```
+
+      - `"tarball"`
+
+        Download a tar archive and extract it into the Nix store.
+        This has the same underlying implementation as [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball).
+
+        - `url` (String, required)
+
+          > **Example**
+          >
+          > ```nix
+          > fetchTree {
+          >   type = "tarball";
+          >   url = "https://github.com/NixOS/nixpkgs/tarball/nixpkgs-23.11";
+          > }
+          > ```
+
+      - `"git"`
+
+        Fetch a Git tree and copy it to the Nix store.
+        This is similar to [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit).
+
+        - `url` (String, required)
+
+          The URL formats supported are the same as for Git itself.
+
+          > **Example**
+          >
+          > ```nix
+          > fetchTree {
+          >   type = "git";
+          >   url = "git@github.com:NixOS/nixpkgs.git";
+          > }
+          > ```
+
+          > **Note**
+          >
+          > If the URL points to a local directory, and no `ref` or `rev` is given, Nix will only consider files added to the Git index, as listed by `git ls-files`, but use the *current file contents* of the Git working directory.
+
+        - `ref` (String, optional)
+
+          A [Git reference](https://git-scm.com/book/en/v2/Git-Internals-Git-References), such as a branch or tag name.
+
+          Default: `"HEAD"`
+
+        - `rev` (String, optional)
+
+          A Git revision; a commit hash.
+
+          Default: the tip of `ref`
+
+        - `shallow` (Bool, optional)
+
+          Make a shallow clone when fetching the Git tree.
+
+          Default: `false`
+
+        - `submodules` (Bool, optional)
+
+          Also fetch submodules if available.
+
+          Default: `false`
+
+        - `allRefs` (Bool, optional)
+
+          If set to `true`, always fetch the entire repository, even if the latest commit is still in the cache.
+          Otherwise, only the latest commit is fetched if it is not already cached.
+
+          Default: `false`
+
+        - `lastModified` (Integer, optional)
+
+          Unix timestamp of the fetched commit.
+
+          If set, pass through the value to the output attribute set.
+          Otherwise, generated from the fetched Git tree.
+
+        - `revCount` (Integer, optional)
+
+          Number of revisions in the history of the Git repository before the fetched commit.
+
+          If set, pass through the value to the output attribute set.
+          Otherwise, generated from the fetched Git tree.
+
+      The following input types are still subject to change:
+
+      - `"path"`
+      - `"github"`
+      - `"gitlab"`
+      - `"sourcehut"`
+      - `"mercurial"`
+
+      *input* can also be a [URL-like reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references).
+      The additional input types and the URL-like syntax require the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled.
 
-      Here are some examples of how to use `fetchTree`:
+      > **Example**
+      >
+      > Fetch a GitHub repository using the attribute set representation:
+      >
+      > ```nix
+      > builtins.fetchTree {
+      >   type = "github";
+      >   owner = "NixOS";
+      >   repo = "nixpkgs";
+      >   rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
+      > }
+      > ```
+      >
+      > This evaluates to the following attribute set:
+      >
+      > ```nix
+      > {
+      >   lastModified = 1686503798;
+      >   lastModifiedDate = "20230611171638";
+      >   narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc=";
+      >   outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source";
+      >   rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
+      >   shortRev = "ae2e6b3";
+      > }
+      > ```
 
-      - Fetch a GitHub repository using the attribute set representation:
-
-        ```nix
-        builtins.fetchTree {
-          type = "github";
-          owner = "NixOS";
-          repo = "nixpkgs";
-          rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
-        }
-        ```
-
-        This evaluates to the following attribute set:
-
-        ```
-        {
-          lastModified = 1686503798;
-          lastModifiedDate = "20230611171638";
-          narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc=";
-          outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source";
-          rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
-          shortRev = "ae2e6b3";
-        }
-        ```
-
-      - Fetch the same GitHub repository using the URL-like syntax:
-
-        ```
-        builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd"
-        ```
+      > **Example**
+      >
+      > Fetch the same GitHub repository using the URL-like syntax:
+      >
+      > ```nix
+      > builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd"
+      > ```
     )",
     .fun = prim_fetchTree,
     .experimentalFeature = Xp::FetchTree,
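The `narHash` substitution rule documented above — look the content up in the store first, fetch only on a miss — is also what makes `fetchTree` idempotent. A deliberately tiny standalone model of that behaviour (the `Store` map and function names here are illustrative stand-ins, not Nix's implementation):

```cpp
// Model: if a store already holds content with the expected NAR hash,
// the network fetch is skipped entirely; otherwise fetch and record it.
#include <functional>
#include <iostream>
#include <map>
#include <optional>
#include <string>

struct Store
{
    // narHash -> store contents (stand-in for real store paths)
    std::map<std::string, std::string> byNarHash;
};

std::string fetchTree(
    Store & store,
    const std::optional<std::string> & narHash,
    const std::function<std::string()> & download)
{
    if (narHash) {
        auto it = store.byNarHash.find(*narHash);
        if (it != store.byNarHash.end()) {
            std::cout << "cache hit, no network access\n";
            return it->second;
        }
    }
    auto contents = download(); // network fetch
    // A real implementation would verify `contents` against narHash here.
    if (narHash)
        store.byNarHash[*narHash] = contents;
    return contents;
}

int main()
{
    Store store;
    auto dl = [] { std::cout << "downloading...\n"; return std::string("tree"); };
    fetchTree(store, std::string("sha256-..."), dl); // downloads
    fetchTree(store, std::string("sha256-..."), dl); // idempotent: cache hit
}
```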
@@ -246,7 +416,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
         if (n == "url")
             url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch");
         else if (n == "sha256")
-            expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), htSHA256);
+            expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashAlgorithm::SHA256);
         else if (n == "name")
             name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
         else
@@ -276,7 +446,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
         state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who));
 
     // early exit if pinned and already in the store
-    if (expectedHash && expectedHash->type == htSHA256) {
+    if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) {
         auto expectedPath = state.store->makeFixedOutputPath(
             name,
             FixedOutputInfo {
@@ -301,10 +471,10 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
     if (expectedHash) {
         auto hash = unpack
             ? state.store->queryPathInfo(storePath)->narHash
-            : hashFile(htSHA256, state.store->toRealPath(storePath));
+            : hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath));
         if (hash != *expectedHash)
             state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n  specified: %s\n  got: %s",
-                *url, expectedHash->to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true)));
+                *url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true)));
     }
 
     state.allowAndSetStorePathString(storePath, v);
@@ -1,23 +0,0 @@
-check: libexpr-tests_RUN
-
-programs += libexpr-tests
-
-libexpr-tests_NAME := libnixexpr-tests
-
-libexpr-tests_DIR := $(d)
-
-ifeq ($(INSTALL_UNIT_TESTS), yes)
-  libexpr-tests_INSTALL_DIR := $(checkbindir)
-else
-  libexpr-tests_INSTALL_DIR :=
-endif
-
-libexpr-tests_SOURCES := \
-    $(wildcard $(d)/*.cc) \
-    $(wildcard $(d)/value/*.cc)
-
-libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests -I src/libfetchers
-
-libexpr-tests_LIBS = libstore-tests libutils-tests libexpr libutil libstore libfetchers
-
-libexpr-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) -lgmock
@@ -67,7 +67,6 @@ class Symbol;
 class PosIdx;
 struct Pos;
 class StorePath;
-class Store;
 class EvalState;
 class XMLWriter;
 
@@ -424,10 +423,9 @@ public:
     SourcePath path() const
     {
         assert(internalType == tPath);
-        return SourcePath {
-            .accessor = ref(_path.accessor->shared_from_this()),
-            .path = CanonPath(CanonPath::unchecked_t(), _path.path)
-        };
+        return SourcePath(
+            ref(_path.accessor->shared_from_this()),
+            CanonPath(CanonPath::unchecked_t(), _path.path));
     }
 
     std::string_view string_view() const
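Why this call site had to change: later in this diff, `SourcePath` gains a user-declared constructor (see the `input-accessor.hh` hunk below), and a type with a user-declared constructor is no longer an aggregate, so designated-initializer syntax stops compiling. A standalone illustration of that C++ rule (the names here are hypothetical):

```cpp
// A struct with a user-declared constructor is not an aggregate, so
// designated initializers are rejected and a constructor call is needed.
#include <string>

struct Aggregate
{
    std::string accessor;
    std::string path;
};

struct WithCtor
{
    std::string accessor;
    std::string path;
    WithCtor(std::string accessor, std::string path = "/")
        : accessor(std::move(accessor)), path(std::move(path)) {}
};

int main()
{
    auto a = Aggregate{.accessor = "fs", .path = "/"};   // OK: aggregate
    // auto b = WithCtor{.accessor = "fs", .path = "/"}; // error: not an aggregate
    auto c = WithCtor("fs", "/");                        // OK: constructor call
    (void) a; (void) c;
}
```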
@@ -106,7 +106,7 @@ std::map<std::string, std::string> attrsToQuery(const Attrs & attrs)
 
 Hash getRevAttr(const Attrs & attrs, const std::string & name)
 {
-    return Hash::parseAny(getStrAttr(attrs, name), htSHA1);
+    return Hash::parseAny(getStrAttr(attrs, name), HashAlgorithm::SHA1);
 }
 
 }
@@ -289,8 +289,8 @@ std::string Input::getType() const
 std::optional<Hash> Input::getNarHash() const
 {
     if (auto s = maybeGetStrAttr(attrs, "narHash")) {
-        auto hash = s->empty() ? Hash(htSHA256) : Hash::parseSRI(*s);
-        if (hash.type != htSHA256)
+        auto hash = s->empty() ? Hash(HashAlgorithm::SHA256) : Hash::parseSRI(*s);
+        if (hash.algo != HashAlgorithm::SHA256)
             throw UsageError("narHash must use SHA-256");
         return hash;
     }
@@ -314,7 +314,7 @@ std::optional<Hash> Input::getRev() const
         } catch (BadHash &e) {
             // Default to sha1 for backwards compatibility with existing
             // usages (e.g. `builtins.fetchTree` calls or flake inputs).
-            hash = Hash::parseAny(*s, htSHA1);
+            hash = Hash::parseAny(*s, HashAlgorithm::SHA1);
         }
     }
 
@@ -374,7 +374,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
 std::pair<StorePath, Input> InputScheme::fetch(ref<Store> store, const Input & input)
 {
     auto [accessor, input2] = getAccessor(store, input);
-    auto storePath = accessor->root().fetchToStore(store, input2.getName());
+    auto storePath = SourcePath(accessor).fetchToStore(store, input2.getName());
     return {storePath, input2};
 }
 
src/libfetchers/filtering-input-accessor.cc (new file, 83 lines)

@@ -0,0 +1,83 @@
+#include "filtering-input-accessor.hh"
+
+namespace nix {
+
+std::string FilteringInputAccessor::readFile(const CanonPath & path)
+{
+    checkAccess(path);
+    return next->readFile(prefix + path);
+}
+
+bool FilteringInputAccessor::pathExists(const CanonPath & path)
+{
+    return isAllowed(path) && next->pathExists(prefix + path);
+}
+
+std::optional<InputAccessor::Stat> FilteringInputAccessor::maybeLstat(const CanonPath & path)
+{
+    checkAccess(path);
+    return next->maybeLstat(prefix + path);
+}
+
+InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath & path)
+{
+    checkAccess(path);
+    DirEntries entries;
+    for (auto & entry : next->readDirectory(prefix + path)) {
+        if (isAllowed(path + entry.first))
+            entries.insert(std::move(entry));
+    }
+    return entries;
+}
+
+std::string FilteringInputAccessor::readLink(const CanonPath & path)
+{
+    checkAccess(path);
+    return next->readLink(prefix + path);
+}
+
+std::string FilteringInputAccessor::showPath(const CanonPath & path)
+{
+    return next->showPath(prefix + path);
+}
+
+void FilteringInputAccessor::checkAccess(const CanonPath & path)
+{
+    if (!isAllowed(path))
+        throw makeNotAllowedError
+            ? makeNotAllowedError(path)
+            : RestrictedPathError("access to path '%s' is forbidden", showPath(path));
+}
+
+struct AllowListInputAccessorImpl : AllowListInputAccessor
+{
+    std::set<CanonPath> allowedPaths;
+
+    AllowListInputAccessorImpl(
+        ref<InputAccessor> next,
+        std::set<CanonPath> && allowedPaths,
+        MakeNotAllowedError && makeNotAllowedError)
+        : AllowListInputAccessor(SourcePath(next), std::move(makeNotAllowedError))
+        , allowedPaths(std::move(allowedPaths))
+    { }
+
+    bool isAllowed(const CanonPath & path) override
+    {
+        return path.isAllowed(allowedPaths);
+    }
+
+    void allowPath(CanonPath path) override
+    {
+        allowedPaths.insert(std::move(path));
+    }
+};
+
+ref<AllowListInputAccessor> AllowListInputAccessor::create(
+    ref<InputAccessor> next,
+    std::set<CanonPath> && allowedPaths,
+    MakeNotAllowedError && makeNotAllowedError)
+{
+    return make_ref<AllowListInputAccessorImpl>(next, std::move(allowedPaths), std::move(makeNotAllowedError));
+}
+
+}
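The new file above is a classic decorator: the filtering accessor wraps an inner accessor and consults an access policy before delegating each operation. A self-contained sketch of the same pattern with simplified stand-in types (names here are illustrative, not the Nix API):

```cpp
// An allow-list decorator over a minimal accessor interface: reads are
// delegated to the wrapped accessor only if the path is on the list.
#include <iostream>
#include <memory>
#include <set>
#include <stdexcept>
#include <string>

struct Accessor
{
    virtual ~Accessor() = default;
    virtual std::string readFile(const std::string & path) = 0;
};

struct FsAccessor : Accessor
{
    std::string readFile(const std::string & path) override
    {
        return "contents of " + path; // stand-in for a real filesystem read
    }
};

struct AllowListAccessor : Accessor
{
    std::shared_ptr<Accessor> next;
    std::set<std::string> allowed;

    AllowListAccessor(std::shared_ptr<Accessor> next, std::set<std::string> allowed)
        : next(std::move(next)), allowed(std::move(allowed)) {}

    std::string readFile(const std::string & path) override
    {
        if (!allowed.count(path))
            throw std::runtime_error("access to path '" + path + "' is forbidden");
        return next->readFile(path); // delegate to the wrapped accessor
    }
};

int main()
{
    AllowListAccessor acc(std::make_shared<FsAccessor>(), {"/flake.nix"});
    std::cout << acc.readFile("/flake.nix") << "\n"; // allowed
    try {
        acc.readFile("/secret");                     // filtered out
    } catch (const std::exception & e) {
        std::cout << e.what() << "\n";
    }
}
```

Splitting the policy out of `FSInputAccessor` (see the `fs-input-accessor.cc` hunks below) means any accessor, not just the POSIX one, can be wrapped with access control.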
src/libfetchers/filtering-input-accessor.hh (new file, 73 lines)

@@ -0,0 +1,73 @@
+#pragma once
+
+#include "input-accessor.hh"
+
+namespace nix {
+
+/**
+ * A function that should throw an exception of type
+ * `RestrictedPathError` explaining that access to `path` is
+ * forbidden.
+ */
+typedef std::function<RestrictedPathError(const CanonPath & path)> MakeNotAllowedError;
+
+/**
+ * An abstract wrapping `InputAccessor` that performs access
+ * control. Subclasses should override `isAllowed()` to implement an
+ * access control policy. The error message is customized at construction.
+ */
+struct FilteringInputAccessor : InputAccessor
+{
+    ref<InputAccessor> next;
+    CanonPath prefix;
+    MakeNotAllowedError makeNotAllowedError;
+
+    FilteringInputAccessor(const SourcePath & src, MakeNotAllowedError && makeNotAllowedError)
+        : next(src.accessor)
+        , prefix(src.path)
+        , makeNotAllowedError(std::move(makeNotAllowedError))
+    { }
+
+    std::string readFile(const CanonPath & path) override;
+
+    bool pathExists(const CanonPath & path) override;
+
+    std::optional<Stat> maybeLstat(const CanonPath & path) override;
+
+    DirEntries readDirectory(const CanonPath & path) override;
+
+    std::string readLink(const CanonPath & path) override;
+
+    std::string showPath(const CanonPath & path) override;
+
+    /**
+     * Call `makeNotAllowedError` to throw a `RestrictedPathError`
+     * exception if `isAllowed()` returns `false` for `path`.
+     */
+    void checkAccess(const CanonPath & path);
+
+    /**
+     * Return `true` iff access to path is allowed.
+     */
+    virtual bool isAllowed(const CanonPath & path) = 0;
+};
+
+/**
+ * A wrapping `InputAccessor` that checks paths against an allow-list.
+ */
+struct AllowListInputAccessor : public FilteringInputAccessor
+{
+    /**
+     * Grant access to the specified path.
+     */
+    virtual void allowPath(CanonPath path) = 0;
+
+    static ref<AllowListInputAccessor> create(
+        ref<InputAccessor> next,
+        std::set<CanonPath> && allowedPaths,
+        MakeNotAllowedError && makeNotAllowedError);
+
+    using FilteringInputAccessor::FilteringInputAccessor;
+};
+
+}
@@ -4,19 +4,12 @@
 
 namespace nix {
 
-struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
+struct FSInputAccessor : InputAccessor, PosixSourceAccessor
 {
     CanonPath root;
-    std::optional<std::set<CanonPath>> allowedPaths;
-    MakeNotAllowedError makeNotAllowedError;
 
-    FSInputAccessorImpl(
-        const CanonPath & root,
-        std::optional<std::set<CanonPath>> && allowedPaths,
-        MakeNotAllowedError && makeNotAllowedError)
+    FSInputAccessor(const CanonPath & root)
         : root(root)
-        , allowedPaths(std::move(allowedPaths))
-        , makeNotAllowedError(std::move(makeNotAllowedError))
     {
         displayPrefix = root.isRoot() ? "" : root.abs();
     }
@@ -27,39 +20,30 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
         std::function<void(uint64_t)> sizeCallback) override
     {
         auto absPath = makeAbsPath(path);
-        checkAllowed(absPath);
         PosixSourceAccessor::readFile(absPath, sink, sizeCallback);
     }
 
     bool pathExists(const CanonPath & path) override
     {
-        auto absPath = makeAbsPath(path);
-        return isAllowed(absPath) && PosixSourceAccessor::pathExists(absPath);
+        return PosixSourceAccessor::pathExists(makeAbsPath(path));
     }
 
     std::optional<Stat> maybeLstat(const CanonPath & path) override
     {
-        auto absPath = makeAbsPath(path);
-        checkAllowed(absPath);
-        return PosixSourceAccessor::maybeLstat(absPath);
+        return PosixSourceAccessor::maybeLstat(makeAbsPath(path));
     }
 
     DirEntries readDirectory(const CanonPath & path) override
     {
-        auto absPath = makeAbsPath(path);
-        checkAllowed(absPath);
         DirEntries res;
-        for (auto & entry : PosixSourceAccessor::readDirectory(absPath))
-            if (isAllowed(absPath + entry.first))
-                res.emplace(entry);
+        for (auto & entry : PosixSourceAccessor::readDirectory(makeAbsPath(path)))
+            res.emplace(entry);
         return res;
     }
 
     std::string readLink(const CanonPath & path) override
     {
-        auto absPath = makeAbsPath(path);
-        checkAllowed(absPath);
-        return PosixSourceAccessor::readLink(absPath);
+        return PosixSourceAccessor::readLink(makeAbsPath(path));
     }
 
     CanonPath makeAbsPath(const CanonPath & path)
@@ -67,59 +51,22 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
         return root + path;
     }
 
-    void checkAllowed(const CanonPath & absPath) override
-    {
-        if (!isAllowed(absPath))
-            throw makeNotAllowedError
-                ? makeNotAllowedError(absPath)
-                : RestrictedPathError("access to path '%s' is forbidden", absPath);
-    }
-
-    bool isAllowed(const CanonPath & absPath)
-    {
-        if (!absPath.isWithin(root))
-            return false;
-
-        if (allowedPaths) {
-            auto p = absPath.removePrefix(root);
-            if (!p.isAllowed(*allowedPaths))
-                return false;
-        }
-
-        return true;
-    }
-
-    void allowPath(CanonPath path) override
-    {
-        if (allowedPaths)
-            allowedPaths->insert(std::move(path));
-    }
-
-    bool hasAccessControl() override
-    {
-        return (bool) allowedPaths;
-    }
-
     std::optional<CanonPath> getPhysicalPath(const CanonPath & path) override
     {
        return makeAbsPath(path);
     }
 };
 
-ref<FSInputAccessor> makeFSInputAccessor(
-    const CanonPath & root,
-    std::optional<std::set<CanonPath>> && allowedPaths,
-    MakeNotAllowedError && makeNotAllowedError)
+ref<InputAccessor> makeFSInputAccessor(const CanonPath & root)
 {
-    return make_ref<FSInputAccessorImpl>(root, std::move(allowedPaths), std::move(makeNotAllowedError));
+    return make_ref<FSInputAccessor>(root);
 }
 
-ref<FSInputAccessor> makeStorePathAccessor(
+ref<InputAccessor> makeStorePathAccessor(
     ref<Store> store,
-    const StorePath & storePath,
-    MakeNotAllowedError && makeNotAllowedError)
+    const StorePath & storePath)
 {
-    return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)), {}, std::move(makeNotAllowedError));
+    return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)));
 }
 
 SourcePath getUnfilteredRootPath(CanonPath path)
@@ -7,26 +7,12 @@ namespace nix {
 class StorePath;
 class Store;
 
-struct FSInputAccessor : InputAccessor
-{
-    virtual void checkAllowed(const CanonPath & absPath) = 0;
-
-    virtual void allowPath(CanonPath path) = 0;
-
-    virtual bool hasAccessControl() = 0;
-};
-
-typedef std::function<RestrictedPathError(const CanonPath & path)> MakeNotAllowedError;
-
-ref<FSInputAccessor> makeFSInputAccessor(
-    const CanonPath & root,
-    std::optional<std::set<CanonPath>> && allowedPaths = {},
-    MakeNotAllowedError && makeNotAllowedError = {});
-
-ref<FSInputAccessor> makeStorePathAccessor(
+ref<InputAccessor> makeFSInputAccessor(
+    const CanonPath & root);
+
+ref<InputAccessor> makeStorePathAccessor(
     ref<Store> store,
-    const StorePath & storePath,
-    MakeNotAllowedError && makeNotAllowedError = {});
+    const StorePath & storePath);
 
 SourcePath getUnfilteredRootPath(CanonPath path);
 
@@ -94,7 +94,7 @@ Hash toHash(const git_oid & oid)
 #ifdef GIT_EXPERIMENTAL_SHA256
     assert(oid.type == GIT_OID_SHA1);
 #endif
-    Hash hash(htSHA1);
+    Hash hash(HashAlgorithm::SHA1);
     memcpy(hash.hash, oid.id, hash.hashSize);
     return hash;
 }
@@ -594,7 +594,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
         std::string re = R"(Good "git" signature for \* with .* key SHA256:[)";
         for (const fetchers::PublicKey & k : publicKeys){
             // Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally
-            auto fingerprint = trim(hashString(htSHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
+            auto fingerprint = trim(hashString(HashAlgorithm::SHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
             auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" );
             re += "(" + escaped_fingerprint + ")";
         }
@@ -612,7 +612,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
         fetchers::Attrs cacheKey({{"_what", "treeHashToNarHash"}, {"treeHash", treeHash.gitRev()}});
 
         if (auto res = fetchers::getCache()->lookup(cacheKey))
-            return Hash::parseAny(fetchers::getStrAttr(*res, "narHash"), htSHA256);
+            return Hash::parseAny(fetchers::getStrAttr(*res, "narHash"), HashAlgorithm::SHA256);
 
         auto narHash = accessor->hashPath(CanonPath::root);
 
@@ -725,7 +725,7 @@ struct GitInputAccessor : InputAccessor
         return toHash(*git_tree_entry_id(entry));
     }
 
-    std::map<CanonPath, TreeEntry> lookupCache;
+    std::unordered_map<CanonPath, TreeEntry> lookupCache;
 
     /* Recursively look up 'path' relative to the root. */
     git_tree_entry * lookup(const CanonPath & path)
@@ -9,6 +9,7 @@
 #include "processes.hh"
 #include "git.hh"
 #include "fs-input-accessor.hh"
+#include "filtering-input-accessor.hh"
 #include "mounted-input-accessor.hh"
 #include "git-utils.hh"
 #include "logging.hh"
@@ -52,7 +53,7 @@ bool touchCacheFile(const Path & path, time_t touch_time)
 Path getCachePath(std::string_view key)
 {
     return getCacheDir() + "/nix/gitv3/" +
-        hashString(htSHA256, key).to_string(HashFormat::Base32, false);
+        hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false);
 }
 
 // Returns the name of the HEAD branch.
@@ -369,7 +370,7 @@ struct GitInputScheme : InputScheme
     {
         auto checkHashType = [&](const std::optional<Hash> & hash)
         {
-            if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
+            if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256))
                 throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
         };
 
@@ -559,7 +560,7 @@ struct GitInputScheme : InputScheme
                     repoInfo.url
                     );
             } else
-                input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
+                input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), HashAlgorithm::SHA1).gitRev());
 
             // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
         }
@@ -639,7 +640,10 @@ struct GitInputScheme : InputScheme
                 repoInfo.workdirInfo.files.insert(submodule.path);
 
         ref<InputAccessor> accessor =
-            makeFSInputAccessor(CanonPath(repoInfo.url), repoInfo.workdirInfo.files, makeNotAllowedError(repoInfo.url));
+            AllowListInputAccessor::create(
+                makeFSInputAccessor(CanonPath(repoInfo.url)),
+                std::move(repoInfo.workdirInfo.files),
+                makeNotAllowedError(repoInfo.url));
 
         /* If the repo has submodules, return a mounted input accessor
            consisting of the accessor for the top-level repo and the
@@ -43,7 +43,7 @@ struct GitArchiveInputScheme : InputScheme
         auto size = path.size();
         if (size == 3) {
             if (std::regex_match(path[2], revRegex))
-                rev = Hash::parseAny(path[2], htSHA1);
+                rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
             else if (std::regex_match(path[2], refRegex))
                 ref = path[2];
             else
@@ -69,7 +69,7 @@ struct GitArchiveInputScheme : InputScheme
             if (name == "rev") {
                 if (rev)
                     throw BadURL("URL '%s' contains multiple commit hashes", url.url);
-                rev = Hash::parseAny(value, htSHA1);
+                rev = Hash::parseAny(value, HashAlgorithm::SHA1);
             }
             else if (name == "ref") {
                 if (!std::regex_match(value, refRegex))
@@ -323,9 +323,10 @@ struct GitHubInputScheme : GitArchiveInputScheme
             readFile(
                 store->toRealPath(
                     downloadFile(store, url, "source", false, headers).storePath)));
 
         return RefInfo {
-            .rev = Hash::parseAny(std::string { json["sha"] }, htSHA1),
-            .treeHash = Hash::parseAny(std::string { json["commit"]["tree"]["sha"] }, htSHA1)
+            .rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1),
+            .treeHash = Hash::parseAny(std::string { json["commit"]["tree"]["sha"] }, HashAlgorithm::SHA1)
         };
     }
 
@@ -396,8 +397,9 @@ struct GitLabInputScheme : GitArchiveInputScheme
             readFile(
                 store->toRealPath(
                     downloadFile(store, url, "source", false, headers).storePath)));
 
         return RefInfo {
-            .rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1)
+            .rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1)
         };
     }
 
@@ -489,7 +491,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
             throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref);
 
         return RefInfo {
-            .rev = Hash::parseAny(*id, htSHA1)
+            .rev = Hash::parseAny(*id, HashAlgorithm::SHA1)
         };
     }
 
@@ -20,7 +20,7 @@ struct IndirectInputScheme : InputScheme
         if (path.size() == 1) {
         } else if (path.size() == 2) {
             if (std::regex_match(path[1], revRegex))
-                rev = Hash::parseAny(path[1], htSHA1);
+                rev = Hash::parseAny(path[1], HashAlgorithm::SHA1);
             else if (std::regex_match(path[1], refRegex))
                 ref = path[1];
             else
@@ -31,7 +31,7 @@ struct IndirectInputScheme : InputScheme
             ref = path[1];
             if (!std::regex_match(path[2], revRegex))
                 throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]);
-            rev = Hash::parseAny(path[2], htSHA1);
+            rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
         } else
             throw BadURL("GitHub URL '%s' is invalid", url.url);
 
@@ -44,8 +44,8 @@ StorePath InputAccessor::fetchToStore(
 
     auto storePath =
         settings.readOnlyMode
-        ? store->computeStorePathFromDump(*source, name, method, htSHA256).first
-        : store->addToStoreFromDump(*source, name, method, htSHA256, repair);
+        ? store->computeStorePathFromDump(*source, name, method, HashAlgorithm::SHA256).first
+        : store->addToStoreFromDump(*source, name, method, HashAlgorithm::SHA256, repair);
 
     if (cacheKey)
         fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);
@@ -53,11 +53,6 @@ StorePath InputAccessor::fetchToStore(
     return storePath;
 }
 
-SourcePath InputAccessor::root()
-{
-    return {ref(shared_from_this()), CanonPath::root};
-}
-
 std::ostream & operator << (std::ostream & str, const SourcePath & path)
 {
     str << path.to_string();
@@ -88,7 +83,7 @@ SourcePath SourcePath::parent() const
 
 SourcePath SourcePath::resolveSymlinks() const
 {
-    auto res = accessor->root();
+    auto res = SourcePath(accessor);
 
     int linksAllowed = 1024;
 

@@ -36,8 +36,6 @@ struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<Inpu
         FileIngestionMethod method = FileIngestionMethod::Recursive,
         PathFilter * filter = nullptr,
         RepairFlag repair = NoRepair);
-
-    SourcePath root();
 };
 
 /**
@@ -51,6 +49,11 @@ struct SourcePath
     ref<InputAccessor> accessor;
     CanonPath path;
 
+    SourcePath(ref<InputAccessor> accessor, CanonPath path = CanonPath::root)
+        : accessor(std::move(accessor))
+        , path(std::move(path))
+    { }
+
     std::string_view baseName() const;
 
     /**
@@ -127,7 +130,7 @@ struct SourcePath
     { return accessor->getPhysicalPath(path); }
 
     std::string to_string() const
-    { return path.abs(); }
+    { return accessor->showPath(path); }
 
     /**
      * Append a `CanonPath` to this path.
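These hunks replace the `InputAccessor::root()` helper with a `SourcePath` constructor whose path defaults to `CanonPath::root`, and make `to_string()` delegate display to the accessor. A standalone model of that shape, with simplified stand-in types (the real `ref<InputAccessor>`/`CanonPath` are Nix internals):

```cpp
// Model of the new SourcePath: an (accessor, path) pair where the path
// defaults to the root, and string rendering is delegated to the accessor.
#include <iostream>
#include <memory>
#include <string>

struct InputAccessor
{
    std::string showPath(const std::string & path) const { return path; }
};

struct SourcePath
{
    std::shared_ptr<InputAccessor> accessor;
    std::string path;

    // Mirrors: SourcePath(ref<InputAccessor> accessor, CanonPath path = CanonPath::root)
    SourcePath(std::shared_ptr<InputAccessor> accessor, std::string path = "/")
        : accessor(std::move(accessor)), path(std::move(path)) {}

    // Mirrors the to_string() change above: the accessor decides how to render.
    std::string to_string() const { return accessor->showPath(path); }
};

int main()
{
    auto acc = std::make_shared<InputAccessor>();
    SourcePath root(acc);                  // replaces accessor->root()
    SourcePath file(acc, "/flake.nix");
    std::cout << root.to_string() << " " << file.to_string() << "\n";
}
```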
@@ -210,7 +210,7 @@ struct MercurialInputScheme : InputScheme
                 return files.count(file);
             };
 
-            auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
+            auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, filter);
 
             return {std::move(storePath), input};
         }
@@ -220,7 +220,7 @@ struct MercurialInputScheme : InputScheme
 
         auto checkHashType = [&](const std::optional<Hash> & hash)
         {
-            if (hash.has_value() && hash->type != htSHA1)
+            if (hash.has_value() && hash->algo != HashAlgorithm::SHA1)
                 throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
         };
 
@@ -260,14 +260,14 @@ struct MercurialInputScheme : InputScheme
         });
 
         if (auto res = getCache()->lookup(store, unlockedAttrs)) {
-            auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
+            auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1);
             if (!input.getRev() || input.getRev() == rev2) {
                 input.attrs.insert_or_assign("rev", rev2.gitRev());
                 return makeResult(res->first, std::move(res->second));
             }
         }
 
-        Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(HashFormat::Base32, false));
+        Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false));
 
         /* If this is a commit hash that we already have, we don't
            have to pull again. */
@@ -301,7 +301,7 @@ struct MercurialInputScheme : InputScheme
             runHg({ "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
         assert(tokens.size() == 3);
 
-        input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], htSHA1).gitRev());
+        input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], HashAlgorithm::SHA1).gitRev());
         auto revCount = std::stoull(tokens[1]);
         input.attrs.insert_or_assign("ref", tokens[2]);
 
@@ -73,7 +73,7 @@ DownloadFileResult downloadFile(
     } else {
         StringSink sink;
         dumpString(res.data, sink);
-        auto hash = hashString(htSHA256, res.data);
+        auto hash = hashString(HashAlgorithm::SHA256, res.data);
         ValidPathInfo info {
             *store,
             name,
@@ -82,7 +82,7 @@ DownloadFileResult downloadFile(
                 .hash = hash,
                 .references = {},
             },
-            hashString(htSHA256, sink.s),
+            hashString(HashAlgorithm::SHA256, sink.s),
         };
         info.narSize = sink.s.size();
         auto source = StringSource { sink.s };
@@ -156,7 +156,7 @@ DownloadTarballResult downloadTarball(
             throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
         auto topDir = tmpDir + "/" + members.begin()->name;
         lastModified = lstat(topDir).st_mtime;
-        unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
+        unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, defaultPathFilter, NoRepair);
     }
 
     Attrs infoAttrs({
@@ -14,4 +14,4 @@ libmain_LIBS = libstore libutil
 
 libmain_ALLOW_UNDEFINED = 1
 
-$(eval $(call install-file-in, $(d)/nix-main.pc, $(libdir)/pkgconfig, 0644))
+$(eval $(call install-file-in, $(buildprefix)$(d)/nix-main.pc, $(libdir)/pkgconfig, 0644))
Some files were not shown because too many files have changed in this diff.