diff --git a/.github/ISSUE_TEMPLATE/missing_documentation.md b/.github/ISSUE_TEMPLATE/missing_documentation.md index be3f6af97..cf663e28d 100644 --- a/.github/ISSUE_TEMPLATE/missing_documentation.md +++ b/.github/ISSUE_TEMPLATE/missing_documentation.md @@ -23,7 +23,7 @@ assignees: '' - [ ] checked [open documentation issues and pull requests] for possible duplicates [latest Nix manual]: https://nixos.org/manual/nix/unstable/ -[source]: https://github.com/NixOS/nix/tree/master/doc/manual/src +[source]: https://github.com/NixOS/nix/tree/master/doc/manual/source [open documentation issues and pull requests]: https://github.com/NixOS/nix/labels/documentation ## Priorities diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index d12a4d36c..69da87db7 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,3 +1,22 @@ + + # Motivation diff --git a/.github/labeler.yml b/.github/labeler.yml index 0e6fd3e26..97ca9284d 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -1,7 +1,7 @@ "c api": - changed-files: - any-glob-to-any-file: "src/lib*-c/**/*" - - any-glob-to-any-file: "test/unit/**/nix_api_*" + - any-glob-to-any-file: "src/*test*/**/nix_api_*" - any-glob-to-any-file: "doc/external-api/**/*" "contributor-experience": @@ -9,7 +9,7 @@ - any-glob-to-any-file: "CONTRIBUTING.md" - any-glob-to-any-file: ".github/ISSUE_TEMPLATE/*" - any-glob-to-any-file: ".github/PULL_REQUEST_TEMPLATE.md" - - any-glob-to-any-file: "doc/manual/src/contributing/**" + - any-glob-to-any-file: "doc/manual/source/contributing/**" "documentation": - changed-files: diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml deleted file mode 100644 index dd110de6c..000000000 --- a/.github/workflows/backport.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Backport -on: - pull_request_target: - types: [closed, labeled] -permissions: - contents: read -jobs: - backport: - name: Backport Pull Request - permissions: - # for zeebe-io/backport-action - contents: write - pull-requests: write - if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name)) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - # required to find all branches - fetch-depth: 0 - - name: Create backport PRs - # should be kept in sync with `version` - uses: zeebe-io/backport-action@v3.0.2 - with: - # Config README: https://github.com/zeebe-io/backport-action#backport-action - github_token: ${{ secrets.GITHUB_TOKEN }} - github_workspace: ${{ github.workspace }} - pull_description: |- - Automatic backport to `${target_branch}`, triggered by a label in #${pull_number}. 
- # should be kept in sync with `uses` - version: v0.0.5 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3463335b9..27f60574e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,7 +20,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: cachix/install-nix-action@V27 + - uses: cachix/install-nix-action@v30 with: # The sandbox would otherwise be disabled by default on Darwin extra_nix_config: "sandbox = true" @@ -89,7 +89,7 @@ jobs: with: fetch-depth: 0 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@V27 + - uses: cachix/install-nix-action@v30 with: install_url: https://releases.nixos.org/nix/nix-2.20.3/install - uses: cachix/cachix-action@v15 @@ -112,7 +112,7 @@ jobs: steps: - uses: actions/checkout@v4 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@V27 + - uses: cachix/install-nix-action@v30 with: install_url: '${{needs.installer.outputs.installerURL}}' install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve" @@ -142,11 +142,11 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: cachix/install-nix-action@V27 + - uses: cachix/install-nix-action@v30 with: install_url: https://releases.nixos.org/nix/nix-2.20.3/install - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV + - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#nix.version | tr -d \")" >> $GITHUB_ENV - uses: cachix/cachix-action@v15 if: needs.check_secrets.outputs.cachix == 'true' with: @@ -214,4 +214,4 @@ jobs: path: flake-regressions/tests - uses: DeterminateSystems/nix-installer-action@main - uses: DeterminateSystems/magic-nix-cache-action@main - - run: nix build --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH scripts/flake-regressions.sh + - run: nix build --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh diff --git a/.gitignore b/.gitignore index a17b627f4..11a80ab5b 100644 --- a/.gitignore +++ b/.gitignore @@ -23,17 +23,17 @@ perl/Makefile.config /doc/manual/conf-file.json /doc/manual/language.json /doc/manual/xp-features.json -/doc/manual/src/SUMMARY.md -/doc/manual/src/SUMMARY-rl-next.md -/doc/manual/src/store/types/* -!/doc/manual/src/store/types/index.md.in -/doc/manual/src/command-ref/new-cli -/doc/manual/src/command-ref/conf-file.md -/doc/manual/src/command-ref/experimental-features-shortlist.md -/doc/manual/src/contributing/experimental-feature-descriptions.md -/doc/manual/src/language/builtins.md -/doc/manual/src/language/builtin-constants.md -/doc/manual/src/release-notes/rl-next.md +/doc/manual/source/SUMMARY.md +/doc/manual/source/SUMMARY-rl-next.md +/doc/manual/source/store/types/* +!/doc/manual/source/store/types/index.md.in +/doc/manual/source/command-ref/new-cli +/doc/manual/source/command-ref/conf-file.md +/doc/manual/source/command-ref/experimental-features-shortlist.md +/doc/manual/source/contributing/experimental-feature-descriptions.md +/doc/manual/source/language/builtins.md +/doc/manual/source/language/builtin-constants.md +/doc/manual/source/release-notes/rl-next.md # /scripts/ /scripts/nix-profile.sh @@ -49,22 +49,22 @@ perl/Makefile.config 
/src/libexpr/parser-tab.output /src/libexpr/nix.tbl /src/libexpr/tests -/tests/unit/libexpr/libnixexpr-tests +/src/libexpr-tests/libnixexpr-tests # /src/libfetchers -/tests/unit/libfetchers/libnixfetchers-tests +/src/libfetchers-tests/libnixfetchers-tests # /src/libflake -/tests/unit/libflake/libnixflake-tests +/src/libflake-tests/libnixflake-tests # /src/libstore/ *.gen.* /src/libstore/tests -/tests/unit/libstore/libnixstore-tests +/src/libstore-tests/libnixstore-tests # /src/libutil/ /src/libutil/tests -/tests/unit/libutil/libnixutil-tests +/src/libutil-tests/libnixutil-tests /src/nix/nix diff --git a/.mergify.yml b/.mergify.yml new file mode 100644 index 000000000..c297d3d5e --- /dev/null +++ b/.mergify.yml @@ -0,0 +1,92 @@ +queue_rules: + - name: default + # all required tests need to go here + merge_conditions: + - check-success=installer + - check-success=installer_test (macos-latest) + - check-success=installer_test (ubuntu-latest) + - check-success=tests (macos-latest) + - check-success=tests (ubuntu-latest) + - check-success=vm_tests + merge_method: rebase + batch_size: 5 + +pull_request_rules: + - name: merge using the merge queue + conditions: + - base~=master|.+-maintenance + - label~=merge-queue|dependencies + actions: + queue: {} + +# The rules below will first create backport pull requests and put those in a merge queue. + + - name: backport patches to 2.18 + conditions: + - label=backport 2.18-maintenance + actions: + backport: + branches: + - 2.18-maintenance + labels: + - merge-queue + + - name: backport patches to 2.19 + conditions: + - label=backport 2.19-maintenance + actions: + backport: + branches: + - 2.19-maintenance + labels: + - merge-queue + + - name: backport patches to 2.20 + conditions: + - label=backport 2.20-maintenance + actions: + backport: + branches: + - 2.20-maintenance + labels: + - merge-queue + + - name: backport patches to 2.21 + conditions: + - label=backport 2.21-maintenance + actions: + backport: + branches: + - 2.21-maintenance + labels: + - merge-queue + + - name: backport patches to 2.22 + conditions: + - label=backport 2.22-maintenance + actions: + backport: + branches: + - 2.22-maintenance + labels: + - merge-queue + + - name: backport patches to 2.23 + conditions: + - label=backport 2.23-maintenance + actions: + backport: + branches: + - 2.23-maintenance + labels: + - merge-queue + + - name: backport patches to 2.24 + conditions: + - label=backport 2.24-maintenance + actions: + backport: + branches: + - "2.24-maintenance" + labels: + - merge-queue diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index da56653b8..ad8678962 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -79,7 +79,7 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy). - Functional tests – [`tests/functional/**.sh`](./tests/functional) - Unit tests – [`src/*/tests`](./src/) - Integration tests – [`tests/nixos/*`](./tests/nixos) - - [ ] User documentation in the [manual](./doc/manual/src) + - [ ] User documentation in the [manual](./doc/manual/source) - [ ] API documentation in header files - [ ] Code and comments are self-explanatory - [ ] Commit message explains **why** the change was made @@ -90,11 +90,11 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy). ## Making changes to the Nix manual The Nix reference manual is hosted on https://nixos.org/manual/nix. -The underlying source files are located in [`doc/manual/src`](./doc/manual/src). 
+The underlying source files are located in [`doc/manual/source`](./doc/manual/source). For small changes you can [use GitHub to edit these files](https://docs.github.com/en/repositories/working-with-files/managing-files/editing-files) For larger changes see the [Nix reference manual](https://nix.dev/manual/nix/development/development/contributing.html). ## Getting help Whenever you're stuck or do not know how to proceed, you can always ask for help. -The appropriate channels to do so can be found on the [NixOS Community](https://nixos.org/community/) page. +We invite you to use our [Matrix room](https://matrix.to/#/#nix-dev:nixos.org) to ask questions. diff --git a/HACKING.md b/HACKING.md new file mode 120000 index 000000000..02971da3e --- /dev/null +++ b/HACKING.md @@ -0,0 +1 @@ +doc/manual/source/development/building.md \ No newline at end of file diff --git a/Makefile b/Makefile index b51ae6cc7..ee1a0de31 100644 --- a/Makefile +++ b/Makefile @@ -38,6 +38,18 @@ makefiles += \ endif endif +ifeq ($(ENABLE_UNIT_TESTS), yes) +makefiles += \ + src/libutil-tests/local.mk \ + src/libutil-test-support/local.mk \ + src/libstore-tests/local.mk \ + src/libstore-test-support/local.mk \ + src/libfetchers-tests/local.mk \ + src/libexpr-tests/local.mk \ + src/libexpr-test-support/local.mk \ + src/libflake-tests/local.mk +endif + ifeq ($(ENABLE_FUNCTIONAL_TESTS), yes) ifdef HOST_UNIX makefiles += \ @@ -79,6 +91,7 @@ ifdef HOST_WINDOWS # # TODO do not do this, and instead do fine-grained export annotations. GLOBAL_LDFLAGS += -Wl,--export-all-symbols + GLOBAL_CXXFLAGS += -D_WIN32_WINNT=0x0602 endif GLOBAL_CXXFLAGS += -g -Wall -Wdeprecated-copy -Wignored-qualifiers -Wimplicit-fallthrough -Werror=unused-result -Werror=suggest-override -include $(buildprefix)config.h -std=c++2a -I src @@ -92,6 +105,13 @@ include mk/lib.mk # These must be defined after `mk/lib.mk`. Otherwise the first rule # incorrectly becomes the default target. +ifneq ($(ENABLE_UNIT_TESTS), yes) +.PHONY: check +check: + @echo "Unit tests are disabled. Configure without '--disable-unit-tests', or avoid calling 'make check'." 
+ @exit 1 +endif + ifneq ($(ENABLE_FUNCTIONAL_TESTS), yes) .PHONY: installcheck installcheck: diff --git a/Makefile.config.in b/Makefile.config.in index e131484f6..3100d2073 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -12,6 +12,7 @@ ENABLE_BUILD = @ENABLE_BUILD@ ENABLE_DOC_GEN = @ENABLE_DOC_GEN@ ENABLE_FUNCTIONAL_TESTS = @ENABLE_FUNCTIONAL_TESTS@ ENABLE_S3 = @ENABLE_S3@ +ENABLE_UNIT_TESTS = @ENABLE_UNIT_TESTS@ GTEST_LIBS = @GTEST_LIBS@ HAVE_LIBCPUID = @HAVE_LIBCPUID@ HAVE_SECCOMP = @HAVE_SECCOMP@ diff --git a/build-utils-meson/export/meson.build b/build-utils-meson/export/meson.build index 40f6dcd59..9f5950572 100644 --- a/build-utils-meson/export/meson.build +++ b/build-utils-meson/export/meson.build @@ -10,6 +10,7 @@ foreach dep : deps_public_subproject endforeach requires_public += deps_public +extra_pkg_config_variables = get_variable('extra_pkg_config_variables', {}) import('pkgconfig').generate( this_library, filebase : meson.project_name(), @@ -20,6 +21,7 @@ import('pkgconfig').generate( requires : requires_public, requires_private : requires_private, libraries_private : libraries_private, + variables : extra_pkg_config_variables, ) meson.override_dependency(meson.project_name(), declare_dependency( @@ -27,4 +29,5 @@ meson.override_dependency(meson.project_name(), declare_dependency( link_with : this_library, compile_args : ['-std=c++2a'], dependencies : deps_public_subproject + deps_public, + variables : extra_pkg_config_variables, )) diff --git a/build-utils-meson/libatomic/meson.build b/build-utils-meson/libatomic/meson.build new file mode 100644 index 000000000..d16d23817 --- /dev/null +++ b/build-utils-meson/libatomic/meson.build @@ -0,0 +1,8 @@ + +# Check if -latomic is needed +# This is needed for std::atomic on some platforms +# We did not manage to test this reliably on all platforms, so we hardcode +# it for now. +if host_machine.cpu_family() == 'arm' + deps_other += cxx.find_library('atomic') +endif diff --git a/build-utils-meson/windows-version/meson.build b/build-utils-meson/windows-version/meson.build new file mode 100644 index 000000000..3a008e5df --- /dev/null +++ b/build-utils-meson/windows-version/meson.build @@ -0,0 +1,6 @@ +if host_machine.system() == 'windows' + # https://learn.microsoft.com/en-us/cpp/porting/modifying-winver-and-win32-winnt?view=msvc-170 + # #define _WIN32_WINNT_WIN8 0x0602 + # We currently don't use any API which requires higher than this. + add_project_arguments([ '-D_WIN32_WINNT=0x0602' ], language: 'cpp') +endif diff --git a/configure.ac b/configure.ac index 18d718c07..4df5c80f0 100644 --- a/configure.ac +++ b/configure.ac @@ -62,12 +62,16 @@ AC_CHECK_TOOL([AR], [ar]) AC_SYS_LARGEFILE -# Solaris-specific stuff. +# OS-specific stuff. case "$host_os" in solaris*) # Solaris requires -lsocket -lnsl for network functions LDFLAGS="-lsocket -lnsl $LDFLAGS" ;; + darwin*) + # Need to link to libsandbox. + LDFLAGS="-lsandbox $LDFLAGS" + ;; esac @@ -86,12 +90,13 @@ static char buf[1024];]], AC_LANG_POP(C++) -AC_CHECK_FUNCS([statvfs pipe2]) +AC_CHECK_FUNCS([statvfs pipe2 close_range]) -# Check for lutimes, optionally used for changing the mtime of -# symlinks. -AC_CHECK_FUNCS([lutimes]) +# Check for lutimes and utimensat, optionally used for changing the +# mtime of symlinks. +AC_CHECK_DECLS([AT_SYMLINK_NOFOLLOW], [], [], [[#include ]]) +AC_CHECK_FUNCS([lutimes utimensat]) # Check whether the store optimiser can optimise symlinks. 
@@ -141,6 +146,18 @@ AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]), ENABLE_BUILD=$enableval, ENABLE_BUILD=yes) AC_SUBST(ENABLE_BUILD) +# Building without unit tests is useful for bootstrapping with a smaller footprint +# or running the tests in a separate derivation. Otherwise, we do compile and +# run them. + +AC_ARG_ENABLE(unit-tests, AS_HELP_STRING([--disable-unit-tests],[Do not build the tests]), + ENABLE_UNIT_TESTS=$enableval, ENABLE_UNIT_TESTS=$ENABLE_BUILD) +AC_SUBST(ENABLE_UNIT_TESTS) + +AS_IF( + [test "$ENABLE_BUILD" == "no" && test "$ENABLE_UNIT_TESTS" == "yes"], + [AC_MSG_ERROR([Cannot enable unit tests when building overall is disabled. Please do not pass '--enable-unit-tests' or do not pass '--disable-build'.])]) + AC_ARG_ENABLE(functional-tests, AS_HELP_STRING([--disable-functional-tests],[Do not build the tests]), ENABLE_FUNCTIONAL_TESTS=$enableval, ENABLE_FUNCTIONAL_TESTS=yes) AC_SUBST(ENABLE_FUNCTIONAL_TESTS) @@ -158,6 +175,10 @@ AS_IF( [test "$ENABLE_FUNCTIONAL_TESTS" == "yes" || test "$ENABLE_DOC_GEN" == "yes"], [NEED_PROG(jq, jq)]) +AS_IF( + [test "$ENABLE_DOC_GEN" == "yes"], + [NEED_PROG(man, man)]) + AS_IF([test "$ENABLE_BUILD" == "yes"],[ # Look for boost, a required dependency. @@ -346,6 +367,16 @@ if test "$gc" = yes; then CFLAGS="$old_CFLAGS" fi +AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[ + +# Look for gtest. +PKG_CHECK_MODULES([GTEST], [gtest_main gmock_main]) + +# Look for rapidcheck. +PKG_CHECK_MODULES([RAPIDCHECK], [rapidcheck rapidcheck_gtest]) + +]) + # Look for nlohmann/json. PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9]) diff --git a/doc/manual/.version b/doc/manual/.version new file mode 120000 index 000000000..b7badcd0c --- /dev/null +++ b/doc/manual/.version @@ -0,0 +1 @@ +../../.version \ No newline at end of file diff --git a/doc/manual/book.toml b/doc/manual/book.toml index 73fb7e75e..213739174 100644 --- a/doc/manual/book.toml +++ b/doc/manual/book.toml @@ -1,5 +1,6 @@ [book] title = "Nix Reference Manual" +src = "source" [output.html] additional-css = ["custom.css"] @@ -7,9 +8,21 @@ additional-js = ["redirects.js"] edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}" git-repository-url = "https://github.com/NixOS/nix" +# Handles replacing @docroot@ with a path to ./source relative to that markdown file, +# {{#include handlebars}}, and the @generated@ syntax used within these. it mostly +# but not entirely replaces the links preprocessor (which we cannot simply use due +# to @generated@ files living in a different directory to make meson happy). we do +# not want to disable the links preprocessor entirely though because that requires +# disabling *all* built-in preprocessors and selectively reenabling those we want. +[preprocessor.substitute] +command = "python3 ./substitute.py" +before = ["anchors", "links"] + [preprocessor.anchors] renderers = ["html"] -command = "jq --from-file doc/manual/anchors.jq" +command = "jq --from-file ./anchors.jq" + +[output.markdown] [output.linkcheck] # no Internet during the build (in the sandbox) diff --git a/doc/manual/generate-deps.py b/doc/manual/generate-deps.py new file mode 100755 index 000000000..297bd3939 --- /dev/null +++ b/doc/manual/generate-deps.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python3 + +import glob +import sys + +# meson expects makefile-style dependency declarations, i.e. +# +# target: dependency... 
+# +# meson seems to pass depfiles straight on to ninja even though +# it also parses the file itself (or at least has code to do so +# in its tree), so we must live by ninja's rules: only slashes, +# spaces and octothorpes can be escaped, anything else is taken +# literally. since the rules for these aren't even the same for +# all three we will just fail when we encounter any of them (if +# asserts are off for some reason the depfile will likely point +# to nonexistant paths, making everything phony and thus fine.) +for path in glob.glob(sys.argv[1] + '/**', recursive=True): + assert '\\' not in path + assert ' ' not in path + assert '#' not in path + print("ignored:", path) diff --git a/doc/manual/generate-store-types.nix b/doc/manual/generate-store-types.nix index 3b78a0e1b..46179abc5 100644 --- a/doc/manual/generate-store-types.nix +++ b/doc/manual/generate-store-types.nix @@ -21,7 +21,7 @@ let "index.md" = replaceStrings [ "@store-types@" ] [ index ] - (readFile ./src/store/types/index.md.in); + (readFile ./source/store/types/index.md.in); tableOfContents = let diff --git a/doc/manual/local.mk b/doc/manual/local.mk index fcc50f460..36cccc506 100644 --- a/doc/manual/local.mk +++ b/doc/manual/local.mk @@ -4,8 +4,8 @@ doc_nix = $(nix_PATH) MANUAL_SRCS := \ - $(call rwildcard, $(d)/src, *.md) \ - $(call rwildcard, $(d)/src, */*.md) + $(call rwildcard, $(d)/source, *.md) \ + $(call rwildcard, $(d)/source, */*.md) man-pages := $(foreach n, \ nix-env.1 nix-store.1 \ @@ -18,11 +18,11 @@ man-pages := $(foreach n, \ , $(d)/$(n)) # man pages for subcommands -# convert from `$(d)/src/command-ref/nix-{1}/{2}.md` to `$(d)/nix-{1}-{2}.1` +# convert from `$(d)/source/command-ref/nix-{1}/{2}.md` to `$(d)/nix-{1}-{2}.1` # FIXME: unify with how nix3-cli man pages are generated man-pages += $(foreach subcommand, \ - $(filter-out %opt-common.md %env-common.md, $(wildcard $(d)/src/command-ref/nix-*/*.md)), \ - $(d)/$(subst /,-,$(subst $(d)/src/command-ref/,,$(subst .md,.1,$(subcommand))))) + $(filter-out %opt-common.md %env-common.md, $(wildcard $(d)/source/command-ref/nix-*/*.md)), \ + $(d)/$(subst /,-,$(subst $(d)/source/command-ref/,,$(subst .md,.1,$(subcommand))))) clean-files += $(d)/*.1 $(d)/*.5 $(d)/*.8 @@ -49,11 +49,11 @@ define process-includes done < <(grep '{{#include' $(1)) endef -$(d)/nix-env-%.1: $(d)/src/command-ref/nix-env/%.md +$(d)/nix-env-%.1: $(d)/source/command-ref/nix-env/%.md @printf "Title: %s\n\n" "$(subst nix-env-,nix-env --,$$(basename "$@" .1))" > $^.tmp $(render-subcommand) -$(d)/nix-store-%.1: $(d)/src/command-ref/nix-store/%.md +$(d)/nix-store-%.1: $(d)/source/command-ref/nix-store/%.md @printf -- 'Title: %s\n\n' "$(subst nix-store-,nix-store --,$$(basename "$@" .1))" > $^.tmp $(render-subcommand) @@ -69,50 +69,50 @@ define render-subcommand endef -$(d)/%.1: $(d)/src/command-ref/%.md +$(d)/%.1: $(d)/source/command-ref/%.md @printf "Title: %s\n\n" "$$(basename $@ .1)" > $^.tmp @cat $^ >> $^.tmp @$(call process-includes,$^,$^.tmp) $(trace-gen) lowdown -sT man --nroff-nolinks -M section=1 $^.tmp -o $@ @rm $^.tmp -$(d)/%.8: $(d)/src/command-ref/%.md +$(d)/%.8: $(d)/source/command-ref/%.md @printf "Title: %s\n\n" "$$(basename $@ .8)" > $^.tmp @cat $^ >> $^.tmp $(trace-gen) lowdown -sT man --nroff-nolinks -M section=8 $^.tmp -o $@ @rm $^.tmp -$(d)/nix.conf.5: $(d)/src/command-ref/conf-file.md +$(d)/nix.conf.5: $(d)/source/command-ref/conf-file.md @printf "Title: %s\n\n" "$$(basename $@ .5)" > $^.tmp @cat $^ >> $^.tmp @$(call process-includes,$^,$^.tmp) $(trace-gen) lowdown -sT man 
--nroff-nolinks -M section=5 $^.tmp -o $@ @rm $^.tmp -$(d)/nix-profiles.5: $(d)/src/command-ref/files/profiles.md +$(d)/nix-profiles.5: $(d)/source/command-ref/files/profiles.md @printf "Title: %s\n\n" "$$(basename $@ .5)" > $^.tmp @cat $^ >> $^.tmp $(trace-gen) lowdown -sT man --nroff-nolinks -M section=5 $^.tmp -o $@ @rm $^.tmp -$(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/SUMMARY-rl-next.md $(d)/src/store/types $(d)/src/command-ref/new-cli $(d)/src/development/experimental-feature-descriptions.md +$(d)/source/SUMMARY.md: $(d)/source/SUMMARY.md.in $(d)/source/SUMMARY-rl-next.md $(d)/source/store/types $(d)/source/command-ref/new-cli $(d)/source/development/experimental-feature-descriptions.md @cp $< $@ @$(call process-includes,$@,$@) -$(d)/src/store/types: $(d)/nix.json $(d)/utils.nix $(d)/generate-store-info.nix $(d)/generate-store-types.nix $(d)/src/store/types/index.md.in $(doc_nix) +$(d)/source/store/types: $(d)/nix.json $(d)/utils.nix $(d)/generate-store-info.nix $(d)/generate-store-types.nix $(d)/source/store/types/index.md.in $(doc_nix) @# FIXME: build out of tree! @rm -rf $@.tmp $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-store-types.nix (builtins.fromJSON (builtins.readFile $<)).stores' @# do not destroy existing contents @mv $@.tmp/* $@/ -$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(d)/generate-settings.nix $(d)/generate-store-info.nix $(doc_nix) +$(d)/source/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(d)/generate-settings.nix $(d)/generate-store-info.nix $(doc_nix) @rm -rf $@ $@.tmp $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix true (builtins.readFile $<)' @mv $@.tmp $@ -$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/generate-settings.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(doc_nix) - @cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp +$(d)/source/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/generate-settings.nix $(d)/source/command-ref/conf-file-prefix.md $(d)/source/command-ref/experimental-features-shortlist.md $(doc_nix) + @cat doc/manual/source/command-ref/conf-file-prefix.md > $@.tmp $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-settings.nix { prefix = "conf"; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp; @mv $@.tmp $@ @@ -124,12 +124,12 @@ $(d)/conf-file.json: $(doc_nix) $(trace-gen) $(dummy-env) $(doc_nix) config show --json --experimental-features nix-command > $@.tmp @mv $@.tmp $@ -$(d)/src/development/experimental-feature-descriptions.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features.nix $(doc_nix) +$(d)/source/development/experimental-feature-descriptions.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features.nix $(doc_nix) @rm -rf $@ $@.tmp $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-xp-features.nix (builtins.fromJSON (builtins.readFile $<))' @mv $@.tmp $@ -$(d)/src/command-ref/experimental-features-shortlist.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features-shortlist.nix $(doc_nix) +$(d)/source/command-ref/experimental-features-shortlist.md: $(d)/xp-features.json $(d)/utils.nix $(d)/generate-xp-features-shortlist.nix $(doc_nix) @rm -rf $@ $@.tmp $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-xp-features-shortlist.nix (builtins.fromJSON 
(builtins.readFile $<))' @mv $@.tmp $@ @@ -138,10 +138,10 @@ $(d)/xp-features.json: $(doc_nix) $(trace-gen) $(dummy-env) $(doc_nix) __dump-xp-features > $@.tmp @mv $@.tmp $@ -$(d)/src/language/builtins.md: $(d)/language.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(doc_nix) - @cat doc/manual/src/language/builtins-prefix.md > $@.tmp +$(d)/source/language/builtins.md: $(d)/language.json $(d)/generate-builtins.nix $(d)/source/language/builtins-prefix.md $(doc_nix) + @cat doc/manual/source/language/builtins-prefix.md > $@.tmp $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp; - @cat doc/manual/src/language/builtins-suffix.md >> $@.tmp + @cat doc/manual/source/language/builtins-suffix.md >> $@.tmp @mv $@.tmp $@ $(d)/language.json: $(doc_nix) @@ -149,7 +149,7 @@ $(d)/language.json: $(doc_nix) @mv $@.tmp $@ # Generate "Upcoming release" notes (or clear it and remove from menu) -$(d)/src/release-notes/rl-next.md: $(d)/rl-next $(d)/rl-next/* +$(d)/source/release-notes/rl-next.md: $(d)/rl-next $(d)/rl-next/* @if type -p changelog-d > /dev/null; then \ echo " GEN " $@; \ changelog-d doc/manual/rl-next > $@; \ @@ -158,7 +158,7 @@ $(d)/src/release-notes/rl-next.md: $(d)/rl-next $(d)/rl-next/* true > $@; \ fi -$(d)/src/SUMMARY-rl-next.md: $(d)/src/release-notes/rl-next.md +$(d)/source/SUMMARY-rl-next.md: $(d)/source/release-notes/rl-next.md $(trace-gen) true @if [ -s $< ]; then \ echo ' - [Upcoming release](release-notes/rl-next.md)' > $@; \ @@ -194,9 +194,9 @@ $(mandir)/man1/nix3-manpages: doc/manual/generated/man1/nix3-manpages @mkdir -p $(DESTDIR)$$(dirname $@) $(trace-install) install -m 0644 $$(dirname $<)/* $(DESTDIR)$$(dirname $@) -doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli +doc/manual/generated/man1/nix3-manpages: $(d)/source/command-ref/new-cli @mkdir -p $(DESTDIR)$$(dirname $@) - $(trace-gen) for i in doc/manual/src/command-ref/new-cli/*.md; do \ + $(trace-gen) for i in doc/manual/source/command-ref/new-cli/*.md; do \ name=$$(basename $$i .md); \ tmpFile=$$(mktemp); \ if [[ $$name = SUMMARY ]]; then continue; fi; \ @@ -211,7 +211,7 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli # `@docroot@` is to be preserved for documenting the mechanism # FIXME: maybe contributing guides should live right next to the code # instead of in the manual -$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/store/types $(d)/src/command-ref/new-cli $(d)/src/development/experimental-feature-descriptions.md $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md $(d)/src/release-notes/rl-next.md $(d)/src/figures $(d)/src/favicon.png $(d)/src/favicon.svg +$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/source/SUMMARY.md $(d)/source/store/types $(d)/source/command-ref/new-cli $(d)/source/development/experimental-feature-descriptions.md $(d)/source/command-ref/conf-file.md $(d)/source/language/builtins.md $(d)/source/release-notes/rl-next.md $(d)/source/figures $(d)/source/favicon.png $(d)/source/favicon.svg $(trace-gen) \ tmp="$$(mktemp -d)"; \ cp -r doc/manual "$$tmp"; \ @@ -219,12 +219,17 @@ $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/ $(call process-includes,$$file,$$file); \ done; \ find "$$tmp" -name '*.md' ! 
-name 'documentation.md' | while read -r file; do \ - docroot="$$(realpath --relative-to="$$(dirname "$$file")" $$tmp/manual/src)"; \ + docroot="$$(realpath --relative-to="$$(dirname "$$file")" $$tmp/manual/source)"; \ sed -i "s,@docroot@,$$docroot,g" "$$file"; \ done; \ set -euo pipefail; \ - RUST_LOG=warn mdbook build "$$tmp/manual" -d $(DESTDIR)$(docdir)/manual.tmp 2>&1 \ - | { grep -Fv "because fragment resolution isn't implemented" || :; }; \ + ( \ + cd "$$tmp/manual"; \ + RUST_LOG=warn \ + MDBOOK_SUBSTITUTE_SEARCH=$(d)/source \ + mdbook build -d $(DESTDIR)$(docdir)/manual.tmp 2>&1 \ + | { grep -Fv "because fragment resolution isn't implemented" || :; } \ + ); \ rm -rf "$$tmp/manual" @rm -rf $(DESTDIR)$(docdir)/manual @mv $(DESTDIR)$(docdir)/manual.tmp/html $(DESTDIR)$(docdir)/manual diff --git a/doc/manual/meson.build b/doc/manual/meson.build new file mode 100644 index 000000000..3630e2dc8 --- /dev/null +++ b/doc/manual/meson.build @@ -0,0 +1,353 @@ +project('nix-manual', + version : files('.version'), + meson_version : '>= 1.1', + license : 'LGPL-2.1-or-later', +) + +nix = find_program('nix', native : true) + +mdbook = find_program('mdbook', native : true) +bash = find_program('bash', native : true) + +pymod = import('python') +python = pymod.find_installation('python3') + +nix_env_for_docs = { + 'HOME': '/dummy', + 'NIX_CONF_DIR': '/dummy', + 'NIX_SSL_CERT_FILE': '/dummy/no-ca-bundle.crt', + 'NIX_STATE_DIR': '/dummy', + 'NIX_CONFIG': 'cores = 0', +} + +nix_for_docs = [nix, '--experimental-features', 'nix-command'] +nix_eval_for_docs_common = nix_for_docs + [ + 'eval', + '-I', 'nix=' + meson.current_source_dir(), + '--store', 'dummy://', + '--impure', +] +nix_eval_for_docs = nix_eval_for_docs_common + '--raw' + +conf_file_json = custom_target( + command : nix_for_docs + ['config', 'show', '--json'], + capture : true, + output : 'conf-file.json', + env : nix_env_for_docs, +) + +language_json = custom_target( + command: [nix, '__dump-language'], + output : 'language.json', + capture : true, + env : nix_env_for_docs, +) + +nix3_cli_json = custom_target( + command : [nix, '__dump-cli'], + capture : true, + output : 'nix.json', + env : nix_env_for_docs, +) + +generate_manual_deps = files( + 'generate-deps.py', +) + +# Generates types +subdir('source/store') +# Generates builtins.md and builtin-constants.md. +subdir('source/language') +# Generates new-cli pages, experimental-features-shortlist.md, and conf-file.md. +subdir('source/command-ref') +# Generates experimental-feature-descriptions.md. +subdir('source/development') +# Generates rl-next-generated.md. +subdir('source/release-notes') +subdir('source') + +# Hacky way to figure out if `nix` is an `ExternalProgram` or +# `Exectuable`. Only the latter can occur in custom target input lists. 
+if nix.full_path().startswith(meson.build_root()) + nix_input = nix +else + nix_input = [] +endif + +manual = custom_target( + 'manual', + command : [ + bash, + '-euo', 'pipefail', + '-c', + ''' + @0@ @INPUT0@ @CURRENT_SOURCE_DIR@ > @DEPFILE@ + @0@ @INPUT1@ summary @2@ < @CURRENT_SOURCE_DIR@/source/SUMMARY.md.in > @2@/source/SUMMARY.md + rsync -r --include='*.md' @CURRENT_SOURCE_DIR@/ @2@/ + (cd @2@; RUST_LOG=warn @1@ build -d @2@ 3>&2 2>&1 1>&3) | { grep -Fv "because fragment resolution isn't implemented" || :; } 3>&2 2>&1 1>&3 + rm -rf @2@/manual + mv @2@/html @2@/manual + find @2@/manual -iname meson.build -delete + '''.format( + python.full_path(), + mdbook.full_path(), + meson.current_build_dir(), + ), + ], + input : [ + generate_manual_deps, + 'substitute.py', + 'book.toml', + 'anchors.jq', + 'custom.css', + nix3_cli_files, + experimental_features_shortlist_md, + experimental_feature_descriptions_md, + types_dir, + conf_file_md, + builtins_md, + rl_next_generated, + summary_rl_next, + nix_input, + ], + output : [ + 'manual', + 'markdown', + ], + depfile : 'manual.d', + env : { + 'RUST_LOG': 'info', + 'MDBOOK_SUBSTITUTE_SEARCH': meson.current_build_dir() / 'source', + }, +) +manual_html = manual[0] +manual_md = manual[1] + +install_subdir( + manual_html.full_path(), + install_dir : get_option('datadir') / 'doc/nix', +) + +nix_nested_manpages = [ + [ 'nix-env', + [ + 'delete-generations', + 'install', + 'list-generations', + 'query', + 'rollback', + 'set-flag', + 'set', + 'switch-generation', + 'switch-profile', + 'uninstall', + 'upgrade', + ], + ], + [ 'nix-store', + [ + 'add-fixed', + 'add', + 'delete', + 'dump-db', + 'dump', + 'export', + 'gc', + 'generate-binary-cache-key', + 'import', + 'load-db', + 'optimise', + 'print-env', + 'query', + 'read-log', + 'realise', + 'repair-path', + 'restore', + 'serve', + 'verify', + 'verify-path', + ], + ], +] + +foreach command : nix_nested_manpages + foreach page : command[1] + title = command[0] + ' --' + page + section = '1' + custom_target( + command : [ + bash, + files('./render-manpage.sh'), + '--out-no-smarty', + title, + section, + '@INPUT0@/command-ref' / command[0] / (page + '.md'), + '@OUTPUT0@', + ], + input : [ + manual_md, + nix_input, + ], + output : command[0] + '-' + page + '.1', + install : true, + install_dir : get_option('mandir') / 'man1', + ) + endforeach +endforeach + +nix3_manpages = [ + 'nix3-build', + 'nix3-bundle', + 'nix3-config', + 'nix3-config-show', + 'nix3-copy', + 'nix3-daemon', + 'nix3-derivation-add', + 'nix3-derivation', + 'nix3-derivation-show', + 'nix3-develop', + #'nix3-doctor', + 'nix3-edit', + 'nix3-eval', + 'nix3-flake-archive', + 'nix3-flake-check', + 'nix3-flake-clone', + 'nix3-flake-info', + 'nix3-flake-init', + 'nix3-flake-lock', + 'nix3-flake', + 'nix3-flake-metadata', + 'nix3-flake-new', + 'nix3-flake-prefetch', + 'nix3-flake-show', + 'nix3-flake-update', + 'nix3-fmt', + 'nix3-hash-file', + 'nix3-hash', + 'nix3-hash-path', + 'nix3-hash-to-base16', + 'nix3-hash-to-base32', + 'nix3-hash-to-base64', + 'nix3-hash-to-sri', + 'nix3-help', + 'nix3-help-stores', + 'nix3-key-convert-secret-to-public', + 'nix3-key-generate-secret', + 'nix3-key', + 'nix3-log', + 'nix3-nar-cat', + 'nix3-nar-dump-path', + 'nix3-nar-ls', + 'nix3-nar', + 'nix3-path-info', + 'nix3-print-dev-env', + 'nix3-profile-diff-closures', + 'nix3-profile-history', + 'nix3-profile-install', + 'nix3-profile-list', + 'nix3-profile', + 'nix3-profile-remove', + 'nix3-profile-rollback', + 'nix3-profile-upgrade', + 'nix3-profile-wipe-history', + 
'nix3-realisation-info', + 'nix3-realisation', + 'nix3-registry-add', + 'nix3-registry-list', + 'nix3-registry', + 'nix3-registry-pin', + 'nix3-registry-remove', + 'nix3-repl', + 'nix3-run', + 'nix3-search', + #'nix3-shell', + 'nix3-store-add-file', + 'nix3-store-add-path', + 'nix3-store-cat', + 'nix3-store-copy-log', + 'nix3-store-copy-sigs', + 'nix3-store-delete', + 'nix3-store-diff-closures', + 'nix3-store-dump-path', + 'nix3-store-gc', + 'nix3-store-ls', + 'nix3-store-make-content-addressed', + 'nix3-store', + 'nix3-store-optimise', + 'nix3-store-path-from-hash-part', + 'nix3-store-ping', + 'nix3-store-prefetch-file', + 'nix3-store-repair', + 'nix3-store-sign', + 'nix3-store-verify', + 'nix3-upgrade-nix', + 'nix3-why-depends', + 'nix', +] + +foreach page : nix3_manpages + section = '1' + custom_target( + command : [ + bash, + '@INPUT0@', + page, + section, + '@INPUT1@/command-ref/new-cli/@0@.md'.format(page), + '@OUTPUT@', + ], + input : [ + files('./render-manpage.sh'), + manual_md, + nix_input, + ], + output : page + '.1', + install : true, + install_dir : get_option('mandir') / 'man1', + ) +endforeach + +nix_manpages = [ + [ 'nix-env', 1 ], + [ 'nix-store', 1 ], + [ 'nix-build', 1 ], + [ 'nix-shell', 1 ], + [ 'nix-instantiate', 1 ], + [ 'nix-collect-garbage', 1 ], + [ 'nix-prefetch-url', 1 ], + [ 'nix-channel', 1 ], + [ 'nix-hash', 1 ], + [ 'nix-copy-closure', 1 ], + [ 'nix.conf', 5, conf_file_md.full_path() ], + [ 'nix-daemon', 8 ], + [ 'nix-profiles', 5, 'files/profiles.md' ], +] + +foreach entry : nix_manpages + title = entry[0] + # nix.conf.5 and nix-profiles.5 are based off of conf-file.md and files/profiles.md, + # rather than a stem identical to its mdbook source. + # Therefore we use an optional third element of this array to override the name pattern + md_file = entry.get(2, title + '.md') + section = entry[1].to_string() + md_file_resolved = join_paths('@INPUT1@/command-ref/', md_file) + custom_target( + command : [ + bash, + '@INPUT0@', + title, + section, + md_file_resolved, + '@OUTPUT@', + ], + input : [ + files('./render-manpage.sh'), + manual_md, + entry.get(3, []), + nix_input, + ], + output : '@0@.@1@'.format(entry[0], entry[1]), + install : true, + install_dir : get_option('mandir') / 'man@0@'.format(entry[1]), + ) +endforeach diff --git a/doc/manual/package.nix b/doc/manual/package.nix new file mode 100644 index 000000000..2e6fcede3 --- /dev/null +++ b/doc/manual/package.nix @@ -0,0 +1,71 @@ +{ lib +, mkMesonDerivation + +, meson +, ninja +, lowdown +, mdbook +, mdbook-linkcheck +, jq +, python3 +, rsync +, nix-cli + +# Configuration Options + +, version +}: + +let + inherit (lib) fileset; +in + +mkMesonDerivation (finalAttrs: { + pname = "nix-manual"; + inherit version; + + workDir = ./.; + fileset = fileset.difference + (fileset.unions [ + ../../.version + # Too many different types of files to filter for now + ../../doc/manual + ./. 
+ ]) + # Do a blacklist instead + ../../doc/manual/package.nix; + + # TODO the man pages should probably be separate + outputs = [ "out" "man" ]; + + # Hack for sake of the dev shell + passthru.externalNativeBuildInputs = [ + meson + ninja + (lib.getBin lowdown) + mdbook + mdbook-linkcheck + jq + python3 + rsync + ]; + + nativeBuildInputs = finalAttrs.passthru.externalNativeBuildInputs ++ [ + nix-cli + ]; + + preConfigure = + '' + chmod u+w ./.version + echo ${finalAttrs.version} > ./.version + ''; + + postInstall = '' + mkdir -p ''$out/nix-support + echo "doc manual ''$out/share/doc/nix/manual" >> ''$out/nix-support/hydra-build-products + ''; + + meta = { + platforms = lib.platforms.all; + }; +}) diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js index cb8cd18fa..dea141391 100644 --- a/doc/manual/redirects.js +++ b/doc/manual/redirects.js @@ -1,7 +1,7 @@ // redirect rules for URL fragments (client-side) to prevent link rot. // this must be done on the client side, as web servers do not see the fragment part of the URL. // it will only work with JavaScript enabled in the browser, but this is the best we can do here. -// see src/_redirects for path redirects (server-side) +// see source/_redirects for path redirects (server-side) // redirects are declared as follows: // each entry has as its key a path matching the requested URL path, relative to the mdBook document root. diff --git a/doc/manual/remove_before_wrapper.py b/doc/manual/remove_before_wrapper.py new file mode 100644 index 000000000..6da4c19b0 --- /dev/null +++ b/doc/manual/remove_before_wrapper.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python3 + +import os +import subprocess +import sys +import shutil +import typing as t + +def main(): + if len(sys.argv) < 4 or '--' not in sys.argv: + print("Usage: remove-before-wrapper -- ") + sys.exit(1) + + # Extract the parts + output: str = sys.argv[1] + nix_command_idx: int = sys.argv.index('--') + 1 + nix_command: t.List[str] = sys.argv[nix_command_idx:] + + output_temp: str = output + '.tmp' + + # Remove the output and temp output in case they exist + shutil.rmtree(output, ignore_errors=True) + shutil.rmtree(output_temp, ignore_errors=True) + + # Execute nix command with `--write-to` tempary output + nix_command_write_to = nix_command + ['--write-to', output_temp] + subprocess.run(nix_command_write_to, check=True) + + # Move the temporary output to the intended location + os.rename(output_temp, output) + +if __name__ == "__main__": + main() diff --git a/doc/manual/render-manpage.sh b/doc/manual/render-manpage.sh new file mode 100755 index 000000000..65a9c124e --- /dev/null +++ b/doc/manual/render-manpage.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +set -euo pipefail + +lowdown_args= + +if [ "$1" = --out-no-smarty ]; then + lowdown_args=--out-no-smarty + shift +fi + +[ "$#" = 4 ] || { + echo "wrong number of args passed" >&2 + exit 1 +} + +title="$1" +section="$2" +infile="$3" +outfile="$4" + +( + printf "Title: %s\n\n" "$title" + cat "$infile" +) | lowdown -sT man --nroff-nolinks $lowdown_args -M section="$section" -o "$outfile" diff --git a/doc/manual/rl-next/add-nix-state-home.md b/doc/manual/rl-next/add-nix-state-home.md new file mode 100644 index 000000000..bbfdd5d38 --- /dev/null +++ b/doc/manual/rl-next/add-nix-state-home.md @@ -0,0 +1,14 @@ +--- +synopsis: Use envvars NIX_CACHE_HOME, NIX_CONFIG_HOME, NIX_DATA_HOME, NIX_STATE_HOME if defined +prs: [11351] +--- + +Added new environment variables: + +- `NIX_CACHE_HOME` +- `NIX_CONFIG_HOME` +- `NIX_DATA_HOME` +- `NIX_STATE_HOME` 
+
+Each, if defined, takes precedence over the corresponding [XDG environment variable](@docroot@/command-ref/env-common.md#xdg-base-directories).
+This provides more fine-grained control over where Nix looks for files, and makes it possible to have a stand-alone Nix environment that only uses files in a specific directory and doesn't interfere with the user environment.
diff --git a/doc/manual/rl-next/build-hook-default.md b/doc/manual/rl-next/build-hook-default.md
index 197290536..f13537983 100644
--- a/doc/manual/rl-next/build-hook-default.md
+++ b/doc/manual/rl-next/build-hook-default.md
@@ -16,7 +16,7 @@ This has a small adverse affect on remote building --- the `build-remote` execut
 This means that other applications linking `libnixstore` that wish to use remote building must arrange for the `nix` command to be on the PATH (or manually overriding `build-hook`) in order for that to work.
 
 Long term we don't envision this being a downside, because we plan to [get rid of `build-remote` and the build hook setting entirely](https://github.com/NixOS/nix/issues/1221).
-There is simply no need to add a second layer of remote-procedure-calling when we want to connect to a remote builder.
-The build hook protocol did in principle support custom ways of remote building, but that can also be accomplished with a custom service for the ssh or daemon/ssh-ng protocols, or with a custom [store type](@docroot@/store/types/) i.e. `Store` subclass.
+There should simply be no need to have an extra, intermediate layer of remote-procedure-calling when we want to connect to a remote builder.
+The build hook protocol did in principle support custom ways of remote building, but that can also be accomplished with a custom service for the ssh or daemon/ssh-ng protocols, or with a custom [store type](@docroot@/store/types/index.md), i.e. a `Store` subclass.
 
-The Perl bindings no longer expose `getBinDir` either, since they libraries those bindings wrap no longer know the location of installed binaries as described above.
+The Perl bindings no longer expose `getBinDir` either, since the underlying C++ libraries those bindings wrap no longer know the location of installed binaries, as described above.
diff --git a/doc/manual/rl-next/filesystem-errors.md b/doc/manual/rl-next/filesystem-errors.md
new file mode 100644
index 000000000..2d5b26228
--- /dev/null
+++ b/doc/manual/rl-next/filesystem-errors.md
@@ -0,0 +1,14 @@
+---
+synopsis: wrap filesystem exceptions more correctly
+issues: []
+prs: [11378]
+---
+
+
+With the switch to `std::filesystem` in various places, Nix started to throw `std::filesystem::filesystem_error` instead of its own exceptions.
+
+This led to error traces no longer being generated, for example when listing a non-existent directory, and could also lead to crashes inside the Nix REPL.
+
+This version catches these exceptions correctly and wraps them in Nix's own exception type.
+
+Author: [**@Mic92**](https://github.com/Mic92)
diff --git a/doc/manual/rl-next/fsync-store-paths.md b/doc/manual/rl-next/fsync-store-paths.md
new file mode 100644
index 000000000..0e9e7f7f2
--- /dev/null
+++ b/doc/manual/rl-next/fsync-store-paths.md
@@ -0,0 +1,9 @@
+---
+synopsis: Add setting `fsync-store-paths`
+issues: [1218]
+prs: [7126]
+---
+
+Nix now has a setting `fsync-store-paths` that ensures that new store paths are durably written to disk before they are registered as "valid" in Nix's database. This can prevent Nix store corruption if the system crashes or there is a power loss. This setting defaults to `false`.
+
+Author: [**@squalus**](https://github.com/squalus)
diff --git a/doc/manual/rl-next/nix-fmt-default-argument.md b/doc/manual/rl-next/nix-fmt-default-argument.md
new file mode 100644
index 000000000..54161ab30
--- /dev/null
+++ b/doc/manual/rl-next/nix-fmt-default-argument.md
@@ -0,0 +1,17 @@
+---
+synopsis: Removing the default argument passed to the `nix fmt` formatter
+issues: []
+prs: [11438]
+---
+
+The underlying formatter no longer receives the ". " default argument when `nix fmt` is called with no arguments.
+
+This change was necessary as the formatter wasn't able to distinguish between
+a user wanting to format the current folder with `nix fmt .` and the generic
+`nix fmt`.
+
+The default behaviour is now the responsibility of the formatter itself, and
+allows tools such as treefmt to format the whole tree instead of only the
+current directory and below.
+
+Author: [**@zimbatm**](https://github.com/zimbatm)
diff --git a/doc/manual/rl-next/no-flake-substitution.md b/doc/manual/rl-next/no-flake-substitution.md
new file mode 100644
index 000000000..67ec58750
--- /dev/null
+++ b/doc/manual/rl-next/no-flake-substitution.md
@@ -0,0 +1,8 @@
+---
+synopsis: Flakes are no longer substituted
+prs: [10612]
+---
+
+Nix will no longer attempt to substitute the source code of flakes from a binary cache. This functionality was broken because it could lead to different evaluation results depending on whether the flake was available in the binary cache, or even depending on whether the flake was already in the local store.
+
+Author: [**@edolstra**](https://github.com/edolstra)
diff --git a/doc/manual/rl-next/verify-tls.md b/doc/manual/rl-next/verify-tls.md
new file mode 100644
index 000000000..afc689f46
--- /dev/null
+++ b/doc/manual/rl-next/verify-tls.md
@@ -0,0 +1,8 @@
+---
+synopsis: "`<nix/fetchurl.nix>` uses TLS verification"
+prs: [11585]
+---
+
+Previously `<nix/fetchurl.nix>` did not do TLS verification. This was because the Nix sandbox in the past did not have access to TLS certificates, and Nix checks the hash of the fetched file anyway. However, this can expose authentication data from `netrc` and URLs to man-in-the-middle attackers. In addition, Nix now in some cases (such as when using impure derivations) does *not* check the hash. Therefore we have now enabled TLS verification. This means that downloads by `<nix/fetchurl.nix>` will now fail if you're fetching from an HTTPS server that does not have a valid certificate.
+
+`<nix/fetchurl.nix>` is also known as the builtin derivation builder `builtin:fetchurl`. It's not to be confused with the evaluation-time function `builtins.fetchurl`, which was not affected by this issue.
diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in similarity index 99% rename from doc/manual/src/SUMMARY.md.in rename to doc/manual/source/SUMMARY.md.in index eef7d189c..d7c312ae7 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -121,6 +121,7 @@ - [Development](development/index.md) - [Building](development/building.md) - [Testing](development/testing.md) + - [Debugging](development/debugging.md) - [Documentation](development/documentation.md) - [CLI guideline](development/cli-guideline.md) - [JSON guideline](development/json-guideline.md) diff --git a/doc/manual/src/_redirects b/doc/manual/source/_redirects similarity index 100% rename from doc/manual/src/_redirects rename to doc/manual/source/_redirects diff --git a/doc/manual/src/advanced-topics/cores-vs-jobs.md b/doc/manual/source/advanced-topics/cores-vs-jobs.md similarity index 100% rename from doc/manual/src/advanced-topics/cores-vs-jobs.md rename to doc/manual/source/advanced-topics/cores-vs-jobs.md diff --git a/doc/manual/src/advanced-topics/diff-hook.md b/doc/manual/source/advanced-topics/diff-hook.md similarity index 100% rename from doc/manual/src/advanced-topics/diff-hook.md rename to doc/manual/source/advanced-topics/diff-hook.md diff --git a/doc/manual/source/advanced-topics/distributed-builds.md b/doc/manual/source/advanced-topics/distributed-builds.md new file mode 100644 index 000000000..66e371888 --- /dev/null +++ b/doc/manual/source/advanced-topics/distributed-builds.md @@ -0,0 +1,108 @@ +# Remote Builds + +A local Nix installation can forward Nix builds to other machines, +this allows multiple builds to be performed in parallel. + +Remote builds also allow Nix to perform multi-platform builds in a +semi-transparent way. For example, if you perform a build for a +`x86_64-darwin` on an `i686-linux` machine, Nix can automatically +forward the build to a `x86_64-darwin` machine, if one is available. + +## Requirements + +For a local machine to forward a build to a remote machine, the remote machine must: + +- Have Nix installed +- Be running an SSH server, e.g. `sshd` +- Be accessible via SSH from the local machine over the network +- Have the local machine's public SSH key in `/etc/ssh/authorized_keys.d/` +- Have the username of the SSH user in the `trusted-users` setting in `nix.conf` + +## Testing + +To test connecting to a remote Nix instance (in this case `mac`), run: + +```console +nix store info --store ssh://username@mac +``` + +To specify an SSH identity file as part of the remote store URI add a +query paramater, e.g. + +```console +nix store info --store ssh://username@mac?ssh-key=/home/alice/my-key +``` + +Since builds should be non-interactive, the key should not have a +passphrase. Alternatively, you can load identities ahead of time into +`ssh-agent` or `gpg-agent`. + +In a multi-user installation (default), builds are executed by the Nix +Daemon. The Nix Daemon cannot prompt for a passphrase via the terminal +or `ssh-agent`, so the SSH key must not have a passphrase. + +In addition, the Nix Daemon's user (typically root) needs to have SSH +access to the remote builder. + +Access can be verified by running `sudo su`, and then validating SSH +access, e.g. by running `ssh mac`. SSH identity files for root users +are usually stored in `/root/.ssh/` (Linux) or `/var/root/.ssh` (MacOS). 
+ +If you get the error + +```console +bash: nix: command not found +error: cannot connect to 'mac' +``` + +then you need to ensure that the `PATH` of non-interactive login shells +contains Nix. + +The [list of remote build machines](@docroot@/command-ref/conf-file.md#conf-builders) can be specified on the command line or in the Nix configuration file. +For example, the following command allows you to build a derivation for `x86_64-darwin` on a Linux machine: + +```console +uname +``` + +```console +Linux +``` + +```console +nix build --impure \ + --expr '(with import { system = "x86_64-darwin"; }; runCommand "foo" {} "uname > $out")' \ + --builders 'ssh://mac x86_64-darwin' +``` + +```console +[1/0/1 built, 0.0 MiB DL] building foo on ssh://mac +``` + +```console +cat ./result +``` + +```console +Darwin +``` + +It is possible to specify multiple build machines separated by a semicolon or a newline, e.g. + +```console + --builders 'ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd' +``` + +Remote build machines can also be configured in [`nix.conf`](@docroot@/command-ref/conf-file.md), e.g. + + builders = ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd + +After making changes to `nix.conf`, restart the Nix daemon for changes to take effect. + +Finally, remote build machines can be configured in a separate configuration +file included in `builders` via the syntax `@/path/to/file`. For example, + + builders = @/etc/nix/machines + +causes the list of machines in `/etc/nix/machines` to be included. +(This is the default.) diff --git a/doc/manual/src/advanced-topics/index.md b/doc/manual/source/advanced-topics/index.md similarity index 100% rename from doc/manual/src/advanced-topics/index.md rename to doc/manual/source/advanced-topics/index.md diff --git a/doc/manual/src/advanced-topics/post-build-hook.md b/doc/manual/source/advanced-topics/post-build-hook.md similarity index 100% rename from doc/manual/src/advanced-topics/post-build-hook.md rename to doc/manual/source/advanced-topics/post-build-hook.md diff --git a/doc/manual/src/architecture/architecture.md b/doc/manual/source/architecture/architecture.md similarity index 100% rename from doc/manual/src/architecture/architecture.md rename to doc/manual/source/architecture/architecture.md diff --git a/doc/manual/src/c-api.md b/doc/manual/source/c-api.md similarity index 100% rename from doc/manual/src/c-api.md rename to doc/manual/source/c-api.md diff --git a/doc/manual/src/command-ref/conf-file-prefix.md b/doc/manual/source/command-ref/conf-file-prefix.md similarity index 100% rename from doc/manual/src/command-ref/conf-file-prefix.md rename to doc/manual/source/command-ref/conf-file-prefix.md diff --git a/doc/manual/src/command-ref/env-common.md b/doc/manual/source/command-ref/env-common.md similarity index 90% rename from doc/manual/src/command-ref/env-common.md rename to doc/manual/source/command-ref/env-common.md index 0b5017882..ee3995111 100644 --- a/doc/manual/src/command-ref/env-common.md +++ b/doc/manual/source/command-ref/env-common.md @@ -138,6 +138,19 @@ The following environment variables are used to determine locations of various s - [`XDG_STATE_HOME`]{#env-XDG_STATE_HOME} (default `~/.local/state`) - [`XDG_CACHE_HOME`]{#env-XDG_CACHE_HOME} (default `~/.cache`) - [XDG Base Directory Specification]: https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html [`use-xdg-base-directories`]: @docroot@/command-ref/conf-file.md#conf-use-xdg-base-directories + +In addition, setting the following environment 
variables overrides the XDG base directories: + +- [`NIX_CONFIG_HOME`]{#env-NIX_CONFIG_HOME} (default `$XDG_CONFIG_HOME/nix`) +- [`NIX_STATE_HOME`]{#env-NIX_STATE_HOME} (default `$XDG_STATE_HOME/nix`) +- [`NIX_CACHE_HOME`]{#env-NIX_CACHE_HOME} (default `$XDG_CACHE_HOME/nix`) + +When [`use-xdg-base-directories`] is enabled, the configuration directory is: + +1. `$NIX_CONFIG_HOME`, if it is defined +2. Otherwise, `$XDG_CONFIG_HOME/nix`, if `XDG_CONFIG_HOME` is defined +3. Otherwise, `~/.config/nix`. + +Likewise for the state and cache directories. diff --git a/doc/manual/src/command-ref/experimental-commands.md b/doc/manual/source/command-ref/experimental-commands.md similarity index 100% rename from doc/manual/src/command-ref/experimental-commands.md rename to doc/manual/source/command-ref/experimental-commands.md diff --git a/doc/manual/src/command-ref/files.md b/doc/manual/source/command-ref/files.md similarity index 100% rename from doc/manual/src/command-ref/files.md rename to doc/manual/source/command-ref/files.md diff --git a/doc/manual/src/command-ref/files/channels.md b/doc/manual/source/command-ref/files/channels.md similarity index 100% rename from doc/manual/src/command-ref/files/channels.md rename to doc/manual/source/command-ref/files/channels.md diff --git a/doc/manual/src/command-ref/files/default-nix-expression.md b/doc/manual/source/command-ref/files/default-nix-expression.md similarity index 72% rename from doc/manual/src/command-ref/files/default-nix-expression.md rename to doc/manual/source/command-ref/files/default-nix-expression.md index 620f7035c..2bd45ff5d 100644 --- a/doc/manual/src/command-ref/files/default-nix-expression.md +++ b/doc/manual/source/command-ref/files/default-nix-expression.md @@ -1,6 +1,6 @@ ## Default Nix expression -The source for the default [Nix expressions](@docroot@/language/index.md) used by [`nix-env`]: +The source for the [Nix expressions](@docroot@/glossary.md#gloss-nix-expression) used by [`nix-env`] by default: - `~/.nix-defexpr` - `$XDG_STATE_HOME/nix/defexpr` if [`use-xdg-base-directories`] is set to `true`. @@ -18,24 +18,25 @@ Then, the resulting expression is interpreted like this: - If the expression is an attribute set, it is used as the default Nix expression. - If the expression is a function, an empty set is passed as argument and the return value is used as the default Nix expression. - -For example, if the default expression contains two files, `foo.nix` and `bar.nix`, then the default Nix expression will be equivalent to - -```nix -{ - foo = import ~/.nix-defexpr/foo.nix; - bar = import ~/.nix-defexpr/bar.nix; -} -``` +> **Example** +> +> If the default expression contains two files, `foo.nix` and `bar.nix`, then the default Nix expression will be equivalent to +> +> ```nix +> { +> foo = import ~/.nix-defexpr/foo.nix; +> bar = import ~/.nix-defexpr/bar.nix; +> } +> ``` The file [`manifest.nix`](@docroot@/command-ref/files/manifest.nix.md) is always ignored. -The command [`nix-channel`] places a symlink to the user's current [channels profile](@docroot@/command-ref/files/channels.md) in this directory. +The command [`nix-channel`] places a symlink to the current user's [channels] in this directory, the [user channel link](#user-channel-link). This makes all subscribed channels available as attributes in the default expression. 
## User channel link -A symlink that ensures that [`nix-env`] can find your channels: +A symlink that ensures that [`nix-env`] can find the current user's [channels]: - `~/.nix-defexpr/channels` - `$XDG_STATE_HOME/defexpr/channels` if [`use-xdg-base-directories`] is set to `true`. @@ -45,8 +46,9 @@ This symlink points to: - `$XDG_STATE_HOME/profiles/channels` for regular users - `$NIX_STATE_DIR/profiles/per-user/root/channels` for `root` -In a multi-user installation, you may also have `~/.nix-defexpr/channels_root`, which links to the channels of the root user.[`nix-env`]: ../nix-env.md +In a multi-user installation, you may also have `~/.nix-defexpr/channels_root`, which links to the channels of the root user. -[`nix-env`]: @docroot@/command-ref/nix-env.md [`nix-channel`]: @docroot@/command-ref/nix-channel.md +[`nix-env`]: @docroot@/command-ref/nix-env.md [`use-xdg-base-directories`]: @docroot@/command-ref/conf-file.md#conf-use-xdg-base-directories +[channels]: @docroot@/command-ref/files/channels.md diff --git a/doc/manual/src/command-ref/files/manifest.json.md b/doc/manual/source/command-ref/files/manifest.json.md similarity index 100% rename from doc/manual/src/command-ref/files/manifest.json.md rename to doc/manual/source/command-ref/files/manifest.json.md diff --git a/doc/manual/src/command-ref/files/manifest.nix.md b/doc/manual/source/command-ref/files/manifest.nix.md similarity index 100% rename from doc/manual/src/command-ref/files/manifest.nix.md rename to doc/manual/source/command-ref/files/manifest.nix.md diff --git a/doc/manual/src/command-ref/files/profiles.md b/doc/manual/source/command-ref/files/profiles.md similarity index 100% rename from doc/manual/src/command-ref/files/profiles.md rename to doc/manual/source/command-ref/files/profiles.md diff --git a/doc/manual/src/command-ref/index.md b/doc/manual/source/command-ref/index.md similarity index 100% rename from doc/manual/src/command-ref/index.md rename to doc/manual/source/command-ref/index.md diff --git a/doc/manual/src/command-ref/main-commands.md b/doc/manual/source/command-ref/main-commands.md similarity index 100% rename from doc/manual/src/command-ref/main-commands.md rename to doc/manual/source/command-ref/main-commands.md diff --git a/doc/manual/source/command-ref/meson.build b/doc/manual/source/command-ref/meson.build new file mode 100644 index 000000000..2976f69ff --- /dev/null +++ b/doc/manual/source/command-ref/meson.build @@ -0,0 +1,63 @@ +xp_features_json = custom_target( + command : [nix, '__dump-xp-features'], + capture : true, + output : 'xp-features.json', +) + +experimental_features_shortlist_md = custom_target( + command : nix_eval_for_docs + [ + '--expr', + 'import @INPUT0@ (builtins.fromJSON (builtins.readFile ./@INPUT1@))', + ], + input : [ + '../../generate-xp-features-shortlist.nix', + xp_features_json, + ], + output : 'experimental-features-shortlist.md', + capture : true, + env : nix_env_for_docs, +) + +nix3_cli_files = custom_target( + command : [ + python.full_path(), + '@INPUT0@', + '@OUTPUT@', + '--' + ] + nix_eval_for_docs + [ + '--expr', + 'import @INPUT1@ true (builtins.readFile ./@INPUT2@)', + ], + input : [ + '../../remove_before_wrapper.py', + '../../generate-manpage.nix', + nix3_cli_json, + ], + output : 'new-cli', + env : nix_env_for_docs, +) + +conf_file_md_body = custom_target( + command : [ + nix_eval_for_docs, + '--expr', + 'import @INPUT0@ { prefix = "conf"; } (builtins.fromJSON (builtins.readFile ./@INPUT1@))', + ], + capture : true, + input : [ + 
'../../generate-settings.nix', + conf_file_json, + ], + output : 'conf-file.body.md', + env : nix_env_for_docs, +) + +conf_file_md = custom_target( + command : [ 'cat', '@INPUT0@', '@INPUT1@' ], + capture : true, + input : [ + 'conf-file-prefix.md', + conf_file_md_body, + ], + output : 'conf-file.md', +) diff --git a/doc/manual/src/command-ref/nix-build.md b/doc/manual/source/command-ref/nix-build.md similarity index 100% rename from doc/manual/src/command-ref/nix-build.md rename to doc/manual/source/command-ref/nix-build.md diff --git a/doc/manual/src/command-ref/nix-channel.md b/doc/manual/source/command-ref/nix-channel.md similarity index 100% rename from doc/manual/src/command-ref/nix-channel.md rename to doc/manual/source/command-ref/nix-channel.md diff --git a/doc/manual/src/command-ref/nix-collect-garbage.md b/doc/manual/source/command-ref/nix-collect-garbage.md similarity index 100% rename from doc/manual/src/command-ref/nix-collect-garbage.md rename to doc/manual/source/command-ref/nix-collect-garbage.md diff --git a/doc/manual/src/command-ref/nix-copy-closure.md b/doc/manual/source/command-ref/nix-copy-closure.md similarity index 100% rename from doc/manual/src/command-ref/nix-copy-closure.md rename to doc/manual/source/command-ref/nix-copy-closure.md diff --git a/doc/manual/src/command-ref/nix-daemon.md b/doc/manual/source/command-ref/nix-daemon.md similarity index 100% rename from doc/manual/src/command-ref/nix-daemon.md rename to doc/manual/source/command-ref/nix-daemon.md diff --git a/doc/manual/src/command-ref/nix-env.md b/doc/manual/source/command-ref/nix-env.md similarity index 99% rename from doc/manual/src/command-ref/nix-env.md rename to doc/manual/source/command-ref/nix-env.md index c6f627365..bda02149e 100644 --- a/doc/manual/src/command-ref/nix-env.md +++ b/doc/manual/source/command-ref/nix-env.md @@ -62,7 +62,7 @@ These pages can be viewed offline: Several operations, such as [`nix-env --query`](./nix-env/query.md) and [`nix-env --install`](./nix-env/install.md), take a list of *arguments* that specify the packages on which to operate. -Packages are identified based on a `name` part and a `version` part of a [symbolic derivation name](@docroot@/language/derivations.md#attr-names): +Packages are identified based on a `name` part and a `version` part of a [symbolic derivation name](@docroot@/language/derivations.md#attr-name): - `name`: Everything up to but not including the first dash (`-`) that is *not* followed by a letter. - `version`: The rest, excluding the separating dash. 
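
The same splitting rule is implemented by [`builtins.parseDrvName`](@docroot@/language/builtins.md#builtins-parseDrvName), which offers a quick way to check how a given name is parsed (the example name and output below are for illustration only):

```console
$ nix-instantiate --eval --strict --expr 'builtins.parseDrvName "firefox-33.0.2"'
{ name = "firefox"; version = "33.0.2"; }
```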
diff --git a/doc/manual/src/command-ref/nix-env/delete-generations.md b/doc/manual/source/command-ref/nix-env/delete-generations.md similarity index 100% rename from doc/manual/src/command-ref/nix-env/delete-generations.md rename to doc/manual/source/command-ref/nix-env/delete-generations.md diff --git a/doc/manual/src/command-ref/nix-env/env-common.md b/doc/manual/source/command-ref/nix-env/env-common.md similarity index 100% rename from doc/manual/src/command-ref/nix-env/env-common.md rename to doc/manual/source/command-ref/nix-env/env-common.md diff --git a/doc/manual/src/command-ref/nix-env/install.md b/doc/manual/source/command-ref/nix-env/install.md similarity index 100% rename from doc/manual/src/command-ref/nix-env/install.md rename to doc/manual/source/command-ref/nix-env/install.md diff --git a/doc/manual/src/command-ref/nix-env/list-generations.md b/doc/manual/source/command-ref/nix-env/list-generations.md similarity index 100% rename from doc/manual/src/command-ref/nix-env/list-generations.md rename to doc/manual/source/command-ref/nix-env/list-generations.md diff --git a/doc/manual/src/command-ref/nix-env/opt-common.md b/doc/manual/source/command-ref/nix-env/opt-common.md similarity index 100% rename from doc/manual/src/command-ref/nix-env/opt-common.md rename to doc/manual/source/command-ref/nix-env/opt-common.md diff --git a/doc/manual/src/command-ref/nix-env/query.md b/doc/manual/source/command-ref/nix-env/query.md similarity index 100% rename from doc/manual/src/command-ref/nix-env/query.md rename to doc/manual/source/command-ref/nix-env/query.md diff --git a/doc/manual/src/command-ref/nix-env/rollback.md b/doc/manual/source/command-ref/nix-env/rollback.md similarity index 100% rename from doc/manual/src/command-ref/nix-env/rollback.md rename to doc/manual/source/command-ref/nix-env/rollback.md diff --git a/doc/manual/src/command-ref/nix-env/set-flag.md b/doc/manual/source/command-ref/nix-env/set-flag.md similarity index 100% rename from doc/manual/src/command-ref/nix-env/set-flag.md rename to doc/manual/source/command-ref/nix-env/set-flag.md diff --git a/doc/manual/src/command-ref/nix-env/set.md b/doc/manual/source/command-ref/nix-env/set.md similarity index 100% rename from doc/manual/src/command-ref/nix-env/set.md rename to doc/manual/source/command-ref/nix-env/set.md diff --git a/doc/manual/src/command-ref/nix-env/switch-generation.md b/doc/manual/source/command-ref/nix-env/switch-generation.md similarity index 100% rename from doc/manual/src/command-ref/nix-env/switch-generation.md rename to doc/manual/source/command-ref/nix-env/switch-generation.md diff --git a/doc/manual/src/command-ref/nix-env/switch-profile.md b/doc/manual/source/command-ref/nix-env/switch-profile.md similarity index 100% rename from doc/manual/src/command-ref/nix-env/switch-profile.md rename to doc/manual/source/command-ref/nix-env/switch-profile.md diff --git a/doc/manual/src/command-ref/nix-env/uninstall.md b/doc/manual/source/command-ref/nix-env/uninstall.md similarity index 100% rename from doc/manual/src/command-ref/nix-env/uninstall.md rename to doc/manual/source/command-ref/nix-env/uninstall.md diff --git a/doc/manual/src/command-ref/nix-env/upgrade.md b/doc/manual/source/command-ref/nix-env/upgrade.md similarity index 100% rename from doc/manual/src/command-ref/nix-env/upgrade.md rename to doc/manual/source/command-ref/nix-env/upgrade.md diff --git a/doc/manual/src/command-ref/nix-hash.md b/doc/manual/source/command-ref/nix-hash.md similarity index 100% rename from 
doc/manual/src/command-ref/nix-hash.md rename to doc/manual/source/command-ref/nix-hash.md diff --git a/doc/manual/src/command-ref/nix-instantiate.md b/doc/manual/source/command-ref/nix-instantiate.md similarity index 100% rename from doc/manual/src/command-ref/nix-instantiate.md rename to doc/manual/source/command-ref/nix-instantiate.md diff --git a/doc/manual/src/command-ref/nix-prefetch-url.md b/doc/manual/source/command-ref/nix-prefetch-url.md similarity index 100% rename from doc/manual/src/command-ref/nix-prefetch-url.md rename to doc/manual/source/command-ref/nix-prefetch-url.md diff --git a/doc/manual/src/command-ref/nix-shell.md b/doc/manual/source/command-ref/nix-shell.md similarity index 100% rename from doc/manual/src/command-ref/nix-shell.md rename to doc/manual/source/command-ref/nix-shell.md diff --git a/doc/manual/src/command-ref/nix-store.md b/doc/manual/source/command-ref/nix-store.md similarity index 100% rename from doc/manual/src/command-ref/nix-store.md rename to doc/manual/source/command-ref/nix-store.md diff --git a/doc/manual/src/command-ref/nix-store/add-fixed.md b/doc/manual/source/command-ref/nix-store/add-fixed.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/add-fixed.md rename to doc/manual/source/command-ref/nix-store/add-fixed.md diff --git a/doc/manual/src/command-ref/nix-store/add.md b/doc/manual/source/command-ref/nix-store/add.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/add.md rename to doc/manual/source/command-ref/nix-store/add.md diff --git a/doc/manual/src/command-ref/nix-store/delete.md b/doc/manual/source/command-ref/nix-store/delete.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/delete.md rename to doc/manual/source/command-ref/nix-store/delete.md diff --git a/doc/manual/src/command-ref/nix-store/dump-db.md b/doc/manual/source/command-ref/nix-store/dump-db.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/dump-db.md rename to doc/manual/source/command-ref/nix-store/dump-db.md diff --git a/doc/manual/src/command-ref/nix-store/dump.md b/doc/manual/source/command-ref/nix-store/dump.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/dump.md rename to doc/manual/source/command-ref/nix-store/dump.md diff --git a/doc/manual/src/command-ref/nix-store/export.md b/doc/manual/source/command-ref/nix-store/export.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/export.md rename to doc/manual/source/command-ref/nix-store/export.md diff --git a/doc/manual/src/command-ref/nix-store/gc.md b/doc/manual/source/command-ref/nix-store/gc.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/gc.md rename to doc/manual/source/command-ref/nix-store/gc.md diff --git a/doc/manual/src/command-ref/nix-store/generate-binary-cache-key.md b/doc/manual/source/command-ref/nix-store/generate-binary-cache-key.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/generate-binary-cache-key.md rename to doc/manual/source/command-ref/nix-store/generate-binary-cache-key.md diff --git a/doc/manual/src/command-ref/nix-store/import.md b/doc/manual/source/command-ref/nix-store/import.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/import.md rename to doc/manual/source/command-ref/nix-store/import.md diff --git a/doc/manual/src/command-ref/nix-store/load-db.md b/doc/manual/source/command-ref/nix-store/load-db.md similarity index 100% rename from 
doc/manual/src/command-ref/nix-store/load-db.md rename to doc/manual/source/command-ref/nix-store/load-db.md diff --git a/doc/manual/src/command-ref/nix-store/opt-common.md b/doc/manual/source/command-ref/nix-store/opt-common.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/opt-common.md rename to doc/manual/source/command-ref/nix-store/opt-common.md diff --git a/doc/manual/src/command-ref/nix-store/optimise.md b/doc/manual/source/command-ref/nix-store/optimise.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/optimise.md rename to doc/manual/source/command-ref/nix-store/optimise.md diff --git a/doc/manual/src/command-ref/nix-store/print-env.md b/doc/manual/source/command-ref/nix-store/print-env.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/print-env.md rename to doc/manual/source/command-ref/nix-store/print-env.md diff --git a/doc/manual/src/command-ref/nix-store/query.md b/doc/manual/source/command-ref/nix-store/query.md similarity index 99% rename from doc/manual/src/command-ref/nix-store/query.md rename to doc/manual/source/command-ref/nix-store/query.md index b4efa734e..601f46af6 100644 --- a/doc/manual/src/command-ref/nix-store/query.md +++ b/doc/manual/source/command-ref/nix-store/query.md @@ -104,7 +104,7 @@ symlink. Prints a set of derivation files (`.drv`) which are supposed produce said paths when realized. Might print nothing, for example for source paths - or paths subsituted from a binary cache. + or paths substituted from a binary cache. - `--graph` @@ -241,4 +241,3 @@ $ nix-store --query --roots $(which svn) /nix/var/nix/profiles/default-82-link /home/eelco/.local/state/nix/profiles/profile-97-link ``` - diff --git a/doc/manual/src/command-ref/nix-store/read-log.md b/doc/manual/source/command-ref/nix-store/read-log.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/read-log.md rename to doc/manual/source/command-ref/nix-store/read-log.md diff --git a/doc/manual/src/command-ref/nix-store/realise.md b/doc/manual/source/command-ref/nix-store/realise.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/realise.md rename to doc/manual/source/command-ref/nix-store/realise.md diff --git a/doc/manual/src/command-ref/nix-store/repair-path.md b/doc/manual/source/command-ref/nix-store/repair-path.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/repair-path.md rename to doc/manual/source/command-ref/nix-store/repair-path.md diff --git a/doc/manual/src/command-ref/nix-store/restore.md b/doc/manual/source/command-ref/nix-store/restore.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/restore.md rename to doc/manual/source/command-ref/nix-store/restore.md diff --git a/doc/manual/src/command-ref/nix-store/serve.md b/doc/manual/source/command-ref/nix-store/serve.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/serve.md rename to doc/manual/source/command-ref/nix-store/serve.md diff --git a/doc/manual/src/command-ref/nix-store/verify-path.md b/doc/manual/source/command-ref/nix-store/verify-path.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/verify-path.md rename to doc/manual/source/command-ref/nix-store/verify-path.md diff --git a/doc/manual/src/command-ref/nix-store/verify.md b/doc/manual/source/command-ref/nix-store/verify.md similarity index 100% rename from doc/manual/src/command-ref/nix-store/verify.md rename to doc/manual/source/command-ref/nix-store/verify.md diff 
--git a/doc/manual/src/command-ref/opt-common.md b/doc/manual/source/command-ref/opt-common.md similarity index 92% rename from doc/manual/src/command-ref/opt-common.md rename to doc/manual/source/command-ref/opt-common.md index 69a700207..70ae03959 100644 --- a/doc/manual/src/command-ref/opt-common.md +++ b/doc/manual/source/command-ref/opt-common.md @@ -1,3 +1,7 @@ + + # Common Options Most Nix commands accept the following command-line options: @@ -161,6 +165,14 @@ Most Nix commands accept the following command-line options: You can override this using `--arg`, e.g., `nix-env --install --attr pkgname --arg system \"i686-freebsd\"`. (Note that since the argument is a Nix string literal, you have to escape the quotes.) +- [`--arg-from-file`](#opt-arg-from-file) *name* *path* + + Pass the contents of file *path* as the argument *name* to Nix functions. + +- [`--arg-from-stdin`](#opt-arg-from-stdin) *name* + + Pass the contents of stdin as the argument *name* to Nix functions. + - [`--argstr`](#opt-argstr) *name* *value* This option is like `--arg`, only the value is not a Nix expression but a string. @@ -179,6 +191,10 @@ Most Nix commands accept the following command-line options: attribute of the fourth element of the array in the `foo` attribute of the top-level expression. +- [`--eval-store`](#opt-eval-store) *store-url* + + The [URL to the Nix store](@docroot@/store/types/index.md#store-url-format) to use for evaluation, i.e. where to store derivations (`.drv` files) and inputs referenced by them. + - [`--expr`](#opt-expr) / `-E` Interpret the command line arguments as a list of Nix expressions to be parsed and evaluated, rather than as a list of file names of Nix expressions. @@ -194,6 +210,10 @@ Most Nix commands accept the following command-line options: Paths added through `-I` take precedence over the [`nix-path` configuration setting](@docroot@/command-ref/conf-file.md#conf-nix-path) and the [`NIX_PATH` environment variable](@docroot@/command-ref/env-common.md#env-NIX_PATH). +- [`--impure`](#opt-impure) + + Allow access to mutable paths and repositories. + - [`--option`](#opt-option) *name* *value* Set the Nix configuration option *name* to *value*. 
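
As a sketch of how `--arg-from-file` can be used (the expression and file here are made up for the example, and the file contents are assumed to be passed as a string, like `--argstr`):

```console
$ echo -n 'Hello, world' > greeting.txt
$ nix-instantiate --eval --expr '{ greeting }: greeting + "!"' --arg-from-file greeting greeting.txt
"Hello, world!"
```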
diff --git a/doc/manual/src/command-ref/status-build-failure.md b/doc/manual/source/command-ref/status-build-failure.md similarity index 100% rename from doc/manual/src/command-ref/status-build-failure.md rename to doc/manual/source/command-ref/status-build-failure.md diff --git a/doc/manual/src/command-ref/utilities.md b/doc/manual/source/command-ref/utilities.md similarity index 100% rename from doc/manual/src/command-ref/utilities.md rename to doc/manual/source/command-ref/utilities.md diff --git a/doc/manual/src/development/building.md b/doc/manual/source/development/building.md similarity index 79% rename from doc/manual/src/development/building.md rename to doc/manual/source/development/building.md index 5a5fb3368..dbf080296 100644 --- a/doc/manual/src/development/building.md +++ b/doc/manual/source/development/building.md @@ -34,16 +34,21 @@ $ nix-shell --attr devShells.x86_64-linux.native-clangStdenvPackages To build Nix itself in this shell: ```console -[nix-shell]$ autoreconfPhase -[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out -[nix-shell]$ make -j $NIX_BUILD_CORES +[nix-shell]$ mesonFlags+=" --prefix=$(pwd)/outputs/out" +[nix-shell]$ dontAddPrefix=1 mesonConfigurePhase +[nix-shell]$ ninjaBuildPhase ``` -To install it in `$(pwd)/outputs` and test it: +To test it: ```console -[nix-shell]$ make install -[nix-shell]$ make installcheck -j $NIX_BUILD_CORES +[nix-shell]$ mesonCheckPhase +``` + +To install it in `$(pwd)/outputs`: + +```console +[nix-shell]$ ninjaInstallPhase [nix-shell]$ ./outputs/out/bin/nix --version nix (Nix) 2.12 ``` @@ -85,16 +90,20 @@ $ nix develop .#native-clangStdenvPackages To build Nix itself in this shell: ```console -[nix-shell]$ autoreconfPhase -[nix-shell]$ configurePhase -[nix-shell]$ make -j $NIX_BUILD_CORES OPTIMIZE=0 +[nix-shell]$ mesonConfigurePhase +[nix-shell]$ ninjaBuildPhase ``` -To install it in `$(pwd)/outputs` and test it: +To test it: ```console -[nix-shell]$ make install OPTIMIZE=0 -[nix-shell]$ make installcheck check -j $NIX_BUILD_CORES +[nix-shell]$ mesonCheckPhase +``` + +To install it in `$(pwd)/outputs`: + +```console +[nix-shell]$ ninjaInstallPhase [nix-shell]$ nix --version nix (Nix) 2.12 ``` @@ -110,25 +119,6 @@ $ nix build You can also build Nix for one of the [supported platforms](#platforms). -## Makefile variables - -You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run `make install`. - -Run `make` with [`-e` / `--environment-overrides`](https://www.gnu.org/software/make/manual/make.html#index-_002de) to allow environment variables to override `Makefile` variables: - -- `ENABLE_BUILD=yes` to enable building the C++ code. -- `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.). - - The docs can take a while to build, so you may want to disable this for local development. -- `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests. -- `OPTIMIZE=1` to enable optimizations. -- `libraries=libutil programs=` to only build a specific library. - - This will fail in the linking phase if the other libraries haven't been built, but is useful for checking types. -- `libraries= programs=nix` to only build a specific program. - - This will not work in general, because the programs need the libraries. 
- ## Platforms Nix can be built for various platforms, as specified in [`flake.nix`]: @@ -175,27 +165,38 @@ Add more [system types](#system-type) to `crossSystems` in `flake.nix` to bootst It is useful to perform multiple cross and native builds on the same source tree, for example to ensure that better support for one platform doesn't break the build for another. -In order to facilitate this, Nix has some support for being built out of tree – that is, placing build artefacts in a different directory than the source code: +Meson thankfully makes this very easy by confining all build products to the build directory --- one simply shares the source directory between multiple build directories, each of which contains the build of Nix for a different platform.

-1. Create a directory for the build, e.g.

+Nixpkgs's `mesonConfigurePhase` always chooses `build` in the current directory as the name and location of the build.
+This makes having multiple build directories slightly more inconvenient.
+The good news is that Meson/Ninja seem to cope well with relocating the build directory after it is created.
+
+Here's how to do that:
+
+1. Configure as usual ```bash - mkdir build + mesonConfigurePhase ``` -2. Run the configure script from that directory, e.g. + +2. Rename the build directory ```bash - cd build - ../configure + cd .. # since `mesonConfigurePhase` cd'd inside + mv build build-linux # or whatever name we want + cd build-linux ``` -3. Run make from the source directory, but with the build directory specified, e.g. + +3. Build as usual ```bash - make builddir=build + ninjaBuildPhase ``` +> **N.B.** +> [`nixpkgs#335818`](https://github.com/NixOS/nixpkgs/issues/335818) tracks giving `mesonConfigurePhase` proper support for custom build directories. +> When it is fixed, we can simplify these instructions and then remove this notice. + ## System type Nix uses a string with the following format to identify the *system type* or *platform* it runs on: @@ -257,11 +258,8 @@ You can use any of the other supported environments in place of `nix-ccacheStden The `clangd` LSP server is installed by default on the `clang`-based `devShell`s. See [supported compilation environments](#compilation-environments) and instructions how to set up a shell [with flakes](#nix-with-flakes) or in [classic Nix](#classic-nix). -To use the LSP with your editor, you first need to [set up `clangd`](https://clangd.llvm.org/installation#project-setup) by running: - -```console -make compile_commands.json -``` +To use the LSP with your editor, you will want a `compile_commands.json` file telling `clangd` how we are compiling the code. +Meson's configure always produces this inside the build directory. Configure your editor to use the `clangd` from the `.#native-clangStdenvPackages` shell. You can do that either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration). 
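
For example, two common ways to point `clangd` at that file (illustrative only; either approach works, and the `build` directory name is an assumption based on the default described above):

```console
# Option 1: a .clangd configuration file naming the build directory
$ cat > .clangd <<EOF
CompileFlags:
  CompilationDatabase: build
EOF

# Option 2: a symlink in the source root, which clangd also finds
$ ln -s build/compile_commands.json compile_commands.json
```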
@@ -276,7 +274,7 @@ Configure your editor to use the `clangd` from the `.#native-clangStdenvPackages You may run the formatters as a one-off using: ```console -make format +./maintainers/format.sh ``` If you'd like to run the formatters before every commit, install the hooks: diff --git a/doc/manual/src/development/cli-guideline.md b/doc/manual/source/development/cli-guideline.md similarity index 100% rename from doc/manual/src/development/cli-guideline.md rename to doc/manual/source/development/cli-guideline.md diff --git a/doc/manual/src/development/contributing.md b/doc/manual/source/development/contributing.md similarity index 100% rename from doc/manual/src/development/contributing.md rename to doc/manual/source/development/contributing.md diff --git a/doc/manual/src/development/cxx.md b/doc/manual/source/development/cxx.md similarity index 100% rename from doc/manual/src/development/cxx.md rename to doc/manual/source/development/cxx.md diff --git a/doc/manual/source/development/debugging.md b/doc/manual/source/development/debugging.md new file mode 100644 index 000000000..ce623110b --- /dev/null +++ b/doc/manual/source/development/debugging.md @@ -0,0 +1,62 @@ +# Debugging Nix + +This section shows how to build and debug Nix with debug symbols enabled. + +## Building Nix with Debug Symbols + +In the development shell, set the `mesonBuildType` environment variable to `debug` before configuring the build: + +```console +[nix-shell]$ export mesonBuildType=debugoptimized +``` + +Then, proceed to build Nix as described in [Building Nix](./building.md). +This will build Nix with debug symbols, which are essential for effective debugging. + +## Debugging the Nix Binary + +Obtain your preferred debugger within the development shell: + +```console +[nix-shell]$ nix-shell -p gdb +``` + +On macOS, use `lldb`: + +```console +[nix-shell]$ nix-shell -p lldb +``` + +### Launching the Debugger + +To debug the Nix binary, run: + +```console +[nix-shell]$ gdb --args ../outputs/out/bin/nix +``` + +On macOS, use `lldb`: + +```console +[nix-shell]$ lldb -- ../outputs/out/bin/nix +``` + +### Using the Debugger + +Inside the debugger, you can set breakpoints, run the program, and inspect variables. + +```gdb +(gdb) break main +(gdb) run +``` + +Refer to the [GDB Documentation](https://www.gnu.org/software/gdb/documentation/) for comprehensive usage instructions. + +On macOS, use `lldb`: + +```lldb +(lldb) breakpoint set --name main +(lldb) process launch -- +``` + +Refer to the [LLDB Tutorial](https://lldb.llvm.org/use/tutorial.html) for comprehensive usage instructions. diff --git a/doc/manual/src/development/documentation.md b/doc/manual/source/development/documentation.md similarity index 97% rename from doc/manual/src/development/documentation.md rename to doc/manual/source/development/documentation.md index 63f574ab7..d51373e7b 100644 --- a/doc/manual/src/development/documentation.md +++ b/doc/manual/source/development/documentation.md @@ -13,13 +13,13 @@ Incremental refactorings of the documentation build setup to make it faster or e Build the manual from scratch: ```console -nix-build $(nix-instantiate)'!doc' +nix-build -E '(import ./.).packages.${builtins.currentSystem}.nix.doc' ``` or ```console -nix build .#^doc +nix build .#nix^doc ``` and open `./result-doc/share/doc/nix/manual/index.html`. 
@@ -35,7 +35,7 @@ In order to reflect changes to the [Makefile for the manual], clear all generate [Makefile for the manual]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk ```console -rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/src/command-ref/new-cli && make manual-html -j $NIX_BUILD_CORES +rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/source/command-ref/new-cli && make manual-html -j $NIX_BUILD_CORES ``` ## Style guide @@ -182,7 +182,7 @@ Please observe these guidelines to ease reviews: `@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own. -If a broken link occurs in a snippet that was inserted into multiple generated files in different directories, use `@docroot@` to reference the `doc/manual/src` directory. +If a broken link occurs in a snippet that was inserted into multiple generated files in different directories, use `@docroot@` to reference the `doc/manual/source` directory. If the `@docroot@` literal appears in an error message from the [`mdbook-linkcheck`] tool, the `@docroot@` replacement needs to be applied to the generated source file that mentions it. See existing `@docroot@` logic in the [Makefile for the manual]. diff --git a/doc/manual/src/development/experimental-features.md b/doc/manual/source/development/experimental-features.md similarity index 100% rename from doc/manual/src/development/experimental-features.md rename to doc/manual/source/development/experimental-features.md diff --git a/doc/manual/src/development/index.md b/doc/manual/source/development/index.md similarity index 100% rename from doc/manual/src/development/index.md rename to doc/manual/source/development/index.md diff --git a/doc/manual/src/development/json-guideline.md b/doc/manual/source/development/json-guideline.md similarity index 98% rename from doc/manual/src/development/json-guideline.md rename to doc/manual/source/development/json-guideline.md index b4bc92af9..309b4b3a0 100644 --- a/doc/manual/src/development/json-guideline.md +++ b/doc/manual/source/development/json-guideline.md @@ -90,7 +90,7 @@ This representation is extensible and preserves the ordering: ## Self-describing values -As described in the previous section, it's crucial that schemas can be extended with with new fields without breaking compatibility. +As described in the previous section, it's crucial that schemas can be extended with new fields without breaking compatibility. However, that should *not* mean we use the presence/absence of fields to indicate optional information *within* a version of the schema. Instead, always include the field, and use `null` to indicate the "nothing" case. 
diff --git a/doc/manual/source/development/meson.build b/doc/manual/source/development/meson.build new file mode 100644 index 000000000..5ffbfe394 --- /dev/null +++ b/doc/manual/source/development/meson.build @@ -0,0 +1,12 @@ +experimental_feature_descriptions_md = custom_target( + command : nix_eval_for_docs + [ + '--expr', + 'import @INPUT0@ (builtins.fromJSON (builtins.readFile @INPUT1@))', + ], + input : [ + '../../generate-xp-features.nix', + xp_features_json, + ], + capture : true, + output : 'experimental-feature-descriptions.md', +) diff --git a/doc/manual/src/development/testing.md b/doc/manual/source/development/testing.md similarity index 83% rename from doc/manual/src/development/testing.md rename to doc/manual/source/development/testing.md index 3949164d5..a9f7c939c 100644 --- a/doc/manual/src/development/testing.md +++ b/doc/manual/source/development/testing.md @@ -29,7 +29,7 @@ The unit tests are defined using the [googletest] and [rapidcheck] frameworks. > ``` > src > ├── libexpr -> │ ├── local.mk +> │ ├── meson.build > │ ├── value/context.hh > │ ├── value/context.cc > │ … @@ -37,33 +37,32 @@ The unit tests are defined using the [googletest] and [rapidcheck] frameworks. > ├── tests > │ │ > │ … -> │ └── unit -> │ ├── libutil -> │ │ ├── local.mk -> │ │ … -> │ │ └── data -> │ │ ├── git/tree.txt -> │ │ … -> │ │ -> │ ├── libexpr-support -> │ │ ├── local.mk -> │ │ └── tests -> │ │ ├── value/context.hh -> │ │ ├── value/context.cc -> │ │ … -> │ │ -> │ ├── libexpr -> │ … ├── local.mk -> │ ├── value/context.cc -> │ … +> │ ├── libutil-tests +> │ │ ├── meson.build +> │ │ … +> │ │ └── data +> │ │ ├── git/tree.txt +> │ │ … +> │ │ +> │ ├── libexpr-test-support +> │ │ ├── meson.build +> │ │ └── tests +> │ │ ├── value/context.hh +> │ │ ├── value/context.cc +> │ │ … +> │ │ +> │ ├── libexpr-tests +> │ … ├── meson.build +> │ ├── value/context.cc +> │ … > … > ``` The tests for each Nix library (`libnixexpr`, `libnixstore`, etc..) live inside a directory `src/${library_name_without-nix}-test`. -Given an interface (header) and implementation pair in the original library, say, `src/libexpr/value/context.{hh,cc}`, we write tests for it in `src/nix-expr-tests/value/context.cc`, and (possibly) declare/define additional interfaces for testing purposes in `src/nix-expr-test-support/tests/value/context.{hh,cc}`. +Given an interface (header) and implementation pair in the original library, say, `src/libexpr/value/context.{hh,cc}`, we write tests for it in `src/libexpr-tests/value/context.cc`, and (possibly) declare/define additional interfaces for testing purposes in `src/libexpr-test-support/tests/value/context.{hh,cc}`. Data for unit tests is stored in a `data` subdir of the directory for each unit test executable. -For example, `libnixstore` code is in `src/libstore`, and its test data is in `src/nix-store-tests/data`. +For example, `libnixstore` code is in `src/libstore`, and its test data is in `src/libstore-tests/data`. The path to the `src/${library_name_without-nix}-test/data` directory is passed to the unit test executable with the environment variable `_NIX_TEST_UNIT_DATA`. Note that each executable only gets the data for its tests. @@ -128,67 +127,63 @@ On other platforms they wouldn't be run at all. ## Functional tests -The functional tests reside under the `tests/functional` directory and are listed in `tests/functional/local.mk`. +The functional tests reside under the `tests/functional` directory and are listed in `tests/functional/meson.build`. Each test is a bash script. 
Functional tests are run during `installCheck` in the `nix` package build, as well as separately from the build, in VM tests. ### Running the whole test suite -The whole test suite can be run with: +The whole test suite (functional and unit tests) can be run with: ```shell-session -$ make install && make installcheck -ran test tests/functional/foo.sh... [PASS] -ran test tests/functional/bar.sh... [PASS] -... +$ mesonCheckPhase ``` ### Grouping tests Sometimes it is useful to group related tests so they can be easily run together without running the entire test suite. Each test group is in a subdirectory of `tests`. -For example, `tests/functional/ca/local.mk` defines a `ca` test group for content-addressed derivation outputs. +For example, `tests/functional/ca/meson.build` defines a `ca` test group for content-addressed derivation outputs. That test group can be run like this: ```shell-session -$ make ca.test-group -j50 -ran test tests/functional/ca/nix-run.sh... [PASS] -ran test tests/functional/ca/import-derivation.sh... [PASS] -... -``` - -The test group is defined in Make like this: -```makefile -$(test-group-name)-tests := \ - $(d)/test0.sh \ - $(d)/test1.sh \ - ... - -install-tests-groups += $(test-group-name) +$ meson test --suite ca
+ninja: Entering directory `/home/jcericson/src/nix/master/build'
+ninja: no work to do.
+[1-20/20] 🌑 nix-functional-tests:ca / ca/why-depends 1/20 nix-functional-tests:ca / ca/nix-run OK 0.16s
+[2-20/20] 🌒 nix-functional-tests:ca / ca/why-depends 2/20 nix-functional-tests:ca / ca/import-derivation OK 0.17s ``` ### Running individual tests -Individual tests can be run with `make`: +Individual tests can be run with `meson`: ```shell-session -$ make tests/functional/${testName}.sh.test -ran test tests/functional/${testName}.sh... [PASS] +$ meson test --verbose ${testName}
+ninja: Entering directory `/home/jcericson/src/nix/master/build'
+ninja: no work to do.
+1/1 nix-functional-tests:main / ${testName} OK 0.41s
+
+Ok: 1
+Expected Fail: 0
+Fail: 0
+Unexpected Pass: 0
+Skipped: 0
+Timeout: 0
+
+Full log written to /home/jcericson/src/nix/master/build/meson-logs/testlog.txt ``` -or without `make`: +The `--verbose` flag will make Meson also show the console output of each test for easier debugging.
+The test script will then be traced with `set -x` and the output displayed as it happens,
+regardless of whether the test succeeds or fails.
+
+Tests can also be run directly without `meson`: ```shell-session -$ ./mk/run-test.sh tests/functional/${testName}.sh -ran test tests/functional/${testName}.sh... [PASS] -``` - -To see the complete output, one can also run: - -```shell-session -$ ./mk/debug-test.sh tests/functional/${testName}.sh +$ TEST_NAME=${testName} NIX_REMOTE='' PS4='+(${BASH_SOURCE[0]-$0}:$LINENO) ' tests/functional/${testName}.sh
+(${testName}.sh:1) foo
output from foo
+(${testName}.sh:2) bar
@@ -196,8 +191,6 @@ output from bar ... ``` -The test script will then be traced with `set -x` and the output displayed as it happens, regardless of whether the test succeeds or fails. - ### Debugging failing functional tests When a functional test fails, it usually does so somewhere in the middle of the script. @@ -254,7 +247,7 @@ It is frequently useful to regenerate the expected output. To do that, rerun the failed test(s) with `_NIX_TEST_ACCEPT=1`. 
For example: ```bash -_NIX_TEST_ACCEPT=1 make tests/functional/lang.sh.test +_NIX_TEST_ACCEPT=1 meson test lang ``` This convention is shared with the [characterisation unit tests](#characterisation-testing-unit) too. @@ -276,14 +269,12 @@ To ensure that characterisation testing doesn't make it harder to intentionally We run the functional tests not just in the build, but also in VM tests. This helps us ensure that Nix works correctly on NixOS, and environments that have similar characteristics that are hard to reproduce in a build environment. -The recommended way to run these tests during development is: +These can be run with: ```shell -nix build .#hydraJobs.tests.functional_user.quickBuild +nix build .#hydraJobs.tests.functional_user ``` -The `quickBuild` attribute configures the test to use a `nix` package that's built without integration tests, so that you can iterate on the tests without performing recompilations due to the changed sources for `installCheck`. - Generally, this build is sufficient, but in nightly or CI we also test the attributes `functional_root` and `functional_trusted`, in which the test suite is run with different levels of authorization. ## Integration tests @@ -294,8 +285,6 @@ Because these tests are expensive and require more than what the standard github You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-build -A hydraJobs.tests.{testName}`. -If you are testing a build of `nix` that you haven't compiled yet, you may iterate faster by appending the `quickBuild` attribute: `nix build .#hydraJobs.tests.{testName}.quickBuild`. - ## Installer tests After a one-time setup, the Nix repository's GitHub Actions continuous integration (CI) workflow can test the installer each time you push to a branch. diff --git a/doc/manual/src/favicon.png b/doc/manual/source/favicon.png similarity index 100% rename from doc/manual/src/favicon.png rename to doc/manual/source/favicon.png diff --git a/doc/manual/src/favicon.svg b/doc/manual/source/favicon.svg similarity index 100% rename from doc/manual/src/favicon.svg rename to doc/manual/source/favicon.svg diff --git a/doc/manual/src/figures/user-environments.png b/doc/manual/source/figures/user-environments.png similarity index 100% rename from doc/manual/src/figures/user-environments.png rename to doc/manual/source/figures/user-environments.png diff --git a/doc/manual/src/figures/user-environments.sxd b/doc/manual/source/figures/user-environments.sxd similarity index 100% rename from doc/manual/src/figures/user-environments.sxd rename to doc/manual/source/figures/user-environments.sxd diff --git a/doc/manual/src/glossary.md b/doc/manual/source/glossary.md similarity index 96% rename from doc/manual/src/glossary.md rename to doc/manual/source/glossary.md index 877c4668b..fa357ece3 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/source/glossary.md @@ -119,7 +119,7 @@ A store object consists of a [file system object], [references][reference] to other store objects, and other metadata. It can be referred to by a [store path]. - See [Store Object](@docroot@/store/index.md#store-object) for details. + See [Store Object](@docroot@/store/store-object.md) for details. [store object]: #gloss-store-object @@ -182,13 +182,18 @@ - [Nix expression]{#gloss-nix-expression} - 1. Commonly, a high-level description of software packages and compositions - thereof. Deploying software using Nix entails writing Nix - expressions for your packages. 
Nix expressions specify [derivations][derivation], - which are [instantiated][instantiate] into the Nix store as [store derivations][store derivation]. - These derivations can then be [realised][realise] to produce [outputs][output]. + A syntactically valid use of the [Nix language]. - 2. A syntactically valid use of the [Nix language]. For example, the contents of a `.nix` file form an expression. + > **Example** + > + > The contents of a `.nix` file form a Nix expression. + + Nix expressions specify [derivations][derivation], which are [instantiated][instantiate] into the Nix store as [store derivations][store derivation]. + These derivations can then be [realised][realise] to produce [outputs][output]. + + > **Example** + > + > Building and deploying software using Nix entails writing Nix expressions as a high-level description of packages and compositions thereof. - [reference]{#gloss-reference} diff --git a/doc/manual/src/installation/building-source.md b/doc/manual/source/installation/building-source.md similarity index 52% rename from doc/manual/src/installation/building-source.md rename to doc/manual/source/installation/building-source.md index 7dad9805a..d35cc18c2 100644 --- a/doc/manual/src/installation/building-source.md +++ b/doc/manual/source/installation/building-source.md @@ -1,31 +1,26 @@ # Building Nix from Source -After cloning Nix's Git repository, issue the following commands: +Nix is built with [Meson](https://mesonbuild.com/). +It is broken up into multiple Meson packages, which are optionally combined in a single project using Meson's [subprojects](https://mesonbuild.com/Subprojects.html) feature. -```console -$ autoreconf -vfi -$ ./configure options... -$ make -$ make install -``` +There are no mandatory extra steps to the building process: +generic Meson installation instructions like [this](https://mesonbuild.com/Quick-guide.html#using-meson-as-a-distro-packager) should work. -Nix requires GNU Make so you may need to invoke `gmake` instead. - -The installation path can be specified by passing the `--prefix=prefix` +The installation path can be specified by passing the `-Dprefix=prefix` to `configure`. The default installation directory is `/usr/local`. You can change this to any location you like. You must have write permission to the *prefix* path. Nix keeps its *store* (the place where packages are stored) in `/nix/store` by default. This can be changed using -`--with-store-dir=path`. +`-Dstore-dir=path`. > **Warning** -> +> > It is best *not* to change the Nix store from its default, since doing > so makes it impossible to use pre-built binaries from the standard > Nixpkgs channels — that is, all packages will need to be built from > source. Nix keeps state (such as its database and log files) in `/nix/var` by -default. This can be changed using `--localstatedir=path`. +default. This can be changed using `-Dlocalstatedir=path`. 
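
A minimal sketch of the resulting build-and-install sequence, assuming the defaults mentioned above (adjust the `-D` options as needed for your system):

```console
$ meson setup build -Dprefix=/usr/local -Dstore-dir=/nix/store -Dlocalstatedir=/nix/var
$ ninja -C build
$ sudo meson install -C build
```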
diff --git a/doc/manual/src/installation/env-variables.md b/doc/manual/source/installation/env-variables.md similarity index 100% rename from doc/manual/src/installation/env-variables.md rename to doc/manual/source/installation/env-variables.md diff --git a/doc/manual/src/installation/index.md b/doc/manual/source/installation/index.md similarity index 64% rename from doc/manual/src/installation/index.md rename to doc/manual/source/installation/index.md index dafdeb667..48725c1ba 100644 --- a/doc/manual/src/installation/index.md +++ b/doc/manual/source/installation/index.md @@ -14,8 +14,16 @@ This option requires either: * Linux running systemd, with SELinux disabled * MacOS +> **Updating to macOS 15 Sequoia** +> +> If you recently updated to macOS 15 Sequoia and are getting +> ```console +> error: the user '_nixbld1' in the group 'nixbld' does not exist +> ``` +> when running Nix commands, refer to GitHub issue [NixOS/nix#10892](https://github.com/NixOS/nix/issues/10892) for instructions to fix your installation without reinstalling. + ```console -$ bash <(curl -L https://nixos.org/nix/install) --daemon +$ curl -L https://nixos.org/nix/install | sh -s -- --daemon ``` ## Single-user @@ -28,7 +36,7 @@ cannot offer equivalent sharing, isolation, or security. This option is suitable for systems without systemd. ```console -$ bash <(curl -L https://nixos.org/nix/install) --no-daemon +$ curl -L https://nixos.org/nix/install | sh -s -- --no-daemon ``` ## Distributions diff --git a/doc/manual/src/installation/installing-binary.md b/doc/manual/source/installation/installing-binary.md similarity index 93% rename from doc/manual/src/installation/installing-binary.md rename to doc/manual/source/installation/installing-binary.md index 6a168ff3d..6a1a5ddca 100644 --- a/doc/manual/src/installation/installing-binary.md +++ b/doc/manual/source/installation/installing-binary.md @@ -1,5 +1,13 @@ # Installing a Binary Distribution +> **Updating to macOS 15 Sequoia** +> +> If you recently updated to macOS 15 Sequoia and are getting +> ```console +> error: the user '_nixbld1' in the group 'nixbld' does not exist +> ``` +> when running Nix commands, refer to GitHub issue [NixOS/nix#10892](https://github.com/NixOS/nix/issues/10892) for instructions to fix your installation without reinstalling. 
+ To install the latest version Nix, run the following command: ```console diff --git a/doc/manual/src/installation/installing-docker.md b/doc/manual/source/installation/installing-docker.md similarity index 100% rename from doc/manual/src/installation/installing-docker.md rename to doc/manual/source/installation/installing-docker.md diff --git a/doc/manual/src/installation/installing-source.md b/doc/manual/source/installation/installing-source.md similarity index 100% rename from doc/manual/src/installation/installing-source.md rename to doc/manual/source/installation/installing-source.md diff --git a/doc/manual/src/installation/multi-user.md b/doc/manual/source/installation/multi-user.md similarity index 100% rename from doc/manual/src/installation/multi-user.md rename to doc/manual/source/installation/multi-user.md diff --git a/doc/manual/src/installation/nix-security.md b/doc/manual/source/installation/nix-security.md similarity index 100% rename from doc/manual/src/installation/nix-security.md rename to doc/manual/source/installation/nix-security.md diff --git a/doc/manual/src/installation/obtaining-source.md b/doc/manual/source/installation/obtaining-source.md similarity index 100% rename from doc/manual/src/installation/obtaining-source.md rename to doc/manual/source/installation/obtaining-source.md diff --git a/doc/manual/src/installation/prerequisites-source.md b/doc/manual/source/installation/prerequisites-source.md similarity index 94% rename from doc/manual/src/installation/prerequisites-source.md rename to doc/manual/source/installation/prerequisites-source.md index 4aafa6d27..c346a0a4b 100644 --- a/doc/manual/src/installation/prerequisites-source.md +++ b/doc/manual/source/installation/prerequisites-source.md @@ -39,8 +39,6 @@ `pkgconfig` and the Boehm garbage collector, and pass the flag `--enable-gc` to `configure`. - For `bdw-gc` <= 8.2.4 Nix needs a [small patch](https://github.com/NixOS/nix/blob/ac4d2e7b857acdfeac35ac8a592bdecee2d29838/boehmgc-traceable_allocator-public.diff) to be applied. - - The `boost` library of version 1.66.0 or higher. It can be obtained from the official web site . diff --git a/doc/manual/src/installation/single-user.md b/doc/manual/source/installation/single-user.md similarity index 100% rename from doc/manual/src/installation/single-user.md rename to doc/manual/source/installation/single-user.md diff --git a/doc/manual/src/installation/supported-platforms.md b/doc/manual/source/installation/supported-platforms.md similarity index 100% rename from doc/manual/src/installation/supported-platforms.md rename to doc/manual/source/installation/supported-platforms.md diff --git a/doc/manual/src/installation/uninstall.md b/doc/manual/source/installation/uninstall.md similarity index 87% rename from doc/manual/src/installation/uninstall.md rename to doc/manual/source/installation/uninstall.md index 590327fea..47689a16e 100644 --- a/doc/manual/src/installation/uninstall.md +++ b/doc/manual/source/installation/uninstall.md @@ -19,7 +19,7 @@ If you are on Linux with systemd: Remove files created by Nix: ```console -sudo rm -rf /etc/nix /etc/profile.d/nix.sh /etc/tmpfiles.d/nix-daemon.conf /nix ~root/.nix-channels ~root/.nix-defexpr ~root/.nix-profile +sudo rm -rf /etc/nix /etc/profile.d/nix.sh /etc/tmpfiles.d/nix-daemon.conf /nix ~root/.nix-channels ~root/.nix-defexpr ~root/.nix-profile ~root/.cache/nix ``` Remove build users and their group: @@ -43,6 +43,14 @@ which you may remove. 
### macOS +> **Updating to macOS 15 Sequoia** +> +> If you recently updated to macOS 15 Sequoia and are getting +> ```console +> error: the user '_nixbld1' in the group 'nixbld' does not exist +> ``` +> when running Nix commands, refer to GitHub issue [NixOS/nix#10892](https://github.com/NixOS/nix/issues/10892) for instructions to fix your installation without reinstalling. + 1. If system-wide shell initialisation files haven't been altered since installing Nix, use the backups made by the installer: ```console @@ -92,7 +100,7 @@ which you may remove. LABEL=Nix\040Store /nix apfs rw,nobrowse ``` - by setting the cursor on the respective line using the error keys, and pressing `dd`, and then `:wq` to save the file. + by setting the cursor on the respective line using the arrow keys, and pressing `dd`, and then `:wq` to save the file. This will prevent automatic mounting of the Nix Store volume. @@ -133,7 +141,9 @@ which you may remove. diskutil list ``` - If you _do_ find a "Nix Store" volume, delete it by running `diskutil deleteVolume` with the store volume's `diskXsY` identifier. + If you _do_ find a "Nix Store" volume, delete it by running `diskutil apfs deleteVolume` with the store volume's `diskXsY` identifier. + + If you get an error that the volume is in use by the kernel, reboot and immediately delete the volume before starting any other process. > **Note** > diff --git a/doc/manual/src/installation/upgrading.md b/doc/manual/source/installation/upgrading.md similarity index 100% rename from doc/manual/src/installation/upgrading.md rename to doc/manual/source/installation/upgrading.md diff --git a/doc/manual/src/introduction.md b/doc/manual/source/introduction.md similarity index 100% rename from doc/manual/src/introduction.md rename to doc/manual/source/introduction.md diff --git a/doc/manual/src/language/advanced-attributes.md b/doc/manual/source/language/advanced-attributes.md similarity index 100% rename from doc/manual/src/language/advanced-attributes.md rename to doc/manual/source/language/advanced-attributes.md diff --git a/doc/manual/src/language/builtins-prefix.md b/doc/manual/source/language/builtins-prefix.md similarity index 100% rename from doc/manual/src/language/builtins-prefix.md rename to doc/manual/source/language/builtins-prefix.md diff --git a/doc/manual/src/language/builtins-suffix.md b/doc/manual/source/language/builtins-suffix.md similarity index 100% rename from doc/manual/src/language/builtins-suffix.md rename to doc/manual/source/language/builtins-suffix.md diff --git a/doc/manual/source/language/constructs.md b/doc/manual/source/language/constructs.md new file mode 100644 index 000000000..41a180246 --- /dev/null +++ b/doc/manual/source/language/constructs.md @@ -0,0 +1 @@ +# Language Constructs diff --git a/doc/manual/src/language/constructs/lookup-path.md b/doc/manual/source/language/constructs/lookup-path.md similarity index 100% rename from doc/manual/src/language/constructs/lookup-path.md rename to doc/manual/source/language/constructs/lookup-path.md diff --git a/doc/manual/src/language/derivations.md b/doc/manual/source/language/derivations.md similarity index 98% rename from doc/manual/src/language/derivations.md rename to doc/manual/source/language/derivations.md index 8e3f0f791..771b2bd91 100644 --- a/doc/manual/src/language/derivations.md +++ b/doc/manual/source/language/derivations.md @@ -113,7 +113,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect ### Optional -- [`args`]{#attr-args} 
([List](@docroot@/language/types.md#list) of [String](@docroot@/language/types.md#type-string)) +- [`args`]{#attr-args} ([List](@docroot@/language/types.md#type-list) of [String](@docroot@/language/types.md#type-string)) Default: `[ ]` @@ -132,7 +132,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect > }; > ``` -- [`outputs`]{#attr-outputs} ([List](@docroot@/language/types.md#list) of [String](@docroot@/language/types.md#type-string)) +- [`outputs`]{#attr-outputs} ([List](@docroot@/language/types.md#type-list) of [String](@docroot@/language/types.md#type-string)) Default: `[ "out" ]` diff --git a/doc/manual/src/language/identifiers.md b/doc/manual/source/language/identifiers.md similarity index 100% rename from doc/manual/src/language/identifiers.md rename to doc/manual/source/language/identifiers.md diff --git a/doc/manual/src/language/import-from-derivation.md b/doc/manual/source/language/import-from-derivation.md similarity index 100% rename from doc/manual/src/language/import-from-derivation.md rename to doc/manual/source/language/import-from-derivation.md diff --git a/doc/manual/src/language/index.md b/doc/manual/source/language/index.md similarity index 100% rename from doc/manual/src/language/index.md rename to doc/manual/source/language/index.md diff --git a/doc/manual/source/language/meson.build b/doc/manual/source/language/meson.build new file mode 100644 index 000000000..97469e2f3 --- /dev/null +++ b/doc/manual/source/language/meson.build @@ -0,0 +1,20 @@ +builtins_md = custom_target( + command : [ + python.full_path(), + '@INPUT0@', + '@OUTPUT@', + '--' + ] + nix_eval_for_docs + [ + '--expr', + '(builtins.readFile @INPUT3@) + import @INPUT1@ (builtins.fromJSON (builtins.readFile ./@INPUT2@)) + (builtins.readFile @INPUT4@)', + ], + input : [ + '../../remove_before_wrapper.py', + '../../generate-builtins.nix', + language_json, + 'builtins-prefix.md', + 'builtins-suffix.md' + ], + output : 'builtins.md', + env : nix_env_for_docs, +) diff --git a/doc/manual/src/language/operators.md b/doc/manual/source/language/operators.md similarity index 97% rename from doc/manual/src/language/operators.md rename to doc/manual/source/language/operators.md index 27444258a..dbf2441cb 100644 --- a/doc/manual/src/language/operators.md +++ b/doc/manual/source/language/operators.md @@ -33,7 +33,7 @@ [path]: ./types.md#type-path [number]: ./types.md#type-float [list]: ./types.md#type-list -[attribute set]: ./types.md#attribute-set +[attribute set]: ./types.md#type-attrs @@ -75,7 +75,7 @@ The result is a [Boolean] value. See also: [`builtins.hasAttr`](@docroot@/language/builtins.md#builtins-hasAttr) -[Boolean]: ./types.md#type-boolean +[Boolean]: ./types.md#type-bool [Has attribute]: #has-attribute @@ -102,7 +102,7 @@ The `+` operator is overloaded to also work on strings and paths. > > *string* `+` *string* -Concatenate two [strings][string] and merge their string contexts. +Concatenate two [strings][string] and merge their [string contexts](./string-context.md). [String concatenation]: #string-concatenation @@ -128,7 +128,7 @@ The result is a path. > **Note** > -> The string must not have a string context that refers to a [store path]. +> The string must not have a [string context](./string-context.md) that refers to a [store path]. 
[Path and string concatenation]: #path-and-string-concatenation diff --git a/doc/manual/src/language/scope.md b/doc/manual/source/language/scope.md similarity index 100% rename from doc/manual/src/language/scope.md rename to doc/manual/source/language/scope.md diff --git a/doc/manual/src/language/string-context.md b/doc/manual/source/language/string-context.md similarity index 100% rename from doc/manual/src/language/string-context.md rename to doc/manual/source/language/string-context.md diff --git a/doc/manual/src/language/string-interpolation.md b/doc/manual/source/language/string-interpolation.md similarity index 100% rename from doc/manual/src/language/string-interpolation.md rename to doc/manual/source/language/string-interpolation.md diff --git a/doc/manual/src/language/string-literals.md b/doc/manual/source/language/string-literals.md similarity index 93% rename from doc/manual/src/language/string-literals.md rename to doc/manual/source/language/string-literals.md index 8f4b75f3e..ca064989a 100644 --- a/doc/manual/src/language/string-literals.md +++ b/doc/manual/source/language/string-literals.md @@ -150,6 +150,21 @@ These special characters are escaped as follows: `''\` escapes any other character. +A "dollar-curly" (`${`) can be written as follows: +> **Example** +> +> ```nix +> '' +> echo ''${PATH} +> '' +> ``` +> +> "echo ${PATH}\n" + +> **Note** +> +> This differs from the syntax for escaping a dollar-curly within double quotes (`"\${"`). Be aware of which one is needed at a given moment. + A "double-dollar-curly" (`$${`) can be written literally. > **Example** diff --git a/doc/manual/src/language/syntax.md b/doc/manual/source/language/syntax.md similarity index 100% rename from doc/manual/src/language/syntax.md rename to doc/manual/source/language/syntax.md diff --git a/doc/manual/src/language/types.md b/doc/manual/source/language/types.md similarity index 100% rename from doc/manual/src/language/types.md rename to doc/manual/source/language/types.md diff --git a/doc/manual/source/language/values.md b/doc/manual/source/language/values.md new file mode 100644 index 000000000..e05f56025 --- /dev/null +++ b/doc/manual/source/language/values.md @@ -0,0 +1 @@ +# Data Types diff --git a/doc/manual/src/language/variables.md b/doc/manual/source/language/variables.md similarity index 100% rename from doc/manual/src/language/variables.md rename to doc/manual/source/language/variables.md diff --git a/doc/manual/source/meson.build b/doc/manual/source/meson.build new file mode 100644 index 000000000..098a29897 --- /dev/null +++ b/doc/manual/source/meson.build @@ -0,0 +1,17 @@ +summary_rl_next = custom_target( + command : [ + bash, + '-euo', 'pipefail', + '-c', + ''' + if [ -e "@INPUT@" ]; then + echo ' - [Upcoming release](release-notes/rl-next.md)' + fi + ''', + ], + input : [ + rl_next_generated, + ], + capture: true, + output : 'SUMMARY-rl-next.md', +) diff --git a/doc/manual/src/package-management/binary-cache-substituter.md b/doc/manual/source/package-management/binary-cache-substituter.md similarity index 100% rename from doc/manual/src/package-management/binary-cache-substituter.md rename to doc/manual/source/package-management/binary-cache-substituter.md diff --git a/doc/manual/src/package-management/garbage-collection.md b/doc/manual/source/package-management/garbage-collection.md similarity index 100% rename from doc/manual/src/package-management/garbage-collection.md rename to doc/manual/source/package-management/garbage-collection.md diff --git 
a/doc/manual/src/package-management/garbage-collector-roots.md b/doc/manual/source/package-management/garbage-collector-roots.md similarity index 100% rename from doc/manual/src/package-management/garbage-collector-roots.md rename to doc/manual/source/package-management/garbage-collector-roots.md diff --git a/doc/manual/src/package-management/index.md b/doc/manual/source/package-management/index.md similarity index 100% rename from doc/manual/src/package-management/index.md rename to doc/manual/source/package-management/index.md diff --git a/doc/manual/src/package-management/profiles.md b/doc/manual/source/package-management/profiles.md similarity index 100% rename from doc/manual/src/package-management/profiles.md rename to doc/manual/source/package-management/profiles.md diff --git a/doc/manual/src/package-management/sharing-packages.md b/doc/manual/source/package-management/sharing-packages.md similarity index 100% rename from doc/manual/src/package-management/sharing-packages.md rename to doc/manual/source/package-management/sharing-packages.md diff --git a/doc/manual/src/package-management/ssh-substituter.md b/doc/manual/source/package-management/ssh-substituter.md similarity index 100% rename from doc/manual/src/package-management/ssh-substituter.md rename to doc/manual/source/package-management/ssh-substituter.md diff --git a/doc/manual/src/protocols/derivation-aterm.md b/doc/manual/source/protocols/derivation-aterm.md similarity index 100% rename from doc/manual/src/protocols/derivation-aterm.md rename to doc/manual/source/protocols/derivation-aterm.md diff --git a/doc/manual/src/protocols/index.md b/doc/manual/source/protocols/index.md similarity index 100% rename from doc/manual/src/protocols/index.md rename to doc/manual/source/protocols/index.md diff --git a/doc/manual/src/protocols/json/derivation.md b/doc/manual/source/protocols/json/derivation.md similarity index 100% rename from doc/manual/src/protocols/json/derivation.md rename to doc/manual/source/protocols/json/derivation.md diff --git a/doc/manual/source/protocols/json/index.md b/doc/manual/source/protocols/json/index.md new file mode 100644 index 000000000..1fcd1e62d --- /dev/null +++ b/doc/manual/source/protocols/json/index.md @@ -0,0 +1 @@ +# JSON Formats diff --git a/doc/manual/src/protocols/json/store-object-info.md b/doc/manual/source/protocols/json/store-object-info.md similarity index 100% rename from doc/manual/src/protocols/json/store-object-info.md rename to doc/manual/source/protocols/json/store-object-info.md diff --git a/doc/manual/src/protocols/nix-archive.md b/doc/manual/source/protocols/nix-archive.md similarity index 100% rename from doc/manual/src/protocols/nix-archive.md rename to doc/manual/source/protocols/nix-archive.md diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/source/protocols/store-path.md similarity index 97% rename from doc/manual/src/protocols/store-path.md rename to doc/manual/source/protocols/store-path.md index 52352d358..8ec6f8201 100644 --- a/doc/manual/src/protocols/store-path.md +++ b/doc/manual/source/protocols/store-path.md @@ -82,7 +82,7 @@ where - if `type` = `"source:" ...`: - the the hash of the [Nix Archive (NAR)] serialization of the [file system object](@docroot@/store/file-system-object.md) of the store object. + the hash of the [Nix Archive (NAR)] serialization of the [file system object](@docroot@/store/file-system-object.md) of the store object. 
- if `type` = `"output:" id`: diff --git a/doc/manual/src/protocols/tarball-fetcher.md b/doc/manual/source/protocols/tarball-fetcher.md similarity index 100% rename from doc/manual/src/protocols/tarball-fetcher.md rename to doc/manual/source/protocols/tarball-fetcher.md diff --git a/doc/manual/src/quick-start.md b/doc/manual/source/quick-start.md similarity index 100% rename from doc/manual/src/quick-start.md rename to doc/manual/source/quick-start.md diff --git a/doc/manual/src/release-notes/index.md b/doc/manual/source/release-notes/index.md similarity index 100% rename from doc/manual/src/release-notes/index.md rename to doc/manual/source/release-notes/index.md diff --git a/doc/manual/source/release-notes/meson.build b/doc/manual/source/release-notes/meson.build new file mode 100644 index 000000000..d8bf154e1 --- /dev/null +++ b/doc/manual/source/release-notes/meson.build @@ -0,0 +1,24 @@ +rl_next_generated = custom_target( + command : [ + 'bash', + '-euo', + 'pipefail', + '-c', + ''' + if type -p build-release-notes > /dev/null; then + build-release-notes --change-authors @CURRENT_SOURCE_DIR@/../../change-authors.yml @CURRENT_SOURCE_DIR@/../../rl-next + elif type -p changelog-d > /dev/null; then + changelog-d @CURRENT_SOURCE_DIR@/../../rl-next + fi + @0@ @INPUT0@ @CURRENT_SOURCE_DIR@/../../rl-next > @DEPFILE@ + '''.format( + python.full_path(), + ), + ], + input : [ + generate_manual_deps, + ], + output : 'rl-next.md', + capture : true, + depfile : 'rl-next.d', +) diff --git a/doc/manual/src/release-notes/rl-0.10.1.md b/doc/manual/source/release-notes/rl-0.10.1.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.10.1.md rename to doc/manual/source/release-notes/rl-0.10.1.md diff --git a/doc/manual/src/release-notes/rl-0.10.md b/doc/manual/source/release-notes/rl-0.10.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.10.md rename to doc/manual/source/release-notes/rl-0.10.md diff --git a/doc/manual/src/release-notes/rl-0.11.md b/doc/manual/source/release-notes/rl-0.11.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.11.md rename to doc/manual/source/release-notes/rl-0.11.md diff --git a/doc/manual/src/release-notes/rl-0.12.md b/doc/manual/source/release-notes/rl-0.12.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.12.md rename to doc/manual/source/release-notes/rl-0.12.md diff --git a/doc/manual/src/release-notes/rl-0.13.md b/doc/manual/source/release-notes/rl-0.13.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.13.md rename to doc/manual/source/release-notes/rl-0.13.md diff --git a/doc/manual/src/release-notes/rl-0.14.md b/doc/manual/source/release-notes/rl-0.14.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.14.md rename to doc/manual/source/release-notes/rl-0.14.md diff --git a/doc/manual/src/release-notes/rl-0.15.md b/doc/manual/source/release-notes/rl-0.15.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.15.md rename to doc/manual/source/release-notes/rl-0.15.md diff --git a/doc/manual/src/release-notes/rl-0.16.md b/doc/manual/source/release-notes/rl-0.16.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.16.md rename to doc/manual/source/release-notes/rl-0.16.md diff --git a/doc/manual/src/release-notes/rl-0.5.md b/doc/manual/source/release-notes/rl-0.5.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.5.md rename to doc/manual/source/release-notes/rl-0.5.md diff --git 
a/doc/manual/src/release-notes/rl-0.6.md b/doc/manual/source/release-notes/rl-0.6.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.6.md rename to doc/manual/source/release-notes/rl-0.6.md diff --git a/doc/manual/src/release-notes/rl-0.7.md b/doc/manual/source/release-notes/rl-0.7.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.7.md rename to doc/manual/source/release-notes/rl-0.7.md diff --git a/doc/manual/src/release-notes/rl-0.8.1.md b/doc/manual/source/release-notes/rl-0.8.1.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.8.1.md rename to doc/manual/source/release-notes/rl-0.8.1.md diff --git a/doc/manual/src/release-notes/rl-0.8.md b/doc/manual/source/release-notes/rl-0.8.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.8.md rename to doc/manual/source/release-notes/rl-0.8.md diff --git a/doc/manual/src/release-notes/rl-0.9.1.md b/doc/manual/source/release-notes/rl-0.9.1.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.9.1.md rename to doc/manual/source/release-notes/rl-0.9.1.md diff --git a/doc/manual/src/release-notes/rl-0.9.2.md b/doc/manual/source/release-notes/rl-0.9.2.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.9.2.md rename to doc/manual/source/release-notes/rl-0.9.2.md diff --git a/doc/manual/src/release-notes/rl-0.9.md b/doc/manual/source/release-notes/rl-0.9.md similarity index 100% rename from doc/manual/src/release-notes/rl-0.9.md rename to doc/manual/source/release-notes/rl-0.9.md diff --git a/doc/manual/src/release-notes/rl-1.0.md b/doc/manual/source/release-notes/rl-1.0.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.0.md rename to doc/manual/source/release-notes/rl-1.0.md diff --git a/doc/manual/src/release-notes/rl-1.1.md b/doc/manual/source/release-notes/rl-1.1.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.1.md rename to doc/manual/source/release-notes/rl-1.1.md diff --git a/doc/manual/src/release-notes/rl-1.10.md b/doc/manual/source/release-notes/rl-1.10.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.10.md rename to doc/manual/source/release-notes/rl-1.10.md diff --git a/doc/manual/src/release-notes/rl-1.11.10.md b/doc/manual/source/release-notes/rl-1.11.10.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.11.10.md rename to doc/manual/source/release-notes/rl-1.11.10.md diff --git a/doc/manual/src/release-notes/rl-1.11.md b/doc/manual/source/release-notes/rl-1.11.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.11.md rename to doc/manual/source/release-notes/rl-1.11.md diff --git a/doc/manual/src/release-notes/rl-1.2.md b/doc/manual/source/release-notes/rl-1.2.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.2.md rename to doc/manual/source/release-notes/rl-1.2.md diff --git a/doc/manual/src/release-notes/rl-1.3.md b/doc/manual/source/release-notes/rl-1.3.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.3.md rename to doc/manual/source/release-notes/rl-1.3.md diff --git a/doc/manual/src/release-notes/rl-1.4.md b/doc/manual/source/release-notes/rl-1.4.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.4.md rename to doc/manual/source/release-notes/rl-1.4.md diff --git a/doc/manual/src/release-notes/rl-1.5.1.md b/doc/manual/source/release-notes/rl-1.5.1.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.5.1.md rename to 
doc/manual/source/release-notes/rl-1.5.1.md diff --git a/doc/manual/src/release-notes/rl-1.5.2.md b/doc/manual/source/release-notes/rl-1.5.2.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.5.2.md rename to doc/manual/source/release-notes/rl-1.5.2.md diff --git a/doc/manual/src/release-notes/rl-1.5.md b/doc/manual/source/release-notes/rl-1.5.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.5.md rename to doc/manual/source/release-notes/rl-1.5.md diff --git a/doc/manual/src/release-notes/rl-1.6.1.md b/doc/manual/source/release-notes/rl-1.6.1.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.6.1.md rename to doc/manual/source/release-notes/rl-1.6.1.md diff --git a/doc/manual/src/release-notes/rl-1.6.md b/doc/manual/source/release-notes/rl-1.6.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.6.md rename to doc/manual/source/release-notes/rl-1.6.md diff --git a/doc/manual/src/release-notes/rl-1.7.md b/doc/manual/source/release-notes/rl-1.7.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.7.md rename to doc/manual/source/release-notes/rl-1.7.md diff --git a/doc/manual/src/release-notes/rl-1.8.md b/doc/manual/source/release-notes/rl-1.8.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.8.md rename to doc/manual/source/release-notes/rl-1.8.md diff --git a/doc/manual/src/release-notes/rl-1.9.md b/doc/manual/source/release-notes/rl-1.9.md similarity index 100% rename from doc/manual/src/release-notes/rl-1.9.md rename to doc/manual/source/release-notes/rl-1.9.md diff --git a/doc/manual/src/release-notes/rl-2.0.md b/doc/manual/source/release-notes/rl-2.0.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.0.md rename to doc/manual/source/release-notes/rl-2.0.md diff --git a/doc/manual/src/release-notes/rl-2.1.md b/doc/manual/source/release-notes/rl-2.1.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.1.md rename to doc/manual/source/release-notes/rl-2.1.md diff --git a/doc/manual/src/release-notes/rl-2.10.md b/doc/manual/source/release-notes/rl-2.10.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.10.md rename to doc/manual/source/release-notes/rl-2.10.md diff --git a/doc/manual/src/release-notes/rl-2.11.md b/doc/manual/source/release-notes/rl-2.11.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.11.md rename to doc/manual/source/release-notes/rl-2.11.md diff --git a/doc/manual/src/release-notes/rl-2.12.md b/doc/manual/source/release-notes/rl-2.12.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.12.md rename to doc/manual/source/release-notes/rl-2.12.md diff --git a/doc/manual/src/release-notes/rl-2.13.md b/doc/manual/source/release-notes/rl-2.13.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.13.md rename to doc/manual/source/release-notes/rl-2.13.md diff --git a/doc/manual/src/release-notes/rl-2.14.md b/doc/manual/source/release-notes/rl-2.14.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.14.md rename to doc/manual/source/release-notes/rl-2.14.md diff --git a/doc/manual/src/release-notes/rl-2.15.md b/doc/manual/source/release-notes/rl-2.15.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.15.md rename to doc/manual/source/release-notes/rl-2.15.md diff --git a/doc/manual/src/release-notes/rl-2.16.md b/doc/manual/source/release-notes/rl-2.16.md similarity index 100% rename from 
doc/manual/src/release-notes/rl-2.16.md rename to doc/manual/source/release-notes/rl-2.16.md diff --git a/doc/manual/src/release-notes/rl-2.17.md b/doc/manual/source/release-notes/rl-2.17.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.17.md rename to doc/manual/source/release-notes/rl-2.17.md diff --git a/doc/manual/src/release-notes/rl-2.18.md b/doc/manual/source/release-notes/rl-2.18.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.18.md rename to doc/manual/source/release-notes/rl-2.18.md diff --git a/doc/manual/src/release-notes/rl-2.19.md b/doc/manual/source/release-notes/rl-2.19.md similarity index 94% rename from doc/manual/src/release-notes/rl-2.19.md rename to doc/manual/source/release-notes/rl-2.19.md index e2e2f85cc..e6a93c7ea 100644 --- a/doc/manual/src/release-notes/rl-2.19.md +++ b/doc/manual/source/release-notes/rl-2.19.md @@ -75,3 +75,7 @@ (experimental) can be found by any program that follows the [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html). - A new command `nix store add` has been added. It replaces `nix store add-file` and `nix store add-path` which are now deprecated. + +- A new option [`always-allow-substitutes`](@docroot@/command-ref/conf-file.md#conf-always-allow-substitutes) has been added. + + When set to `true`, Nix will always try to substitute a derivation, even if it has the [`allowSubstitutes`]{#adv-attr-allowSubstitutes} attribute set to `false`. diff --git a/doc/manual/src/release-notes/rl-2.2.md b/doc/manual/source/release-notes/rl-2.2.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.2.md rename to doc/manual/source/release-notes/rl-2.2.md diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/source/release-notes/rl-2.20.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.20.md rename to doc/manual/source/release-notes/rl-2.20.md diff --git a/doc/manual/src/release-notes/rl-2.21.md b/doc/manual/source/release-notes/rl-2.21.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.21.md rename to doc/manual/source/release-notes/rl-2.21.md diff --git a/doc/manual/src/release-notes/rl-2.22.md b/doc/manual/source/release-notes/rl-2.22.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.22.md rename to doc/manual/source/release-notes/rl-2.22.md diff --git a/doc/manual/src/release-notes/rl-2.23.md b/doc/manual/source/release-notes/rl-2.23.md similarity index 96% rename from doc/manual/src/release-notes/rl-2.23.md rename to doc/manual/source/release-notes/rl-2.23.md index ac842fdc0..249c183e6 100644 --- a/doc/manual/src/release-notes/rl-2.23.md +++ b/doc/manual/source/release-notes/rl-2.23.md @@ -85,7 +85,7 @@ - Store object info JSON format now uses `null` rather than omitting fields [#9995](https://github.com/NixOS/nix/pull/9995) The [store object info JSON format](@docroot@/protocols/json/store-object-info.md), used for e.g. `nix path-info`, no longer omits fields to indicate absent information, but instead includes the fields with a `null` value. - For example, `"ca": null` is used to to indicate a store object that isn't content-addressed rather than omitting the `ca` field entirely. + For example, `"ca": null` is used to indicate a store object that isn't content-addressed rather than omitting the `ca` field entirely. This makes records of this sort more self-describing, and easier to consume programmatically. 
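As a minimal, abbreviated sketch (illustrative only, not the full store object info schema), a record for a store object that is not content-addressed now carries an explicit `null` rather than dropping the field:

```json
{
  "narHash": "sha256-<...>",
  "narSize": 1234,
  "references": [],
  "ca": null
}
```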
We will follow this design principle going forward; @@ -96,7 +96,7 @@ Nix can now warn when evaluation of a Nix expression causes a large path to be copied to the Nix store. The threshold for this warning can be configured using [the `warn-large-path-threshold` - setting](@docroot@/command-ref/conf-file.md#warn-large-path-threshold), + setting](@docroot@/command-ref/conf-file.md#conf-warn-large-path-threshold), e.g. `--warn-large-path-threshold 100M` will warn about paths larger than 100 MiB. diff --git a/doc/manual/src/release-notes/rl-2.24.md b/doc/manual/source/release-notes/rl-2.24.md similarity index 97% rename from doc/manual/src/release-notes/rl-2.24.md rename to doc/manual/source/release-notes/rl-2.24.md index 5bcc1d79c..08ec65be9 100644 --- a/doc/manual/src/release-notes/rl-2.24.md +++ b/doc/manual/source/release-notes/rl-2.24.md @@ -261,12 +261,6 @@ Author: [**Eelco Dolstra (@edolstra)**](https://github.com/edolstra) -- Improve handling of tarballs that don't consist of a single top-level directory [#11195](https://github.com/NixOS/nix/pull/11195) - - In previous Nix releases, the tarball fetcher (used by `builtins.fetchTarball`) erroneously merged top-level directories into a single directory, and silently discarded top-level files that are not directories. This is no longer the case. - - Author: [**Eelco Dolstra (@edolstra)**](https://github.com/edolstra) - - Setting to warn about large paths [#10778](https://github.com/NixOS/nix/pull/10778) Nix can now warn when evaluation of a Nix expression causes a large diff --git a/doc/manual/src/release-notes/rl-2.3.md b/doc/manual/source/release-notes/rl-2.3.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.3.md rename to doc/manual/source/release-notes/rl-2.3.md diff --git a/doc/manual/src/release-notes/rl-2.4.md b/doc/manual/source/release-notes/rl-2.4.md similarity index 99% rename from doc/manual/src/release-notes/rl-2.4.md rename to doc/manual/source/release-notes/rl-2.4.md index 1201e53b6..dbec5a29d 100644 --- a/doc/manual/src/release-notes/rl-2.4.md +++ b/doc/manual/source/release-notes/rl-2.4.md @@ -141,6 +141,8 @@ more than 2800 commits from 195 contributors since release 2.3. the evaluation cache. This is made possible by the hermetic evaluation model of flakes. + Intermediate results are not cached. + * The new `--offline` flag disables substituters and causes all locally cached tarballs and repositories to be considered up-to-date. 
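A brief usage sketch of that flag (assuming flakes are enabled and the inputs are already present locally; the installable name is illustrative):

```console
$ nix build --offline nixpkgs#hello
```

With `--offline`, no substituters are contacted and locally cached tarballs and repositories are treated as up to date.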
diff --git a/doc/manual/src/release-notes/rl-2.5.md b/doc/manual/source/release-notes/rl-2.5.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.5.md rename to doc/manual/source/release-notes/rl-2.5.md diff --git a/doc/manual/src/release-notes/rl-2.6.md b/doc/manual/source/release-notes/rl-2.6.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.6.md rename to doc/manual/source/release-notes/rl-2.6.md diff --git a/doc/manual/src/release-notes/rl-2.7.md b/doc/manual/source/release-notes/rl-2.7.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.7.md rename to doc/manual/source/release-notes/rl-2.7.md diff --git a/doc/manual/src/release-notes/rl-2.8.md b/doc/manual/source/release-notes/rl-2.8.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.8.md rename to doc/manual/source/release-notes/rl-2.8.md diff --git a/doc/manual/src/release-notes/rl-2.9.md b/doc/manual/source/release-notes/rl-2.9.md similarity index 100% rename from doc/manual/src/release-notes/rl-2.9.md rename to doc/manual/source/release-notes/rl-2.9.md diff --git a/doc/manual/src/store/file-system-object.md b/doc/manual/source/store/file-system-object.md similarity index 100% rename from doc/manual/src/store/file-system-object.md rename to doc/manual/source/store/file-system-object.md diff --git a/doc/manual/src/store/file-system-object/content-address.md b/doc/manual/source/store/file-system-object/content-address.md similarity index 97% rename from doc/manual/src/store/file-system-object/content-address.md rename to doc/manual/source/store/file-system-object/content-address.md index 410d7fb7c..72b087fe9 100644 --- a/doc/manual/src/store/file-system-object/content-address.md +++ b/doc/manual/source/store/file-system-object/content-address.md @@ -1,13 +1,13 @@ # Content-Addressing File System Objects -For many operations, Nix needs to calculate [a content addresses](@docroot@/glossary.md#gloss-content-address) of [a file system object][file system object]. +For many operations, Nix needs to calculate [a content addresses](@docroot@/glossary.md#gloss-content-address) of [a file system object][file system object] (FSO). Usually this is needed as part of [content addressing store objects](../store-object/content-address.md), since store objects always have a root file system object. But some command-line utilities also just work on "raw" file system objects, not part of any store object. Every content addressing scheme Nix uses ultimately involves feeding data into a [hash function](https://en.wikipedia.org/wiki/Hash_function), and getting back an opaque fixed-size digest which is deemed a content address. -The various *methods* of content addressing thus differ in how abstract data (in this case, a file system object and its descendents) are fed into the hash function. +The various *methods* of content addressing thus differ in how abstract data (in this case, a file system object and its descendants) are fed into the hash function. ## Serialising File System Objects { #serial } @@ -25,7 +25,7 @@ For example, Unix commands like `sha256sum` or `sha1sum` will produce hashes for ### Nix Archive (NAR) { #serial-nix-archive } -For the other cases of [file system objects][file system object], especially directories with arbitrary descendents, we need a more complex serialisation format. +For the other cases of [file system objects][file system object], especially directories with arbitrary descendants, we need a more complex serialisation format. 
Examples of such serialisations are the ZIP and TAR file formats. However, for our purposes these formats have two problems: diff --git a/doc/manual/src/store/index.md b/doc/manual/source/store/index.md similarity index 100% rename from doc/manual/src/store/index.md rename to doc/manual/source/store/index.md diff --git a/doc/manual/source/store/meson.build b/doc/manual/source/store/meson.build new file mode 100644 index 000000000..e3006020d --- /dev/null +++ b/doc/manual/source/store/meson.build @@ -0,0 +1,18 @@ +types_dir = custom_target( + command : [ + python.full_path(), + '@INPUT0@', + '@OUTPUT@', + '--' + ] + nix_eval_for_docs + [ + '--expr', + 'import @INPUT1@ (builtins.fromJSON (builtins.readFile ./@INPUT2@)).stores', + ], + input : [ + '../../remove_before_wrapper.py', + '../../generate-store-types.nix', + nix3_cli_json, + ], + output : 'types', + env : nix_env_for_docs, +) diff --git a/doc/manual/src/store/store-object.md b/doc/manual/source/store/store-object.md similarity index 100% rename from doc/manual/src/store/store-object.md rename to doc/manual/source/store/store-object.md diff --git a/doc/manual/src/store/store-object/content-address.md b/doc/manual/source/store/store-object/content-address.md similarity index 100% rename from doc/manual/src/store/store-object/content-address.md rename to doc/manual/source/store/store-object/content-address.md diff --git a/doc/manual/src/store/store-path.md b/doc/manual/source/store/store-path.md similarity index 100% rename from doc/manual/src/store/store-path.md rename to doc/manual/source/store/store-path.md diff --git a/doc/manual/src/store/types/index.md.in b/doc/manual/source/store/types/index.md.in similarity index 100% rename from doc/manual/src/store/types/index.md.in rename to doc/manual/source/store/types/index.md.in diff --git a/doc/manual/src/advanced-topics/distributed-builds.md b/doc/manual/src/advanced-topics/distributed-builds.md deleted file mode 100644 index ddabaeb4d..000000000 --- a/doc/manual/src/advanced-topics/distributed-builds.md +++ /dev/null @@ -1,71 +0,0 @@ -# Remote Builds - -Nix supports remote builds, where a local Nix installation can forward -Nix builds to other machines. This allows multiple builds to be -performed in parallel and allows Nix to perform multi-platform builds in -a semi-transparent way. For instance, if you perform a build for a -`x86_64-darwin` on an `i686-linux` machine, Nix can automatically -forward the build to a `x86_64-darwin` machine, if available. - -To forward a build to a remote machine, it’s required that the remote -machine is accessible via SSH and that it has Nix installed. You can -test whether connecting to the remote Nix instance works, e.g. - -```console -$ nix store ping --store ssh://mac -``` - -will try to connect to the machine named `mac`. It is possible to -specify an SSH identity file as part of the remote store URI, e.g. - -```console -$ nix store ping --store ssh://mac?ssh-key=/home/alice/my-key -``` - -Since builds should be non-interactive, the key should not have a -passphrase. Alternatively, you can load identities ahead of time into -`ssh-agent` or `gpg-agent`. - -If you get the error - -```console -bash: nix-store: command not found -error: cannot connect to 'mac' -``` - -then you need to ensure that the `PATH` of non-interactive login shells -contains Nix. - -The [list of remote build machines](@docroot@/command-ref/conf-file.md#conf-builders) can be specified on the command line or in the Nix configuration file. 
-For example, the following command allows you to build a derivation for `x86_64-darwin` on a Linux machine: - -```console -$ uname -Linux - -$ nix build --impure \ - --expr '(with import { system = "x86_64-darwin"; }; runCommand "foo" {} "uname > $out")' \ - --builders 'ssh://mac x86_64-darwin' -[1/0/1 built, 0.0 MiB DL] building foo on ssh://mac - -$ cat ./result -Darwin -``` - -It is possible to specify multiple build machines separated by a semicolon or a newline, e.g. - -```console - --builders 'ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd' -``` - -Remote build machines can also be configured in [`nix.conf`](@docroot@/command-ref/conf-file.md), e.g. - - builders = ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd - -Finally, remote build machines can be configured in a separate configuration -file included in `builders` via the syntax `@/path/to/file`. For example, - - builders = @/etc/nix/machines - -causes the list of machines in `/etc/nix/machines` to be included. -(This is the default.) diff --git a/doc/manual/substitute.py b/doc/manual/substitute.py new file mode 100644 index 000000000..a8b11d932 --- /dev/null +++ b/doc/manual/substitute.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python3 + +from pathlib import Path +import json +import os, os.path +import sys +import typing as t + +name = 'substitute.py' + +def log(*args: t.Any, **kwargs: t.Any) -> None: + kwargs['file'] = sys.stderr + print(f'{name}:', *args, **kwargs) + +def do_include(content: str, relative_md_path: Path, source_root: Path, search_path: Path) -> str: + assert not relative_md_path.is_absolute(), f'{relative_md_path=} from mdbook should be relative' + + md_path_abs = source_root / relative_md_path + var_abs = md_path_abs.parent + assert var_abs.is_dir(), f'supposed directory {var_abs} is not a directory (cwd={os.getcwd()})' + + lines = [] + for l in content.splitlines(keepends=True): + if l.strip().startswith("{{#include "): + requested = l.strip()[11:][:-2] + if requested.startswith("@generated@/"): + included = search_path / Path(requested[12:]) + requested = included.relative_to(search_path) + else: + included = source_root / relative_md_path.parent / requested + requested = included.resolve().relative_to(source_root) + assert included.exists(), f"{requested} not found at {included}" + lines.append(do_include(included.read_text(), requested, source_root, search_path) + "\n") + else: + lines.append(l) + return "".join(lines) + +def recursive_replace(data: dict[str, t.Any], book_root: Path, search_path: Path) -> dict[str, t.Any]: + match data: + case {'sections': sections}: + return data | dict( + sections = [recursive_replace(section, book_root, search_path) for section in sections], + ) + case {'Chapter': chapter}: + path_to_chapter = Path(chapter['path']) + chapter_content = chapter['content'] + + return data | dict( + Chapter = chapter | dict( + # first process includes. this must happen before docroot processing since + # mdbook does not see these included files, only the final agglomeration. 
+ content = do_include( + chapter_content, + path_to_chapter, + book_root, + search_path + ).replace( + '@docroot@', + ("../" * len(path_to_chapter.parent.parts) or "./")[:-1] + ), + sub_items = [ + recursive_replace(sub_item, book_root, search_path) + for sub_item in chapter['sub_items'] + ], + ), + ) + + case rest: + assert False, f'should have been called on a dict, not {type(rest)=}\n\t{rest=}' + +def main() -> None: + + + if len(sys.argv) > 1 and sys.argv[1] == 'supports': + return 0 + + # includes pointing into @generated@ will look here + search_path = Path(os.environ['MDBOOK_SUBSTITUTE_SEARCH']) + + if len(sys.argv) > 1 and sys.argv[1] == 'summary': + print(do_include( + sys.stdin.read(), + Path('source/SUMMARY.md'), + Path(sys.argv[2]).resolve(), + search_path)) + return + + # mdbook communicates with us over stdin and stdout. + # It splorks us a JSON array, the first element describing the context, + # the second element describing the book itself, + # and then expects us to send it the modified book JSON over stdout. + + context, book = json.load(sys.stdin) + + # book_root is the directory where book contents leave (ie, source/) + book_root = Path(context['root']) / context['config']['book']['src'] + + # Find @var@ in all parts of our recursive book structure. + replaced_content = recursive_replace(book, book_root, search_path) + + replaced_content_str = json.dumps(replaced_content) + + # Give mdbook our changes. + print(replaced_content_str) + +try: + sys.exit(main()) +except AssertionError as e: + print(f'{name}: INTERNAL ERROR in mdbook preprocessor: {e}', file=sys.stderr) + print(f'this is a bug in {name}', file=sys.stderr) + raise diff --git a/flake.nix b/flake.nix index a154ceb7b..cc40c03c1 100644 --- a/flake.nix +++ b/flake.nix @@ -194,13 +194,15 @@ # system, we should reenable this. #perlBindings = self.hydraJobs.perlBindings.${system}; } + /* # Add "passthru" tests // flatMapAttrs ({ "" = nixpkgsFor.${system}.native; } // lib.optionalAttrs (! nixpkgsFor.${system}.native.stdenv.hostPlatform.isDarwin) { # TODO: enable static builds for darwin, blocked on: # https://github.com/NixOS/nixpkgs/issues/320448 - "static-" = nixpkgsFor.${system}.static; + # TODO: disabled to speed up GHA CI. 
+ #"static-" = nixpkgsFor.${system}.static; }) (nixpkgsPrefix: nixpkgs: flatMapAttrs nixpkgs.nixComponents @@ -214,6 +216,7 @@ "${nixpkgsPrefix}nix-functional-tests" = nixpkgs.nixComponents.nix-functional-tests; } ) + */ // devFlake.checks.${system} or {} ); @@ -223,6 +226,7 @@ inherit (nixpkgsFor.${system}.native) changelog-d; default = self.packages.${system}.nix-ng; + nix-manual = nixpkgsFor.${system}.native.nixComponents.nix-manual; nix-internal-api-docs = nixpkgsFor.${system}.native.nixComponents.nix-internal-api-docs; nix-external-api-docs = nixpkgsFor.${system}.native.nixComponents.nix-external-api-docs; } @@ -295,107 +299,24 @@ }); devShells = let - makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; forDevShell = true; }).overrideAttrs (attrs: - let - modular = devFlake.getSystem stdenv.buildPlatform.system; - transformFlag = prefix: flag: - assert builtins.isString flag; - let - rest = builtins.substring 2 (builtins.stringLength flag) flag; - in - "-D${prefix}:${rest}"; - havePerl = stdenv.buildPlatform == stdenv.hostPlatform && stdenv.hostPlatform.isUnix; - ignoreCrossFile = flags: builtins.filter (flag: !(lib.strings.hasInfix "cross-file" flag)) flags; - in { - pname = "shell-for-" + attrs.pname; - - # Remove the version suffix to avoid unnecessary attempts to substitute in nix develop - version = lib.fileContents ./.version; - name = attrs.pname; - - installFlags = "sysconfdir=$(out)/etc"; - shellHook = '' - PATH=$prefix/bin:$PATH - unset PYTHONPATH - export MANPATH=$out/share/man:$MANPATH - - # Make bash completion work. - XDG_DATA_DIRS+=:$out/share - ''; - - # We use this shell with the local checkout, not unpackPhase. - src = null; - - env = { - # Needed for Meson to find Boost. - # https://github.com/NixOS/nixpkgs/issues/86131. 
- BOOST_INCLUDEDIR = "${lib.getDev pkgs.nixDependencies.boost}/include"; - BOOST_LIBRARYDIR = "${lib.getLib pkgs.nixDependencies.boost}/lib"; - # For `make format`, to work without installing pre-commit - _NIX_PRE_COMMIT_HOOKS_CONFIG = - "${(pkgs.formats.yaml { }).generate "pre-commit-config.yaml" modular.pre-commit.settings.rawConfig}"; - }; - - mesonFlags = - map (transformFlag "libutil") (ignoreCrossFile pkgs.nixComponents.nix-util.mesonFlags) - ++ map (transformFlag "libstore") (ignoreCrossFile pkgs.nixComponents.nix-store.mesonFlags) - ++ map (transformFlag "libfetchers") (ignoreCrossFile pkgs.nixComponents.nix-fetchers.mesonFlags) - ++ lib.optionals havePerl (map (transformFlag "perl") (ignoreCrossFile pkgs.nixComponents.nix-perl-bindings.mesonFlags)) - ++ map (transformFlag "libexpr") (ignoreCrossFile pkgs.nixComponents.nix-expr.mesonFlags) - ++ map (transformFlag "libcmd") (ignoreCrossFile pkgs.nixComponents.nix-cmd.mesonFlags) - ; - - nativeBuildInputs = attrs.nativeBuildInputs or [] - ++ pkgs.nixComponents.nix-util.nativeBuildInputs - ++ pkgs.nixComponents.nix-store.nativeBuildInputs - ++ pkgs.nixComponents.nix-fetchers.nativeBuildInputs - ++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.nativeBuildInputs - ++ pkgs.nixComponents.nix-internal-api-docs.nativeBuildInputs - ++ pkgs.nixComponents.nix-external-api-docs.nativeBuildInputs - ++ pkgs.nixComponents.nix-functional-tests.baseNativeBuildInputs - ++ lib.optional - (!stdenv.buildPlatform.canExecute stdenv.hostPlatform - # Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479 - && !(stdenv.hostPlatform.isWindows && stdenv.buildPlatform.isDarwin) - && stdenv.hostPlatform.emulatorAvailable pkgs.buildPackages - && lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages)) - pkgs.buildPackages.mesonEmulatorHook - ++ [ - pkgs.buildPackages.cmake - pkgs.buildPackages.shellcheck - pkgs.buildPackages.changelog-d - modular.pre-commit.settings.package - (pkgs.writeScriptBin "pre-commit-hooks-install" - modular.pre-commit.settings.installationScript) - ] - # TODO: Remove the darwin check once - # https://github.com/NixOS/nixpkgs/pull/291814 is available - ++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear - ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) pkgs.buildPackages.clang-tools; - - buildInputs = attrs.buildInputs or [] - ++ [ - pkgs.gtest - pkgs.rapidcheck - ] - ++ lib.optional havePerl pkgs.perl - ; - }); - in + makeShell = import ./packaging/dev-shell.nix { inherit lib devFlake; }; + prefixAttrs = prefix: lib.concatMapAttrs (k: v: { "${prefix}-${k}" = v; }); + in forAllSystems (system: - let - makeShells = prefix: pkgs: - lib.mapAttrs' - (k: v: lib.nameValuePair "${prefix}-${k}" v) - (forAllStdenvs (stdenvName: makeShell pkgs pkgs.${stdenvName})); - in - (makeShells "native" nixpkgsFor.${system}.native) // - (lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin) - (makeShells "static" nixpkgsFor.${system}.static) // - (forAllCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv))) // - { - default = self.devShells.${system}.native-stdenvPackages; - } + prefixAttrs "native" (forAllStdenvs (stdenvName: makeShell { + pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages"; + })) // + lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin) ( + prefixAttrs "static" (forAllStdenvs (stdenvName: 
makeShell { + pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsStatic; + })) // + prefixAttrs "cross" (forAllCrossSystems (crossSystem: makeShell { + pkgs = nixpkgsFor.${system}.cross.${crossSystem}; + })) + ) // + { + default = self.devShells.${system}.native-stdenvPackages; + } ); herculesCI.ciSystems = [ "x86_64-linux" diff --git a/maintainers/README.md b/maintainers/README.md index b92833497..fd9bbed8e 100644 --- a/maintainers/README.md +++ b/maintainers/README.md @@ -29,11 +29,7 @@ We aim to achieve this by improving the contributor experience and attracting mo ## Members -- Eelco Dolstra (@edolstra) – Team lead -- Valentin Gagarin (@fricklerhandwerk) -- Thomas Bereknyei (@tomberek) -- Robert Hensing (@roberth) -- John Ericson (@Ericson2314) +See https://nixos.org/community/teams/nix/ for the current team membership. The team is on Github as [@NixOS/nix-team](https://github.com/orgs/NixOS/teams/nix-team). @@ -63,6 +59,9 @@ Team meetings are generally open to anyone interested. We can make exceptions to discuss sensitive issues, such as security incidents or people matters. Contact any team member to get a calendar invite for reminders and updates. +> [!IMPORTANT] +> [Handling security reports](./security-reports.md) always takes priority. + ## Project board protocol The team uses a [GitHub project board](https://github.com/orgs/NixOS/projects/19/views/1) for tracking its work. diff --git a/maintainers/flake-module.nix b/maintainers/flake-module.nix index 0b83e5696..fdb031302 100644 --- a/maintainers/flake-module.nix +++ b/maintainers/flake-module.nix @@ -7,7 +7,7 @@ perSystem = { config, pkgs, ... }: { - # https://flake.parts/options/pre-commit-hooks-nix.html#options + # https://flake.parts/options/git-hooks-nix#options pre-commit.settings = { hooks = { clang-format = { @@ -15,7 +15,7 @@ excludes = [ # We don't want to format test data # ''tests/(?!nixos/).*\.nix'' - ''^tests/unit/[^/]*/data/.*$'' + ''^src/[^/]*-tests/data/.*$'' # Don't format vendored code ''^doc/manual/redirects\.js$'' @@ -28,8 +28,6 @@ ''^src/build-remote/build-remote\.cc$'' ''^src/libcmd/built-path\.cc$'' ''^src/libcmd/built-path\.hh$'' - ''^src/libcmd/command\.cc$'' - ''^src/libcmd/command\.hh$'' ''^src/libcmd/common-eval-args\.cc$'' ''^src/libcmd/common-eval-args\.hh$'' ''^src/libcmd/editor-for\.cc$'' @@ -427,64 +425,64 @@ ''^tests/nixos/ca-fd-leak/sender\.c'' ''^tests/nixos/ca-fd-leak/smuggler\.c'' ''^tests/nixos/user-sandboxing/attacker\.c'' - ''^tests/unit/libexpr-support/tests/libexpr\.hh'' - ''^tests/unit/libexpr-support/tests/value/context\.cc'' - ''^tests/unit/libexpr-support/tests/value/context\.hh'' - ''^tests/unit/libexpr/derived-path\.cc'' - ''^tests/unit/libexpr/error_traces\.cc'' - ''^tests/unit/libexpr/eval\.cc'' - ''^tests/unit/libexpr/json\.cc'' - ''^tests/unit/libexpr/main\.cc'' - ''^tests/unit/libexpr/primops\.cc'' - ''^tests/unit/libexpr/search-path\.cc'' - ''^tests/unit/libexpr/trivial\.cc'' - ''^tests/unit/libexpr/value/context\.cc'' - ''^tests/unit/libexpr/value/print\.cc'' - ''^tests/unit/libfetchers/public-key\.cc'' - ''^tests/unit/libflake/flakeref\.cc'' - ''^tests/unit/libflake/url-name\.cc'' - ''^tests/unit/libstore-support/tests/derived-path\.cc'' - ''^tests/unit/libstore-support/tests/derived-path\.hh'' - ''^tests/unit/libstore-support/tests/nix_api_store\.hh'' - ''^tests/unit/libstore-support/tests/outputs-spec\.cc'' - ''^tests/unit/libstore-support/tests/outputs-spec\.hh'' - ''^tests/unit/libstore-support/tests/path\.cc'' - 
''^tests/unit/libstore-support/tests/path\.hh'' - ''^tests/unit/libstore-support/tests/protocol\.hh'' - ''^tests/unit/libstore/common-protocol\.cc'' - ''^tests/unit/libstore/content-address\.cc'' - ''^tests/unit/libstore/derivation\.cc'' - ''^tests/unit/libstore/derived-path\.cc'' - ''^tests/unit/libstore/downstream-placeholder\.cc'' - ''^tests/unit/libstore/machines\.cc'' - ''^tests/unit/libstore/nar-info-disk-cache\.cc'' - ''^tests/unit/libstore/nar-info\.cc'' - ''^tests/unit/libstore/outputs-spec\.cc'' - ''^tests/unit/libstore/path-info\.cc'' - ''^tests/unit/libstore/path\.cc'' - ''^tests/unit/libstore/serve-protocol\.cc'' - ''^tests/unit/libstore/worker-protocol\.cc'' - ''^tests/unit/libutil-support/tests/characterization\.hh'' - ''^tests/unit/libutil-support/tests/hash\.cc'' - ''^tests/unit/libutil-support/tests/hash\.hh'' - ''^tests/unit/libutil/args\.cc'' - ''^tests/unit/libutil/canon-path\.cc'' - ''^tests/unit/libutil/chunked-vector\.cc'' - ''^tests/unit/libutil/closure\.cc'' - ''^tests/unit/libutil/compression\.cc'' - ''^tests/unit/libutil/config\.cc'' - ''^tests/unit/libutil/file-content-address\.cc'' - ''^tests/unit/libutil/git\.cc'' - ''^tests/unit/libutil/hash\.cc'' - ''^tests/unit/libutil/hilite\.cc'' - ''^tests/unit/libutil/json-utils\.cc'' - ''^tests/unit/libutil/logging\.cc'' - ''^tests/unit/libutil/lru-cache\.cc'' - ''^tests/unit/libutil/pool\.cc'' - ''^tests/unit/libutil/references\.cc'' - ''^tests/unit/libutil/suggestions\.cc'' - ''^tests/unit/libutil/url\.cc'' - ''^tests/unit/libutil/xml-writer\.cc'' + ''^src/libexpr-test-support/tests/libexpr\.hh'' + ''^src/libexpr-test-support/tests/value/context\.cc'' + ''^src/libexpr-test-support/tests/value/context\.hh'' + ''^src/libexpr-tests/derived-path\.cc'' + ''^src/libexpr-tests/error_traces\.cc'' + ''^src/libexpr-tests/eval\.cc'' + ''^src/libexpr-tests/json\.cc'' + ''^src/libexpr-tests/main\.cc'' + ''^src/libexpr-tests/primops\.cc'' + ''^src/libexpr-tests/search-path\.cc'' + ''^src/libexpr-tests/trivial\.cc'' + ''^src/libexpr-tests/value/context\.cc'' + ''^src/libexpr-tests/value/print\.cc'' + ''^src/libfetchers-tests/public-key\.cc'' + ''^src/libflake-tests/flakeref\.cc'' + ''^src/libflake-tests/url-name\.cc'' + ''^src/libstore-test-support/tests/derived-path\.cc'' + ''^src/libstore-test-support/tests/derived-path\.hh'' + ''^src/libstore-test-support/tests/nix_api_store\.hh'' + ''^src/libstore-test-support/tests/outputs-spec\.cc'' + ''^src/libstore-test-support/tests/outputs-spec\.hh'' + ''^src/libstore-test-support/tests/path\.cc'' + ''^src/libstore-test-support/tests/path\.hh'' + ''^src/libstore-test-support/tests/protocol\.hh'' + ''^src/libstore-tests/common-protocol\.cc'' + ''^src/libstore-tests/content-address\.cc'' + ''^src/libstore-tests/derivation\.cc'' + ''^src/libstore-tests/derived-path\.cc'' + ''^src/libstore-tests/downstream-placeholder\.cc'' + ''^src/libstore-tests/machines\.cc'' + ''^src/libstore-tests/nar-info-disk-cache\.cc'' + ''^src/libstore-tests/nar-info\.cc'' + ''^src/libstore-tests/outputs-spec\.cc'' + ''^src/libstore-tests/path-info\.cc'' + ''^src/libstore-tests/path\.cc'' + ''^src/libstore-tests/serve-protocol\.cc'' + ''^src/libstore-tests/worker-protocol\.cc'' + ''^src/libutil-test-support/tests/characterization\.hh'' + ''^src/libutil-test-support/tests/hash\.cc'' + ''^src/libutil-test-support/tests/hash\.hh'' + ''^src/libutil-tests/args\.cc'' + ''^src/libutil-tests/canon-path\.cc'' + ''^src/libutil-tests/chunked-vector\.cc'' + ''^src/libutil-tests/closure\.cc'' + 
''^src/libutil-tests/compression\.cc'' + ''^src/libutil-tests/config\.cc'' + ''^src/libutil-tests/file-content-address\.cc'' + ''^src/libutil-tests/git\.cc'' + ''^src/libutil-tests/hash\.cc'' + ''^src/libutil-tests/hilite\.cc'' + ''^src/libutil-tests/json-utils\.cc'' + ''^src/libutil-tests/logging\.cc'' + ''^src/libutil-tests/lru-cache\.cc'' + ''^src/libutil-tests/pool\.cc'' + ''^src/libutil-tests/references\.cc'' + ''^src/libutil-tests/suggestions\.cc'' + ''^src/libutil-tests/url\.cc'' + ''^src/libutil-tests/xml-writer\.cc'' ]; }; shellcheck = { @@ -501,14 +499,13 @@ ''^scripts/install-nix-from-closure\.sh$'' ''^scripts/install-systemd-multi-user\.sh$'' ''^src/nix/get-env\.sh$'' - ''^tests/functional/build\.sh$'' ''^tests/functional/ca/build-dry\.sh$'' ''^tests/functional/ca/build-with-garbage-path\.sh$'' ''^tests/functional/ca/common\.sh$'' ''^tests/functional/ca/concurrent-builds\.sh$'' ''^tests/functional/ca/eval-store\.sh$'' ''^tests/functional/ca/gc\.sh$'' - ''^tests/functional/ca/import-derivation\.sh$'' + ''^tests/functional/ca/import-from-derivation\.sh$'' ''^tests/functional/ca/new-build-cmd\.sh$'' ''^tests/functional/ca/nix-shell\.sh$'' ''^tests/functional/ca/post-hook\.sh$'' @@ -517,7 +514,6 @@ ''^tests/functional/ca/selfref-gc\.sh$'' ''^tests/functional/ca/why-depends\.sh$'' ''^tests/functional/characterisation-test-infra\.sh$'' - ''^tests/functional/check\.sh$'' ''^tests/functional/common/vars-and-functions\.sh$'' ''^tests/functional/completions\.sh$'' ''^tests/functional/compute-levels\.sh$'' @@ -534,7 +530,6 @@ ''^tests/functional/dyn-drv/old-daemon-error-hack\.sh$'' ''^tests/functional/dyn-drv/recursive-mod-json\.sh$'' ''^tests/functional/eval-store\.sh$'' - ''^tests/functional/eval\.sh$'' ''^tests/functional/export-graph\.sh$'' ''^tests/functional/export\.sh$'' ''^tests/functional/extra-sandbox-profile\.sh$'' @@ -544,15 +539,12 @@ ''^tests/functional/fetchGitSubmodules\.sh$'' ''^tests/functional/fetchGitVerification\.sh$'' ''^tests/functional/fetchMercurial\.sh$'' - ''^tests/functional/fetchurl\.sh$'' ''^tests/functional/fixed\.builder1\.sh$'' ''^tests/functional/fixed\.builder2\.sh$'' ''^tests/functional/fixed\.sh$'' ''^tests/functional/flakes/absolute-paths\.sh$'' ''^tests/functional/flakes/check\.sh$'' - ''^tests/functional/flakes/common\.sh$'' ''^tests/functional/flakes/config\.sh$'' - ''^tests/functional/flakes/develop\.sh$'' ''^tests/functional/flakes/flakes\.sh$'' ''^tests/functional/flakes/follow-paths\.sh$'' ''^tests/functional/flakes/prefetch\.sh$'' @@ -565,16 +557,12 @@ ''^tests/functional/gc-concurrent\.sh$'' ''^tests/functional/gc-concurrent2\.builder\.sh$'' ''^tests/functional/gc-non-blocking\.sh$'' - ''^tests/functional/gc\.sh$'' ''^tests/functional/git-hashing/common\.sh$'' ''^tests/functional/git-hashing/simple\.sh$'' ''^tests/functional/hash-convert\.sh$'' - ''^tests/functional/help\.sh$'' ''^tests/functional/impure-derivations\.sh$'' - ''^tests/functional/impure-env\.sh$'' ''^tests/functional/impure-eval\.sh$'' ''^tests/functional/install-darwin\.sh$'' - ''^tests/functional/lang\.sh$'' ''^tests/functional/legacy-ssh-store\.sh$'' ''^tests/functional/linux-sandbox\.sh$'' ''^tests/functional/local-overlay-store/add-lower-inner\.sh$'' @@ -603,7 +591,6 @@ ''^tests/functional/logging\.sh$'' ''^tests/functional/misc\.sh$'' ''^tests/functional/multiple-outputs\.sh$'' - ''^tests/functional/nar-access\.sh$'' ''^tests/functional/nested-sandboxing\.sh$'' ''^tests/functional/nested-sandboxing/command\.sh$'' ''^tests/functional/nix-build\.sh$'' @@ -624,7 +611,6 @@ 
''^tests/functional/path-from-hash-part\.sh$'' ''^tests/functional/path-info\.sh$'' ''^tests/functional/placeholders\.sh$'' - ''^tests/functional/plugins\.sh$'' ''^tests/functional/post-hook\.sh$'' ''^tests/functional/pure-eval\.sh$'' ''^tests/functional/push-to-store-old\.sh$'' @@ -639,7 +625,6 @@ ''^tests/functional/search\.sh$'' ''^tests/functional/secure-drv-outputs\.sh$'' ''^tests/functional/selfref-gc\.sh$'' - ''^tests/functional/shell\.sh$'' ''^tests/functional/shell\.shebang\.sh$'' ''^tests/functional/simple\.builder\.sh$'' ''^tests/functional/supplementary-groups\.sh$'' @@ -649,8 +634,7 @@ ''^tests/functional/user-envs\.builder\.sh$'' ''^tests/functional/user-envs\.sh$'' ''^tests/functional/why-depends\.sh$'' - ''^tests/functional/zstd\.sh$'' - ''^tests/unit/libutil/data/git/check-data\.sh$'' + ''^src/libutil-tests/data/git/check-data\.sh$'' ]; }; # TODO: nixfmt, https://github.com/NixOS/nixfmt/issues/153 diff --git a/maintainers/format.sh b/maintainers/format.sh new file mode 100755 index 000000000..a2a6d8b41 --- /dev/null +++ b/maintainers/format.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +if ! type -p pre-commit &>/dev/null; then + echo "format.sh: pre-commit not found. Please use \`nix develop\`."; + exit 1; +fi; +if test -z "$_NIX_PRE_COMMIT_HOOKS_CONFIG"; then + echo "format.sh: _NIX_PRE_COMMIT_HOOKS_CONFIG not set. Please use \`nix develop\`."; + exit 1; +fi; +pre-commit run --config "$_NIX_PRE_COMMIT_HOOKS_CONFIG" --all-files diff --git a/maintainers/local.mk b/maintainers/local.mk index 88d594d67..e81517eda 100644 --- a/maintainers/local.mk +++ b/maintainers/local.mk @@ -3,13 +3,6 @@ print-top-help += echo ' format: Format source code' # This uses the cached .pre-commit-hooks.yaml file +fmt_script := $(d)/format.sh format: - @if ! type -p pre-commit &>/dev/null; then \ - echo "make format: pre-commit not found. Please use \`nix develop\`."; \ - exit 1; \ - fi; \ - if test -z "$$_NIX_PRE_COMMIT_HOOKS_CONFIG"; then \ - echo "make format: _NIX_PRE_COMMIT_HOOKS_CONFIG not set. Please use \`nix develop\`."; \ - exit 1; \ - fi; \ - pre-commit run --config $$_NIX_PRE_COMMIT_HOOKS_CONFIG --all-files + @$(fmt_script) diff --git a/maintainers/onboarding.md b/maintainers/onboarding.md new file mode 100644 index 000000000..e750bd8a7 --- /dev/null +++ b/maintainers/onboarding.md @@ -0,0 +1,6 @@ + +# Onboarding a new team member + +- https://github.com/NixOS/nixos-homepage/ +- https://github.com/orgs/NixOS/teams/nix-team +- Matrix room diff --git a/maintainers/release-notes b/maintainers/release-notes index c0c4ee734..0cdcd517b 100755 --- a/maintainers/release-notes +++ b/maintainers/release-notes @@ -78,7 +78,7 @@ if ! git diff --quiet --cached; then die "repo has staged changes, please commit or stash them" fi -if ! grep "$SUMMARY_MARKER_LINE" doc/manual/src/SUMMARY.md.in >/dev/null; then +if ! grep "$SUMMARY_MARKER_LINE" doc/manual/source/SUMMARY.md.in >/dev/null; then # would have been nice to catch this early, but won't be worth the extra infra die "SUMMARY.md.in is missing the marker line '$SUMMARY_MARKER_LINE', which would be used for inserting a new release notes page. Please fix the script." fi @@ -117,7 +117,7 @@ log "version_full=$version_full" log "IS_PATCH=$IS_PATCH" basename=rl-${version_major_minor}.md -file=doc/manual/src/release-notes/$basename +file=doc/manual/source/release-notes/$basename if ! $IS_PATCH; then if [[ -e $file ]]; then @@ -169,7 +169,7 @@ if ! 
$IS_PATCH; then # find the marker line, insert new link after it escaped_marker="$(echo "$SUMMARY_MARKER_LINE" | sed -e 's/\//\\\//g' -e 's/ /\\ /g')" escaped_line="$(echo "$NEW_SUMMARY_LINE" | sed -e 's/\//\\\//g' -e 's/ /\\ /g')" - logcmd sed -i -e "/$escaped_marker/a $escaped_line" doc/manual/src/SUMMARY.md.in + logcmd sed -i -e "/$escaped_marker/a $escaped_line" doc/manual/source/SUMMARY.md.in fi for f in doc/manual/rl-next/*.md; do @@ -178,7 +178,7 @@ for f in doc/manual/rl-next/*.md; do fi done -logcmd git add $file doc/manual/src/SUMMARY.md.in +logcmd git add $file doc/manual/source/SUMMARY.md.in logcmd git status logcmd git commit -m "release notes: $version_full" diff --git a/maintainers/release-process.md b/maintainers/release-process.md index 7a2b3c0a7..bf3c308cf 100644 --- a/maintainers/release-process.md +++ b/maintainers/release-process.md @@ -15,7 +15,7 @@ release: * (Optionally) Updated `fallback-paths.nix` in Nixpkgs -* An updated manual on https://nixos.org/manual/nix/stable/ +* An updated manual on https://nix.dev/manual/nix/latest/ ## Creating a new release from the `master` branch @@ -194,8 +194,58 @@ release: * Bump the version number of the release branch as above (e.g. to `2.12.2`). - + ## Recovering from mistakes `upload-release.pl` should be idempotent. For instance a wrong `IS_LATEST` value can be fixed that way, by running the script on the actual latest release. +## Security releases + +> See also the instructions for [handling security reports](./security-reports.md). + +Once a security fix is ready for merging: + +1. Summarize *all* past communication in the report. + +1. Request a CVE in the [GitHub security advisory](https://github.com/NixOS/nix/security/advisories) for the security fix. + +1. Notify all collaborators on the advisory with a timeline for the release. + +1. Merge the fix. Publish the advisory. + +1. [Make point releases](#creating-point-releases) for all affected versions. + +1. Update the affected Nix releases in Nixpkgs to the patched version. + + For each Nix release, change the `version = ` strings and run + + ```shell-session + nix-build -A nixVersions.nix__ + ``` + + to get the correct hash for the `hash =` field. + +1. Once the release is built by Hydra, update fallback paths. + + For the Nix release `${version}` shipped with Nixpkgs, run: + + ```shell-session + curl https://releases.nixos.org/nix/nix-${version}/fallback-paths.nix > nixos/modules/installer/tools/nix-fallback-paths.nix + ``` + + Starting with Nixpkgs 24.11, there is an automatic check that fallback paths with Nix binaries match the Nix release shipped with Nixpkgs. + +1. Backport the updates to the two most recent stable releases of Nixpkgs. + + Add `backport release-` labels, which will trigger GitHub Actions to attempt automatic backports. + +1. Once the pull request against `master` lands on `nixpkgs-unstable`, post a Discourse announcement with + + - Links to the CVE and GitHub security advisory + - A description of the vulnerability and its fix + - Credits to the reporters of the vulnerability and contributors of the fix + - A list of affected and patched Nix releases + - Instructions for updating + - A link to the [pull request tracker](https://nixpk.gs/pr-tracker.html) to follow when the patched Nix versions will appear on the various release channels + + Check [past announcements](https://discourse.nixos.org/search?expanded=true&q=Security%20fix%20in%3Atitle%20order%3Alatest_topic) for reference. 
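As a rough sketch of the Nixpkgs update step above (not part of this diff; the release number and attribute name are illustrative assumptions):

```console
$ # In a Nixpkgs checkout: edit the `version = "..."` string for the affected
$ # release, then rebuild it to obtain the new source hash for the `hash =` field.
$ nix-build -A nixVersions.nix_2_24
```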
diff --git a/maintainers/security-reports.md b/maintainers/security-reports.md new file mode 100644 index 000000000..700590cf7 --- /dev/null +++ b/maintainers/security-reports.md @@ -0,0 +1,31 @@ +# Handling security reports + +Reports can be expected to be submitted following the [security policy](https://github.com/NixOS/nix/security/policy), but may reach maintainers on various other channels. + +In case a vulnerability is reported: + +1. [Create a GitHub security advisory](https://github.com/NixOS/nix/security/advisories/new) + + > [!IMPORTANT] + > Add the reporter as a collaborator so they get notified of all activities. + + In addition to the details in the advisory template, the initial report should: + + - Include sufficient details of the vulnerability to allow it to be understood and reproduced. + - Redact any personal data. + - Set a deadline (if applicable). + - Provide proof of concept code (if available). + - Reference any further reading material that may be appropriate. + +1. Establish a private communication channel (e.g. a Matrix room) with the reporter and all Nix maintainers. + +1. Communicate with the reporter which team members are assigned and when they are available. + +1. Consider which immediate preliminary measures should be taken before working on a fix. + +1. Prioritize fixing the security issue over ongoing work. + +1. Keep everyone involved up to date on progress and the estimated timeline for releasing the fix. + +> See also the instructions for [security releases](./release-process.md#security-releases). + diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl index 731988568..8a470c7cc 100755 --- a/maintainers/upload-release.pl +++ b/maintainers/upload-release.pl @@ -112,7 +112,7 @@ sub copyManual { system("xz -d < '$manualNar' | nix-store --restore $tmpDir/manual.tmp") == 0 or die "unable to unpack $manualNar\n"; rename("$tmpDir/manual.tmp/share/doc/nix/manual", "$tmpDir/manual") or die; - system("rm -rf '$tmpDir/manual.tmp'") == 0 or die; + File::Path::remove_tree("$tmpDir/manual.tmp", {safe => 1}); } system("aws s3 sync '$tmpDir/manual' s3://$releasesBucketName/$releaseDir/manual") == 0 @@ -281,3 +281,6 @@ system("git remote update origin") == 0 or die; system("git tag --force --sign $version $nixRev -m 'Tagging release $version'") == 0 or die; system("git push --tags") == 0 or die; system("git push --force-with-lease origin $nixRev:refs/heads/latest-release") == 0 or die if $isLatest; + +File::Path::remove_tree($narCache, {safe => 1}); +File::Path::remove_tree($tmpDir, {safe => 1}); diff --git a/meson.build b/meson.build index 715a3862d..8985b631e 100644 --- a/meson.build +++ b/meson.build @@ -4,6 +4,9 @@ project('nix-dev-shell', 'cpp', version : files('.version'), subproject_dir : 'src', + default_options : [ + 'localstatedir=/nix/var', + ] ) # Internal Libraries @@ -19,8 +22,13 @@ subproject('libcmd') subproject('nix') # Docs -subproject('internal-api-docs') -subproject('external-api-docs') +if get_option('doc-gen') + subproject('internal-api-docs') + subproject('external-api-docs') + if not meson.is_cross_build() + subproject('nix-manual') + endif +endif # External C wrapper libraries subproject('libutil-c') @@ -29,17 +37,19 @@ subproject('libexpr-c') subproject('libmain-c') # Language Bindings -if not meson.is_cross_build() +if get_option('bindings') and not meson.is_cross_build() subproject('perl') endif # Testing -subproject('nix-util-test-support') -subproject('nix-util-tests') -subproject('nix-store-test-support') 
-subproject('nix-store-tests') -subproject('nix-fetchers-tests') -subproject('nix-expr-test-support') -subproject('nix-expr-tests') -subproject('nix-flake-tests') +if get_option('unit-tests') + subproject('libutil-test-support') + subproject('libutil-tests') + subproject('libstore-test-support') + subproject('libstore-tests') + subproject('libfetchers-tests') + subproject('libexpr-test-support') + subproject('libexpr-tests') + subproject('libflake-tests') +endif subproject('nix-functional-tests') diff --git a/meson.options b/meson.options new file mode 100644 index 000000000..b3b3b4043 --- /dev/null +++ b/meson.options @@ -0,0 +1,13 @@ +# vim: filetype=meson + +option('doc-gen', type : 'boolean', value : true, + description : 'Generate documentation', +) + +option('unit-tests', type : 'boolean', value : true, + description : 'Build unit tests', +) + +option('bindings', type : 'boolean', value : true, + description : 'Build language bindings (e.g. Perl)', +) diff --git a/misc/bash/completion.sh b/misc/bash/completion.sh index 9af695f5a..c4ba96cd3 100644 --- a/misc/bash/completion.sh +++ b/misc/bash/completion.sh @@ -12,9 +12,16 @@ function _complete_nix { elif [[ $completion == attrs ]]; then compopt -o nospace fi - else - COMPREPLY+=("$completion") + continue fi + + if [[ "${cur}" =~ "=" ]]; then + # drop everything up to the first =. if a = is included, bash assumes this to be + # an arg=value argument and the completion gets mangled (see #11208) + completion="${completion#*=}" + fi + + COMPREPLY+=("${completion}") done < <(NIX_GET_COMPLETIONS=$cword "${words[@]}" 2>/dev/null) __ltrim_colon_completions "$cur" } diff --git a/misc/bash/meson.build b/misc/bash/meson.build new file mode 100644 index 000000000..8a97a02cb --- /dev/null +++ b/misc/bash/meson.build @@ -0,0 +1,8 @@ +configure_file( + input : 'completion.sh', + output : 'nix', + install : true, + install_dir : get_option('datadir') / 'bash-completion' / 'completions', + install_mode : 'rw-r--r--', + copy : true, +) diff --git a/misc/fish/meson.build b/misc/fish/meson.build new file mode 100644 index 000000000..e7e89b438 --- /dev/null +++ b/misc/fish/meson.build @@ -0,0 +1,8 @@ +configure_file( + input : 'completion.fish', + output : 'nix.fish', + install : true, + install_dir : get_option('datadir') / 'fish' / 'vendor_completions.d', + install_mode : 'rw-r--r--', + copy : true, +) diff --git a/misc/launchd/org.nixos.nix-daemon.plist.in b/misc/launchd/org.nixos.nix-daemon.plist.in index e1470cf99..664608305 100644 --- a/misc/launchd/org.nixos.nix-daemon.plist.in +++ b/misc/launchd/org.nixos.nix-daemon.plist.in @@ -2,11 +2,6 @@ - EnvironmentVariables - - OBJC_DISABLE_INITIALIZE_FORK_SAFETY - YES - Label org.nixos.nix-daemon KeepAlive diff --git a/misc/meson.build b/misc/meson.build new file mode 100644 index 000000000..a6d1f944b --- /dev/null +++ b/misc/meson.build @@ -0,0 +1,5 @@ +subdir('bash') +subdir('fish') +subdir('zsh') + +subdir('systemd') diff --git a/misc/systemd/meson.build b/misc/systemd/meson.build new file mode 100644 index 000000000..6ccb6a873 --- /dev/null +++ b/misc/systemd/meson.build @@ -0,0 +1,25 @@ +foreach config : [ 'nix-daemon.socket', 'nix-daemon.service' ] + configure_file( + input : config + '.in', + output : config, + install : true, + install_dir : get_option('prefix') / 'lib/systemd/system', + install_mode : 'rw-r--r--', + configuration : { + 'storedir' : store_dir, + 'localstatedir' : localstatedir, + 'bindir' : bindir, + }, + ) +endforeach + +configure_file( + input : 'nix-daemon.conf.in', + output : 
'nix-daemon.conf', + install : true, + install_dir : get_option('prefix') / 'lib/tmpfiles.d', + install_mode : 'rw-r--r--', + configuration : { + 'localstatedir' : localstatedir, + }, +) diff --git a/misc/systemd/nix-daemon.service.in b/misc/systemd/nix-daemon.service.in index 45fbea02c..b3055cfe2 100644 --- a/misc/systemd/nix-daemon.service.in +++ b/misc/systemd/nix-daemon.service.in @@ -11,6 +11,7 @@ ExecStart=@@bindir@/nix-daemon nix-daemon --daemon KillMode=process LimitNOFILE=1048576 TasksMax=1048576 +Delegate=yes [Install] WantedBy=multi-user.target diff --git a/misc/zsh/meson.build b/misc/zsh/meson.build new file mode 100644 index 000000000..f3d0426e7 --- /dev/null +++ b/misc/zsh/meson.build @@ -0,0 +1,10 @@ +foreach script : [ [ 'completion.zsh', '_nix' ], [ 'run-help-nix' ] ] + configure_file( + input : script[0], + output : script.get(1, script[0]), + install : true, + install_dir : get_option('datadir') / 'zsh/site-functions', + install_mode : 'rw-r--r--', + copy : true, + ) +endforeach diff --git a/mk/common-test.sh b/mk/common-test.sh index 817422c40..dd899e869 100644 --- a/mk/common-test.sh +++ b/mk/common-test.sh @@ -9,13 +9,21 @@ test_name=$(echo -n "${test?must be defined by caller (test runner)}" | sed \ -e "s|\.sh$||" \ ) +# Layer violation, but I am not inclined to care too much, as this code +# is about to be deleted. +src_dir=$(realpath tests/functional) + # shellcheck disable=SC2016 TESTS_ENVIRONMENT=( "TEST_NAME=$test_name" 'NIX_REMOTE=' 'PS4=+(${BASH_SOURCE[0]-$0}:$LINENO) ' + "_NIX_TEST_SOURCE_DIR=${src_dir}" + "_NIX_TEST_BUILD_DIR=${src_dir}" ) +unset src_dir + read -r -a bash <<< "${BASH:-/usr/bin/env bash}" run () { diff --git a/mk/libraries.mk b/mk/libraries.mk index b99ba2782..a7848ba35 100644 --- a/mk/libraries.mk +++ b/mk/libraries.mk @@ -86,7 +86,9 @@ define build-library else ifndef HOST_DARWIN ifndef HOST_WINDOWS - $(1)_LDFLAGS += -Wl,-z,defs + ifndef HOST_OPENBSD + $(1)_LDFLAGS += -Wl,-z,defs + endif endif endif endif diff --git a/mk/platform.mk b/mk/platform.mk index 22c114a20..3c4fff780 100644 --- a/mk/platform.mk +++ b/mk/platform.mk @@ -21,6 +21,10 @@ ifdef HOST_OS HOST_NETBSD = 1 HOST_UNIX = 1 endif + ifeq ($(patsubst openbsd%,,$(HOST_KERNEL)),) + HOST_OPENBSD = 1 + HOST_UNIX = 1 + endif ifeq ($(HOST_KERNEL), linux) HOST_LINUX = 1 HOST_UNIX = 1 diff --git a/package.nix b/package.nix index e3c75530b..44c31d923 100644 --- a/package.nix +++ b/package.nix @@ -23,6 +23,7 @@ , libseccomp , libsodium , man +, darwin , lowdown , mdbook , mdbook-linkcheck @@ -53,10 +54,14 @@ # Whether to build Nix. Useful to skip for tasks like testing existing pre-built versions of Nix , doBuild ? true +# Run the unit tests as part of the build. See `installUnitTests` for an +# alternative to this. +, doCheck ? __forDefaults.canRunInstalled + # Run the functional tests as part of the build. , doInstallCheck ? test-client != null || __forDefaults.canRunInstalled -# Check test coverage of Nix. Probably want to use with with at least +# Check test coverage of Nix. Probably want to use with at least # one of `doCHeck` or `doInstallCheck` enabled. , withCoverageChecks ? false @@ -72,7 +77,9 @@ # # Temporarily disabled on Windows because the `GC_throw_bad_alloc` # symbol is missing during linking. -, enableGC ? !stdenv.hostPlatform.isWindows +# +# Disabled on OpenBSD because of missing `_data_start` symbol while linking +, enableGC ? !stdenv.hostPlatform.isWindows && !stdenv.hostPlatform.isOpenBSD # Whether to enable Markdown rendering in the Nix binary. , enableMarkdown ? 
!stdenv.hostPlatform.isWindows @@ -85,6 +92,11 @@ # - readline , readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline" +# Whether to install unit tests. This is useful when cross compiling +# since we cannot run them natively during the build, but can do so +# later. +, installUnitTests ? doBuild && !__forDefaults.canExecuteHost + # For running the functional tests against a pre-built Nix. Probably # want to use in conjunction with `doBuild = false;`. , test-daemon ? null @@ -108,7 +120,7 @@ let # things which should instead be gotten via `finalAttrs` in order to # work with overriding. attrs = { - inherit doBuild doInstallCheck; + inherit doBuild doCheck doInstallCheck; }; mkDerivation = @@ -124,11 +136,16 @@ in mkDerivation (finalAttrs: let inherit (finalAttrs) + doCheck doInstallCheck ; doBuild = !finalAttrs.dontBuild; + # Either running the unit tests during the build, or installing them + # to be run later, requiresthe unit tests to be built. + buildUnitTests = doCheck || installUnitTests; + in { inherit pname version; @@ -174,6 +191,8 @@ in { # If we are doing just build or just docs, the one thing will use # "out". We only need additional outputs if we are doing both. ++ lib.optional (doBuild && enableManual) "doc" + ++ lib.optional installUnitTests "check" + ++ lib.optional doCheck "testresults" ; nativeBuildInputs = [ @@ -191,9 +210,10 @@ in { git mercurial openssh - man # for testing `nix-* --help` ] ++ lib.optionals (doInstallCheck || enableManual) [ jq # Also for custom mdBook preprocessor. + ] ++ lib.optionals enableManual [ + man ] ++ lib.optional stdenv.hostPlatform.isStatic unixtools.hexdump ; @@ -212,7 +232,11 @@ in { ({ inherit readline editline; }.${readlineFlavor}) ] ++ lib.optionals enableMarkdown [ lowdown + ] ++ lib.optionals buildUnitTests [ + gtest + rapidcheck ] ++ lib.optional stdenv.isLinux libseccomp + ++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid # There have been issues building these dependencies ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin)) @@ -226,16 +250,22 @@ in { ); dontBuild = !attrs.doBuild; + doCheck = attrs.doCheck; configureFlags = [ (lib.enableFeature doBuild "build") + (lib.enableFeature buildUnitTests "unit-tests") (lib.enableFeature doInstallCheck "functional-tests") (lib.enableFeature enableManual "doc-gen") (lib.enableFeature enableGC "gc") (lib.enableFeature enableMarkdown "markdown") + (lib.enableFeature installUnitTests "install-unit-tests") (lib.withFeatureAs true "readline-flavor" readlineFlavor) ] ++ lib.optionals (!forDevShell) [ "--sysconfdir=/etc" + ] ++ lib.optionals installUnitTests [ + "--with-check-bin-dir=${builtins.placeholder "check"}/bin" + "--with-check-lib-dir=${builtins.placeholder "check"}/lib" ] ++ lib.optionals (doBuild) [ "--with-boost=${boost}/lib" ] ++ lib.optionals (doBuild && stdenv.isLinux) [ @@ -298,11 +328,6 @@ in { preInstallCheck = lib.optionalString (! 
doBuild) '' mkdir -p src/nix-channel - '' - # See https://github.com/NixOS/nix/issues/2523 - # Occurs often in tests since https://github.com/NixOS/nix/pull/9900 - + lib.optionalString stdenv.hostPlatform.isDarwin '' - export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES ''; separateDebugInfo = !stdenv.hostPlatform.isStatic; @@ -316,6 +341,10 @@ in { platforms = lib.platforms.unix ++ lib.platforms.windows; mainProgram = "nix"; broken = !(lib.all (a: a) [ + # We cannot run or install unit tests if we don't build them or + # Nix proper (which they depend on). + (installUnitTests -> doBuild) + (doCheck -> doBuild) # The build process for the manual currently requires extracting # data from the Nix executable we are trying to document. (enableManual -> doBuild) diff --git a/packaging/components.nix b/packaging/components.nix index 5fc3236cf..5cc0be784 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -32,24 +32,24 @@ in nix-util = callPackage ../src/libutil/package.nix { }; nix-util-c = callPackage ../src/libutil-c/package.nix { }; - nix-util-test-support = callPackage ../tests/unit/libutil-support/package.nix { }; - nix-util-tests = callPackage ../tests/unit/libutil/package.nix { }; + nix-util-test-support = callPackage ../src/libutil-test-support/package.nix { }; + nix-util-tests = callPackage ../src/libutil-tests/package.nix { }; nix-store = callPackage ../src/libstore/package.nix { }; nix-store-c = callPackage ../src/libstore-c/package.nix { }; - nix-store-test-support = callPackage ../tests/unit/libstore-support/package.nix { }; - nix-store-tests = callPackage ../tests/unit/libstore/package.nix { }; + nix-store-test-support = callPackage ../src/libstore-test-support/package.nix { }; + nix-store-tests = callPackage ../src/libstore-tests/package.nix { }; nix-fetchers = callPackage ../src/libfetchers/package.nix { }; - nix-fetchers-tests = callPackage ../tests/unit/libfetchers/package.nix { }; + nix-fetchers-tests = callPackage ../src/libfetchers-tests/package.nix { }; nix-expr = callPackage ../src/libexpr/package.nix { }; nix-expr-c = callPackage ../src/libexpr-c/package.nix { }; - nix-expr-test-support = callPackage ../tests/unit/libexpr-support/package.nix { }; - nix-expr-tests = callPackage ../tests/unit/libexpr/package.nix { }; + nix-expr-test-support = callPackage ../src/libexpr-test-support/package.nix { }; + nix-expr-tests = callPackage ../src/libexpr-tests/package.nix { }; nix-flake = callPackage ../src/libflake/package.nix { }; - nix-flake-tests = callPackage ../tests/unit/libflake/package.nix { }; + nix-flake-tests = callPackage ../src/libflake-tests/package.nix { }; nix-main = callPackage ../src/libmain/package.nix { }; nix-main-c = callPackage ../src/libmain-c/package.nix { }; @@ -60,6 +60,7 @@ in nix-functional-tests = callPackage ../src/nix-functional-tests/package.nix { version = fineVersion; }; + nix-manual = callPackage ../doc/manual/package.nix { version = fineVersion; }; nix-internal-api-docs = callPackage ../src/internal-api-docs/package.nix { version = fineVersion; }; nix-external-api-docs = callPackage ../src/external-api-docs/package.nix { version = fineVersion; }; diff --git a/packaging/dependencies.nix b/packaging/dependencies.nix index 21c48e5cc..13766f2c0 100644 --- a/packaging/dependencies.nix +++ b/packaging/dependencies.nix @@ -38,6 +38,10 @@ let # Indirection for Nixpkgs to override when package.nix files are vendored filesetToSource = lib.fileset.toSource; + /** Given a set of layers, create a mkDerivation-like function */ + mkPackageBuilder = 
exts: userFn: + stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn); + localSourceLayer = finalAttrs: prevAttrs: let workDirPath = @@ -60,6 +64,28 @@ let workDir = null; }; + mesonLayer = finalAttrs: prevAttrs: + { + nativeBuildInputs = [ + pkgs.buildPackages.meson + pkgs.buildPackages.ninja + ] ++ prevAttrs.nativeBuildInputs or []; + }; + + mesonBuildLayer = finalAttrs: prevAttrs: + { + nativeBuildInputs = prevAttrs.nativeBuildInputs or [] ++ [ + pkgs.buildPackages.pkg-config + ]; + separateDebugInfo = !stdenv.hostPlatform.isStatic; + hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; + }; + + mesonLibraryLayer = finalAttrs: prevAttrs: + { + outputs = prevAttrs.outputs or [ "out" ] ++ [ "dev" ]; + }; + # Work around weird `--as-needed` linker behavior with BSD, see # https://github.com/mesonbuild/meson/issues/3593 bsdNoLinkAsNeeded = finalAttrs: prevAttrs: @@ -115,6 +141,29 @@ scope: { version = inputs.libgit2.lastModifiedDate; cmakeFlags = attrs.cmakeFlags or [] ++ [ "-DUSE_SSH=exec" ]; + nativeBuildInputs = attrs.nativeBuildInputs or [] + # gitMinimal does not build on Windows. See packbuilder patch. + ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ + # Needed for `git apply`; see `prePatch` + pkgs.buildPackages.gitMinimal + ]; + # Only `git apply` can handle git binary patches + prePatch = attrs.prePatch or "" + + lib.optionalString (!stdenv.hostPlatform.isWindows) '' + patch() { + git apply + } + ''; + patches = attrs.patches or [] + ++ [ + ./patches/libgit2-mempack-thin-packfile.patch + ] + # gitMinimal does not build on Windows, but fortunately this patch only + # impacts interruptibility + ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ + # binary patch; see `prePatch` + ./patches/libgit2-packbuilder-callback-interruptible.patch + ]; }); busybox-sandbox-shell = pkgs.busybox-sandbox-shell or (pkgs.busybox.override { @@ -149,14 +198,27 @@ scope: { inherit resolvePath filesetToSource; - mkMesonDerivation = f: let - exts = [ + mkMesonDerivation = + mkPackageBuilder [ + miscGoodPractice + localSourceLayer + mesonLayer + ]; + mkMesonExecutable = + mkPackageBuilder [ miscGoodPractice bsdNoLinkAsNeeded localSourceLayer + mesonLayer + mesonBuildLayer + ]; + mkMesonLibrary = + mkPackageBuilder [ + miscGoodPractice + bsdNoLinkAsNeeded + localSourceLayer + mesonLayer + mesonBuildLayer + mesonLibraryLayer ]; - in stdenv.mkDerivation - (lib.extends - (lib.foldr lib.composeExtensions (_: _: {}) exts) - f); } diff --git a/packaging/dev-shell.nix b/packaging/dev-shell.nix new file mode 100644 index 000000000..4b2a87632 --- /dev/null +++ b/packaging/dev-shell.nix @@ -0,0 +1,96 @@ +{ lib, devFlake }: + +{ pkgs }: + +(pkgs.nix.override { forDevShell = true; }).overrideAttrs (attrs: + +let + stdenv = pkgs.nixDependencies.stdenv; + buildCanExecuteHost = stdenv.buildPlatform.canExecute stdenv.hostPlatform; + modular = devFlake.getSystem stdenv.buildPlatform.system; + transformFlag = prefix: flag: + assert builtins.isString flag; + let + rest = builtins.substring 2 (builtins.stringLength flag) flag; + in + "-D${prefix}:${rest}"; + havePerl = stdenv.buildPlatform == stdenv.hostPlatform && stdenv.hostPlatform.isUnix; + ignoreCrossFile = flags: builtins.filter (flag: !(lib.strings.hasInfix "cross-file" flag)) flags; +in { + pname = "shell-for-" + attrs.pname; + + # Remove the version suffix to avoid unnecessary attempts to substitute in nix develop + version = lib.fileContents ../.version; + name = attrs.pname; + + installFlags = "sysconfdir=$(out)/etc"; + 
shellHook = '' + PATH=$prefix/bin:$PATH + unset PYTHONPATH + export MANPATH=$out/share/man:$MANPATH + + # Make bash completion work. + XDG_DATA_DIRS+=:$out/share + ''; + + # We use this shell with the local checkout, not unpackPhase. + src = null; + + env = { + # Needed for Meson to find Boost. + # https://github.com/NixOS/nixpkgs/issues/86131. + BOOST_INCLUDEDIR = "${lib.getDev pkgs.nixDependencies.boost}/include"; + BOOST_LIBRARYDIR = "${lib.getLib pkgs.nixDependencies.boost}/lib"; + # For `make format`, to work without installing pre-commit + _NIX_PRE_COMMIT_HOOKS_CONFIG = + "${(pkgs.formats.yaml { }).generate "pre-commit-config.yaml" modular.pre-commit.settings.rawConfig}"; + }; + + mesonFlags = + map (transformFlag "libutil") (ignoreCrossFile pkgs.nixComponents.nix-util.mesonFlags) + ++ map (transformFlag "libstore") (ignoreCrossFile pkgs.nixComponents.nix-store.mesonFlags) + ++ map (transformFlag "libfetchers") (ignoreCrossFile pkgs.nixComponents.nix-fetchers.mesonFlags) + ++ lib.optionals havePerl (map (transformFlag "perl") (ignoreCrossFile pkgs.nixComponents.nix-perl-bindings.mesonFlags)) + ++ map (transformFlag "libexpr") (ignoreCrossFile pkgs.nixComponents.nix-expr.mesonFlags) + ++ map (transformFlag "libcmd") (ignoreCrossFile pkgs.nixComponents.nix-cmd.mesonFlags) + ; + + nativeBuildInputs = attrs.nativeBuildInputs or [] + ++ pkgs.nixComponents.nix-util.nativeBuildInputs + ++ pkgs.nixComponents.nix-store.nativeBuildInputs + ++ pkgs.nixComponents.nix-fetchers.nativeBuildInputs + ++ pkgs.nixComponents.nix-expr.nativeBuildInputs + ++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.nativeBuildInputs + ++ lib.optionals buildCanExecuteHost pkgs.nixComponents.nix-manual.externalNativeBuildInputs + ++ pkgs.nixComponents.nix-internal-api-docs.nativeBuildInputs + ++ pkgs.nixComponents.nix-external-api-docs.nativeBuildInputs + ++ pkgs.nixComponents.nix-functional-tests.externalNativeBuildInputs + ++ lib.optional + (!buildCanExecuteHost + # Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479 + && !(stdenv.hostPlatform.isWindows && stdenv.buildPlatform.isDarwin) + && stdenv.hostPlatform.emulatorAvailable pkgs.buildPackages + && lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages)) + pkgs.buildPackages.mesonEmulatorHook + ++ [ + pkgs.buildPackages.cmake + pkgs.buildPackages.shellcheck + pkgs.buildPackages.changelog-d + modular.pre-commit.settings.package + (pkgs.writeScriptBin "pre-commit-hooks-install" + modular.pre-commit.settings.installationScript) + ] + # TODO: Remove the darwin check once + # https://github.com/NixOS/nixpkgs/pull/291814 is available + ++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear + ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) (lib.hiPrio pkgs.buildPackages.clang-tools); + + buildInputs = attrs.buildInputs or [] + ++ pkgs.nixComponents.nix-util.buildInputs + ++ pkgs.nixComponents.nix-store.buildInputs + ++ pkgs.nixComponents.nix-fetchers.buildInputs + ++ pkgs.nixComponents.nix-expr.buildInputs + ++ pkgs.nixComponents.nix-store-tests.externalBuildInputs + ++ lib.optional havePerl pkgs.perl + ; +}) diff --git a/packaging/everything.nix b/packaging/everything.nix index 8c8ce6611..ae2f93da0 100644 --- a/packaging/everything.nix +++ b/packaging/everything.nix @@ -33,13 +33,14 @@ nix-functional-tests, + nix-manual, nix-internal-api-docs, nix-external-api-docs, nix-perl-bindings, }: -(buildEnv rec { 
+(buildEnv { name = "nix-${nix-cli.version}"; paths = [ nix-util @@ -70,13 +71,16 @@ nix-cli + nix-manual nix-internal-api-docs nix-external-api-docs ] ++ lib.optionals (stdenv.buildPlatform.canExecute stdenv.hostPlatform) [ nix-perl-bindings ]; -}).overrideAttrs (_: { + + meta.mainProgram = "nix"; +}).overrideAttrs (finalAttrs: prevAttrs: { doCheck = true; doInstallCheck = true; @@ -90,4 +94,37 @@ installCheckInputs = [ nix-functional-tests ]; + passthru = prevAttrs.passthru // { + /** + These are the libraries that are part of the Nix project. They are used + by the Nix CLI and other tools. + + If you need to use these libraries in your project, we recommend to use + the `-c` C API libraries exclusively, if possible. + + We also recommend that you build the complete package to ensure that the unit tests pass. + You could do this in CI, or by passing it in an unused environment variable. e.g in a `mkDerivation` call: + + ```nix + buildInputs = [ nix.libs.nix-util-c nix.libs.nix-store-c ]; + # Make sure the nix libs we use are ok + unusedInputsForTests = [ nix ]; + disallowedReferences = nix.all; + ``` + */ + libs = { + inherit + nix-util + nix-util-c + nix-store + nix-store-c + nix-fetchers + nix-expr + nix-expr-c + nix-flake + nix-main + nix-main-c + ; + }; + }; }) diff --git a/packaging/hydra.nix b/packaging/hydra.nix index 65978835c..f47ed80e3 100644 --- a/packaging/hydra.nix +++ b/packaging/hydra.nix @@ -16,25 +16,16 @@ let inherit tarballs; }; - testNixVersions = pkgs: client: daemon: - pkgs.nixComponents.callPackage ../package.nix { + testNixVersions = pkgs: daemon: + pkgs.nixComponents.nix-functional-tests.override { pname = "nix-tests" + lib.optionalString (lib.versionAtLeast daemon.version "2.4pre20211005" && - lib.versionAtLeast client.version "2.4pre20211005") - "-${client.version}-against-${daemon.version}"; + lib.versionAtLeast pkgs.nix.version "2.4pre20211005") + "-${pkgs.nix.version}-against-${daemon.version}"; - test-client = client; test-daemon = daemon; - - doBuild = false; - - # This could be more accurate, but a shorter version will match the - # fine version with rev. This functionality is already covered in - # the normal test, so it's fine. - version = pkgs.nixComponents.version; - versionSuffix = pkgs.nixComponents.versionSuffix; }; # Technically we could just return `pkgs.nixComponents`, but for Hydra it's @@ -71,7 +62,9 @@ in build = forAllPackages (pkgName: forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.${pkgName})); - shellInputs = forAllSystems (system: self.devShells.${system}.default.inputDerivation); + shellInputs = removeAttrs + (forAllSystems (system: self.devShells.${system}.default.inputDerivation)) + [ "i686-linux" ]; buildStatic = forAllPackages (pkgName: lib.genAttrs linux64BitSystems (system: nixpkgsFor.${system}.static.nixComponents.${pkgName})); @@ -82,20 +75,28 @@ in (forAllCrossSystems (crossSystem: lib.genAttrs [ "x86_64-linux" ] (system: nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName})))); - buildNoGc = forAllSystems (system: - self.packages.${system}.nix.override { enableGC = false; } - ); + buildNoGc = let + components = forAllSystems (system: + nixpkgsFor.${system}.native.nixComponents.overrideScope (self: super: { + nix-expr = super.nix-expr.override { enableGC = false; }; + }) + ); + in forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName})); buildNoTests = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-cli); # Toggles some settings for better coverage. 
Windows needs these # library combinations, and Debian build Nix with GNU readline too. - buildReadlineNoMarkdown = forAllSystems (system: - self.packages.${system}.nix.override { - enableMarkdown = false; - readlineFlavor = "readline"; - } - ); + buildReadlineNoMarkdown = let + components = forAllSystems (system: + nixpkgsFor.${system}.native.nixComponents.overrideScope (self: super: { + nix-cmd = super.nix-cmd.override { + enableMarkdown = false; + readlineFlavor = "readline"; + }; + }) + ); + in forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName})); # Perl bindings for various platforms. perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-perl-bindings); @@ -146,6 +147,9 @@ in withCoverageChecks = true; }; + # Nix's manual + manual = nixpkgsFor.x86_64-linux.native.nixComponents.nix-manual; + # API docs for Nix's unstable internal C++ interfaces. internal-api-docs = nixpkgsFor.x86_64-linux.native.nixComponents.nix-internal-api-docs; @@ -193,15 +197,15 @@ in let pkgs = nixpkgsFor.${system}.native; in pkgs.runCommand "install-tests" { - againstSelf = testNixVersions pkgs pkgs.nix pkgs.pkgs.nix; + againstSelf = testNixVersions pkgs pkgs.nix; againstCurrentLatest = # FIXME: temporarily disable this on macOS because of #3605. if system == "x86_64-linux" - then testNixVersions pkgs pkgs.nix pkgs.nixVersions.latest + then testNixVersions pkgs pkgs.nixVersions.latest else null; # Disabled because the latest stable version doesn't handle # `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work - # againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable; + # againstLatestStable = testNixVersions pkgs pkgs.nixStable; } "touch $out"); installerTests = import ../tests/installer { diff --git a/packaging/patches/libgit2-mempack-thin-packfile.patch b/packaging/patches/libgit2-mempack-thin-packfile.patch new file mode 100644 index 000000000..fb74b1683 --- /dev/null +++ b/packaging/patches/libgit2-mempack-thin-packfile.patch @@ -0,0 +1,282 @@ +commit 9bacade4a3ef4b6b26e2c02f549eef0e9eb9eaa2 +Author: Robert Hensing +Date: Sun Aug 18 20:20:36 2024 +0200 + + Add unoptimized git_mempack_write_thin_pack + +diff --git a/include/git2/sys/mempack.h b/include/git2/sys/mempack.h +index 17da590a3..3688bdd50 100644 +--- a/include/git2/sys/mempack.h ++++ b/include/git2/sys/mempack.h +@@ -44,6 +44,29 @@ GIT_BEGIN_DECL + */ + GIT_EXTERN(int) git_mempack_new(git_odb_backend **out); + ++/** ++ * Write a thin packfile with the objects in the memory store. ++ * ++ * A thin packfile is a packfile that does not contain its transitive closure of ++ * references. This is useful for efficiently distributing additions to a ++ * repository over the network, but also finds use in the efficient bulk ++ * addition of objects to a repository, locally. ++ * ++ * This operation performs the (shallow) insert operations into the ++ * `git_packbuilder`, but does not write the packfile to disk; ++ * see `git_packbuilder_write_buf`. ++ * ++ * It also does not reset the memory store; see `git_mempack_reset`. ++ * ++ * @note This function may or may not write trees and blobs that are not ++ * referenced by commits. Currently everything is written, but this ++ * behavior may change in the future as the packer is optimized. 
++ * ++ * @param backend The mempack backend ++ * @param pb The packbuilder to use to write the packfile ++ */ ++GIT_EXTERN(int) git_mempack_write_thin_pack(git_odb_backend *backend, git_packbuilder *pb); ++ + /** + * Dump all the queued in-memory writes to a packfile. + * +diff --git a/src/libgit2/odb_mempack.c b/src/libgit2/odb_mempack.c +index 6f27f45f8..0b61e2b66 100644 +--- a/src/libgit2/odb_mempack.c ++++ b/src/libgit2/odb_mempack.c +@@ -132,6 +132,35 @@ cleanup: + return err; + } + ++int git_mempack_write_thin_pack(git_odb_backend *backend, git_packbuilder *pb) ++{ ++ struct memory_packer_db *db = (struct memory_packer_db *)backend; ++ const git_oid *oid; ++ size_t iter = 0; ++ int err = -1; ++ ++ /* TODO: Implement the recency heuristics. ++ For this it probably makes sense to only write what's referenced ++ through commits, an option I've carved out for you in the docs. ++ wrt heuristics: ask your favorite LLM to translate https://git-scm.com/docs/pack-heuristics/en ++ to actual normal reference documentation. */ ++ while (true) { ++ err = git_oidmap_iterate(NULL, db->objects, &iter, &oid); ++ if (err == GIT_ITEROVER) { ++ err = 0; ++ break; ++ } ++ if (err != 0) ++ return err; ++ ++ err = git_packbuilder_insert(pb, oid, NULL); ++ if (err != 0) ++ return err; ++ } ++ ++ return 0; ++} ++ + int git_mempack_dump( + git_buf *pack, + git_repository *repo, +diff --git a/tests/libgit2/mempack/thinpack.c b/tests/libgit2/mempack/thinpack.c +new file mode 100644 +index 000000000..604a4dda2 +--- /dev/null ++++ b/tests/libgit2/mempack/thinpack.c +@@ -0,0 +1,196 @@ ++#include "clar_libgit2.h" ++#include "git2/indexer.h" ++#include "git2/odb_backend.h" ++#include "git2/tree.h" ++#include "git2/types.h" ++#include "git2/sys/mempack.h" ++#include "git2/sys/odb_backend.h" ++#include "util.h" ++ ++static git_repository *_repo; ++static git_odb_backend * _mempack_backend; ++ ++void test_mempack_thinpack__initialize(void) ++{ ++ git_odb *odb; ++ ++ _repo = cl_git_sandbox_init_new("mempack_thinpack_repo"); ++ ++ cl_git_pass(git_mempack_new(&_mempack_backend)); ++ cl_git_pass(git_repository_odb(&odb, _repo)); ++ cl_git_pass(git_odb_add_backend(odb, _mempack_backend, 999)); ++ git_odb_free(odb); ++} ++ ++void _mempack_thinpack__cleanup(void) ++{ ++ cl_git_sandbox_cleanup(); ++} ++ ++/* ++ Generating a packfile for an unchanged repo works and produces an empty packfile. ++ Even if we allow this scenario to be detected, it shouldn't misbehave if the ++ application is unaware of it. ++*/ ++void test_mempack_thinpack__empty(void) ++{ ++ git_packbuilder *pb; ++ int version; ++ int n; ++ git_buf buf = GIT_BUF_INIT; ++ ++ git_packbuilder_new(&pb, _repo); ++ ++ cl_git_pass(git_mempack_write_thin_pack(_mempack_backend, pb)); ++ cl_git_pass(git_packbuilder_write_buf(&buf, pb)); ++ cl_assert_in_range(12, buf.size, 1024 /* empty packfile is >0 bytes, but certainly not that big */); ++ cl_assert(buf.ptr[0] == 'P'); ++ cl_assert(buf.ptr[1] == 'A'); ++ cl_assert(buf.ptr[2] == 'C'); ++ cl_assert(buf.ptr[3] == 'K'); ++ version = (buf.ptr[4] << 24) | (buf.ptr[5] << 16) | (buf.ptr[6] << 8) | buf.ptr[7]; ++ /* Subject to change. 
https://git-scm.com/docs/pack-format: Git currently accepts version number 2 or 3 but generates version 2 only.*/ ++ cl_assert_equal_i(2, version); ++ n = (buf.ptr[8] << 24) | (buf.ptr[9] << 16) | (buf.ptr[10] << 8) | buf.ptr[11]; ++ cl_assert_equal_i(0, n); ++ git_buf_dispose(&buf); ++ ++ git_packbuilder_free(pb); ++} ++ ++#define LIT_LEN(x) x, sizeof(x) - 1 ++ ++/* ++ Check that git_mempack_write_thin_pack produces a thin packfile. ++*/ ++void test_mempack_thinpack__thin(void) ++{ ++ /* Outline: ++ - Create tree 1 ++ - Flush to packfile A ++ - Create tree 2 ++ - Flush to packfile B ++ ++ Tree 2 has a new blob and a reference to a blob from tree 1. ++ ++ Expectation: ++ - Packfile B is thin and does not contain the objects from packfile A ++ */ ++ ++ ++ git_oid oid_blob_1; ++ git_oid oid_blob_2; ++ git_oid oid_blob_3; ++ git_oid oid_tree_1; ++ git_oid oid_tree_2; ++ git_treebuilder *tb; ++ ++ git_packbuilder *pb; ++ git_buf buf = GIT_BUF_INIT; ++ git_indexer *indexer; ++ git_indexer_progress stats; ++ char pack_dir_path[1024]; ++ ++ char sbuf[1024]; ++ const char * repo_path; ++ const char * pack_name_1; ++ const char * pack_name_2; ++ git_str pack_path_1 = GIT_STR_INIT; ++ git_str pack_path_2 = GIT_STR_INIT; ++ git_odb_backend * pack_odb_backend_1; ++ git_odb_backend * pack_odb_backend_2; ++ ++ ++ cl_assert_in_range(0, snprintf(pack_dir_path, sizeof(pack_dir_path), "%s/objects/pack", git_repository_path(_repo)), sizeof(pack_dir_path)); ++ ++ /* Create tree 1 */ ++ ++ cl_git_pass(git_blob_create_from_buffer(&oid_blob_1, _repo, LIT_LEN("thinpack blob 1"))); ++ cl_git_pass(git_blob_create_from_buffer(&oid_blob_2, _repo, LIT_LEN("thinpack blob 2"))); ++ ++ ++ cl_git_pass(git_treebuilder_new(&tb, _repo, NULL)); ++ cl_git_pass(git_treebuilder_insert(NULL, tb, "blob1", &oid_blob_1, GIT_FILEMODE_BLOB)); ++ cl_git_pass(git_treebuilder_insert(NULL, tb, "blob2", &oid_blob_2, GIT_FILEMODE_BLOB)); ++ cl_git_pass(git_treebuilder_write(&oid_tree_1, tb)); ++ ++ /* Flush */ ++ ++ cl_git_pass(git_packbuilder_new(&pb, _repo)); ++ cl_git_pass(git_mempack_write_thin_pack(_mempack_backend, pb)); ++ cl_git_pass(git_packbuilder_write_buf(&buf, pb)); ++ cl_git_pass(git_indexer_new(&indexer, pack_dir_path, 0, NULL, NULL)); ++ cl_git_pass(git_indexer_append(indexer, buf.ptr, buf.size, &stats)); ++ cl_git_pass(git_indexer_commit(indexer, &stats)); ++ pack_name_1 = strdup(git_indexer_name(indexer)); ++ cl_assert(pack_name_1); ++ git_buf_dispose(&buf); ++ git_mempack_reset(_mempack_backend); ++ git_indexer_free(indexer); ++ git_packbuilder_free(pb); ++ ++ /* Create tree 2 */ ++ ++ cl_git_pass(git_treebuilder_clear(tb)); ++ /* blob 1 won't be used, but we add it anyway to test that just "declaring" an object doesn't ++ necessarily cause its inclusion in the next thin packfile. It must only be included if new. 
*/ ++ cl_git_pass(git_blob_create_from_buffer(&oid_blob_1, _repo, LIT_LEN("thinpack blob 1"))); ++ cl_git_pass(git_blob_create_from_buffer(&oid_blob_3, _repo, LIT_LEN("thinpack blob 3"))); ++ cl_git_pass(git_treebuilder_insert(NULL, tb, "blob1", &oid_blob_1, GIT_FILEMODE_BLOB)); ++ cl_git_pass(git_treebuilder_insert(NULL, tb, "blob3", &oid_blob_3, GIT_FILEMODE_BLOB)); ++ cl_git_pass(git_treebuilder_write(&oid_tree_2, tb)); ++ ++ /* Flush */ ++ ++ cl_git_pass(git_packbuilder_new(&pb, _repo)); ++ cl_git_pass(git_mempack_write_thin_pack(_mempack_backend, pb)); ++ cl_git_pass(git_packbuilder_write_buf(&buf, pb)); ++ cl_git_pass(git_indexer_new(&indexer, pack_dir_path, 0, NULL, NULL)); ++ cl_git_pass(git_indexer_append(indexer, buf.ptr, buf.size, &stats)); ++ cl_git_pass(git_indexer_commit(indexer, &stats)); ++ pack_name_2 = strdup(git_indexer_name(indexer)); ++ cl_assert(pack_name_2); ++ git_buf_dispose(&buf); ++ git_mempack_reset(_mempack_backend); ++ git_indexer_free(indexer); ++ git_packbuilder_free(pb); ++ git_treebuilder_free(tb); ++ ++ /* Assertions */ ++ ++ assert(pack_name_1); ++ assert(pack_name_2); ++ ++ repo_path = git_repository_path(_repo); ++ ++ snprintf(sbuf, sizeof(sbuf), "objects/pack/pack-%s.pack", pack_name_1); ++ git_str_joinpath(&pack_path_1, repo_path, sbuf); ++ snprintf(sbuf, sizeof(sbuf), "objects/pack/pack-%s.pack", pack_name_2); ++ git_str_joinpath(&pack_path_2, repo_path, sbuf); ++ ++ /* If they're the same, something definitely went wrong. */ ++ cl_assert(strcmp(pack_name_1, pack_name_2) != 0); ++ ++ cl_git_pass(git_odb_backend_one_pack(&pack_odb_backend_1, pack_path_1.ptr)); ++ cl_assert(pack_odb_backend_1->exists(pack_odb_backend_1, &oid_blob_1)); ++ cl_assert(pack_odb_backend_1->exists(pack_odb_backend_1, &oid_blob_2)); ++ cl_assert(!pack_odb_backend_1->exists(pack_odb_backend_1, &oid_blob_3)); ++ cl_assert(pack_odb_backend_1->exists(pack_odb_backend_1, &oid_tree_1)); ++ cl_assert(!pack_odb_backend_1->exists(pack_odb_backend_1, &oid_tree_2)); ++ ++ cl_git_pass(git_odb_backend_one_pack(&pack_odb_backend_2, pack_path_2.ptr)); ++ /* blob 1 is already in the packfile 1, so packfile 2 must not include it, in order to be _thin_. */ ++ cl_assert(!pack_odb_backend_2->exists(pack_odb_backend_2, &oid_blob_1)); ++ cl_assert(!pack_odb_backend_2->exists(pack_odb_backend_2, &oid_blob_2)); ++ cl_assert(pack_odb_backend_2->exists(pack_odb_backend_2, &oid_blob_3)); ++ cl_assert(!pack_odb_backend_2->exists(pack_odb_backend_2, &oid_tree_1)); ++ cl_assert(pack_odb_backend_2->exists(pack_odb_backend_2, &oid_tree_2)); ++ ++ pack_odb_backend_1->free(pack_odb_backend_1); ++ pack_odb_backend_2->free(pack_odb_backend_2); ++ free((void *)pack_name_1); ++ free((void *)pack_name_2); ++ git_str_dispose(&pack_path_1); ++ git_str_dispose(&pack_path_2); ++ ++} diff --git a/packaging/patches/libgit2-packbuilder-callback-interruptible.patch b/packaging/patches/libgit2-packbuilder-callback-interruptible.patch new file mode 100644 index 000000000..c67822ff7 --- /dev/null +++ b/packaging/patches/libgit2-packbuilder-callback-interruptible.patch @@ -0,0 +1,930 @@ +commit e9823c5da4fa977c46bcb97167fbdd0d70adb5ff +Author: Robert Hensing +Date: Mon Aug 26 20:07:04 2024 +0200 + + Make packbuilder interruptible using progress callback + + Forward errors from packbuilder->progress_cb + + This allows the callback to terminate long-running operations when + the application is interrupted. 
+ +diff --git a/include/git2/pack.h b/include/git2/pack.h +index 0f6bd2ab9..bee72a6c0 100644 +--- a/include/git2/pack.h ++++ b/include/git2/pack.h +@@ -247,6 +247,9 @@ typedef int GIT_CALLBACK(git_packbuilder_progress)( + * @param progress_cb Function to call with progress information during + * pack building. Be aware that this is called inline with pack building + * operations, so performance may be affected. ++ * When progress_cb returns an error, the pack building process will be ++ * aborted and the error will be returned from the invoked function. ++ * `pb` must then be freed. + * @param progress_cb_payload Payload for progress callback. + * @return 0 or an error code + */ +diff --git a/src/libgit2/pack-objects.c b/src/libgit2/pack-objects.c +index b2d80cba9..7c331c2d5 100644 +--- a/src/libgit2/pack-objects.c ++++ b/src/libgit2/pack-objects.c +@@ -932,6 +932,9 @@ static int report_delta_progress( + { + int ret; + ++ if (pb->failure) ++ return pb->failure; ++ + if (pb->progress_cb) { + uint64_t current_time = git_time_monotonic(); + uint64_t elapsed = current_time - pb->last_progress_report_time; +@@ -943,8 +946,10 @@ static int report_delta_progress( + GIT_PACKBUILDER_DELTAFICATION, + count, pb->nr_objects, pb->progress_cb_payload); + +- if (ret) ++ if (ret) { ++ pb->failure = ret; + return git_error_set_after_callback(ret); ++ } + } + } + +@@ -976,7 +981,10 @@ static int find_deltas(git_packbuilder *pb, git_pobject **list, + } + + pb->nr_deltified += 1; +- report_delta_progress(pb, pb->nr_deltified, false); ++ if ((error = report_delta_progress(pb, pb->nr_deltified, false)) < 0) { ++ GIT_ASSERT(git_packbuilder__progress_unlock(pb) == 0); ++ goto on_error; ++ } + + po = *list++; + (*list_size)--; +@@ -1124,6 +1132,10 @@ struct thread_params { + size_t depth; + size_t working; + size_t data_ready; ++ ++ /* A pb->progress_cb can stop the packing process by returning an error. ++ When that happens, all threads observe the error and stop voluntarily. 
*/ ++ bool stopped; + }; + + static void *threaded_find_deltas(void *arg) +@@ -1133,7 +1145,12 @@ static void *threaded_find_deltas(void *arg) + while (me->remaining) { + if (find_deltas(me->pb, me->list, &me->remaining, + me->window, me->depth) < 0) { +- ; /* TODO */ ++ me->stopped = true; ++ GIT_ASSERT_WITH_RETVAL(git_packbuilder__progress_lock(me->pb) == 0, NULL); ++ me->working = false; ++ git_cond_signal(&me->pb->progress_cond); ++ GIT_ASSERT_WITH_RETVAL(git_packbuilder__progress_unlock(me->pb) == 0, NULL); ++ return NULL; + } + + GIT_ASSERT_WITH_RETVAL(git_packbuilder__progress_lock(me->pb) == 0, NULL); +@@ -1175,8 +1192,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list, + pb->nr_threads = git__online_cpus(); + + if (pb->nr_threads <= 1) { +- find_deltas(pb, list, &list_size, window, depth); +- return 0; ++ return find_deltas(pb, list, &list_size, window, depth); + } + + p = git__mallocarray(pb->nr_threads, sizeof(*p)); +@@ -1195,6 +1211,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list, + p[i].depth = depth; + p[i].working = 1; + p[i].data_ready = 0; ++ p[i].stopped = 0; + + /* try to split chunks on "path" boundaries */ + while (sub_size && sub_size < list_size && +@@ -1262,7 +1279,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list, + (!victim || victim->remaining < p[i].remaining)) + victim = &p[i]; + +- if (victim) { ++ if (victim && !target->stopped) { + sub_size = victim->remaining / 2; + list = victim->list + victim->list_size - sub_size; + while (sub_size && list[0]->hash && +@@ -1286,7 +1303,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list, + } + target->list_size = sub_size; + target->remaining = sub_size; +- target->working = 1; ++ target->working = 1; /* even when target->stopped, so that we don't process this thread again */ + GIT_ASSERT(git_packbuilder__progress_unlock(pb) == 0); + + if (git_mutex_lock(&target->mutex)) { +@@ -1299,7 +1316,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list, + git_cond_signal(&target->cond); + git_mutex_unlock(&target->mutex); + +- if (!sub_size) { ++ if (target->stopped || !sub_size) { + git_thread_join(&target->thread, NULL); + git_cond_free(&target->cond); + git_mutex_free(&target->mutex); +@@ -1308,7 +1325,7 @@ static int ll_find_deltas(git_packbuilder *pb, git_pobject **list, + } + + git__free(p); +- return 0; ++ return pb->failure; + } + + #else +@@ -1319,6 +1336,7 @@ int git_packbuilder__prepare(git_packbuilder *pb) + { + git_pobject **delta_list; + size_t i, n = 0; ++ int error; + + if (pb->nr_objects == 0 || pb->done) + return 0; /* nothing to do */ +@@ -1327,8 +1345,10 @@ int git_packbuilder__prepare(git_packbuilder *pb) + * Although we do not report progress during deltafication, we + * at least report that we are in the deltafication stage + */ +- if (pb->progress_cb) +- pb->progress_cb(GIT_PACKBUILDER_DELTAFICATION, 0, pb->nr_objects, pb->progress_cb_payload); ++ if (pb->progress_cb) { ++ if ((error = pb->progress_cb(GIT_PACKBUILDER_DELTAFICATION, 0, pb->nr_objects, pb->progress_cb_payload)) < 0) ++ return git_error_set_after_callback(error); ++ } + + delta_list = git__mallocarray(pb->nr_objects, sizeof(*delta_list)); + GIT_ERROR_CHECK_ALLOC(delta_list); +@@ -1345,31 +1365,33 @@ int git_packbuilder__prepare(git_packbuilder *pb) + + if (n > 1) { + git__tsort((void **)delta_list, n, type_size_sort); +- if (ll_find_deltas(pb, delta_list, n, ++ if ((error = ll_find_deltas(pb, delta_list, n, + GIT_PACK_WINDOW + 1, +- 
GIT_PACK_DEPTH) < 0) { ++ GIT_PACK_DEPTH)) < 0) { + git__free(delta_list); +- return -1; ++ return error; + } + } + +- report_delta_progress(pb, pb->nr_objects, true); ++ error = report_delta_progress(pb, pb->nr_objects, true); + + pb->done = true; + git__free(delta_list); +- return 0; ++ return error; + } + +-#define PREPARE_PACK if (git_packbuilder__prepare(pb) < 0) { return -1; } ++#define PREPARE_PACK error = git_packbuilder__prepare(pb); if (error < 0) { return error; } + + int git_packbuilder_foreach(git_packbuilder *pb, int (*cb)(void *buf, size_t size, void *payload), void *payload) + { ++ int error; + PREPARE_PACK; + return write_pack(pb, cb, payload); + } + + int git_packbuilder__write_buf(git_str *buf, git_packbuilder *pb) + { ++ int error; + PREPARE_PACK; + + return write_pack(pb, &write_pack_buf, buf); +diff --git a/src/libgit2/pack-objects.h b/src/libgit2/pack-objects.h +index bbc8b9430..380a28ebe 100644 +--- a/src/libgit2/pack-objects.h ++++ b/src/libgit2/pack-objects.h +@@ -100,6 +100,10 @@ struct git_packbuilder { + uint64_t last_progress_report_time; + + bool done; ++ ++ /* A non-zero error code in failure causes all threads to shut themselves ++ down. Some functions will return this error code. */ ++ volatile int failure; + }; + + int git_packbuilder__write_buf(git_str *buf, git_packbuilder *pb); +diff --git a/tests/libgit2/pack/cancel.c b/tests/libgit2/pack/cancel.c +new file mode 100644 +index 000000000..a0aa9716a +--- /dev/null ++++ b/tests/libgit2/pack/cancel.c +@@ -0,0 +1,240 @@ ++#include "clar_libgit2.h" ++#include "futils.h" ++#include "pack.h" ++#include "hash.h" ++#include "iterator.h" ++#include "vector.h" ++#include "posix.h" ++#include "hash.h" ++#include "pack-objects.h" ++ ++static git_repository *_repo; ++static git_revwalk *_revwalker; ++static git_packbuilder *_packbuilder; ++static git_indexer *_indexer; ++static git_vector _commits; ++static int _commits_is_initialized; ++static git_indexer_progress _stats; ++ ++extern bool git_disable_pack_keep_file_checks; ++ ++static void pack_packbuilder_init(const char *sandbox) { ++ _repo = cl_git_sandbox_init(sandbox); ++ /* cl_git_pass(p_chdir(sandbox)); */ ++ cl_git_pass(git_revwalk_new(&_revwalker, _repo)); ++ cl_git_pass(git_packbuilder_new(&_packbuilder, _repo)); ++ cl_git_pass(git_vector_init(&_commits, 0, NULL)); ++ _commits_is_initialized = 1; ++ memset(&_stats, 0, sizeof(_stats)); ++ p_fsync__cnt = 0; ++} ++ ++void test_pack_cancel__initialize(void) ++{ ++ pack_packbuilder_init("small.git"); ++} ++ ++void test_pack_cancel__cleanup(void) ++{ ++ git_oid *o; ++ unsigned int i; ++ ++ cl_git_pass(git_libgit2_opts(GIT_OPT_ENABLE_FSYNC_GITDIR, 0)); ++ cl_git_pass(git_libgit2_opts(GIT_OPT_DISABLE_PACK_KEEP_FILE_CHECKS, false)); ++ ++ if (_commits_is_initialized) { ++ _commits_is_initialized = 0; ++ git_vector_foreach(&_commits, i, o) { ++ git__free(o); ++ } ++ git_vector_free(&_commits); ++ } ++ ++ git_packbuilder_free(_packbuilder); ++ _packbuilder = NULL; ++ ++ git_revwalk_free(_revwalker); ++ _revwalker = NULL; ++ ++ git_indexer_free(_indexer); ++ _indexer = NULL; ++ ++ /* cl_git_pass(p_chdir("..")); */ ++ cl_git_sandbox_cleanup(); ++ _repo = NULL; ++} ++ ++static int seed_packbuilder(void) ++{ ++ int error; ++ git_oid oid, *o; ++ unsigned int i; ++ ++ git_revwalk_sorting(_revwalker, GIT_SORT_TIME); ++ cl_git_pass(git_revwalk_push_ref(_revwalker, "HEAD")); ++ ++ while (git_revwalk_next(&oid, _revwalker) == 0) { ++ o = git__malloc(sizeof(git_oid)); ++ cl_assert(o != NULL); ++ git_oid_cpy(o, &oid); ++ 
cl_git_pass(git_vector_insert(&_commits, o)); ++ } ++ ++ git_vector_foreach(&_commits, i, o) { ++ if((error = git_packbuilder_insert(_packbuilder, o, NULL)) < 0) ++ return error; ++ } ++ ++ git_vector_foreach(&_commits, i, o) { ++ git_object *obj; ++ cl_git_pass(git_object_lookup(&obj, _repo, o, GIT_OBJECT_COMMIT)); ++ error = git_packbuilder_insert_tree(_packbuilder, ++ git_commit_tree_id((git_commit *)obj)); ++ git_object_free(obj); ++ if (error < 0) ++ return error; ++ } ++ ++ return 0; ++} ++ ++static int fail_stage; ++ ++static int packbuilder_cancel_after_n_calls_cb(int stage, uint32_t current, uint32_t total, void *payload) ++{ ++ ++ /* Force the callback to run again on the next opportunity regardless ++ of how fast we're running. */ ++ _packbuilder->last_progress_report_time = 0; ++ ++ if (stage == fail_stage) { ++ int *calls = (int *)payload; ++ int n = *calls; ++ /* Always decrement, including past zero. This way the error is only ++ triggered once, making sure it is picked up immediately. */ ++ --*calls; ++ if (n == 0) ++ return GIT_EUSER; ++ } ++ ++ return 0; ++} ++ ++static void test_cancel(int n) ++{ ++ ++ int calls_remaining = n; ++ int err; ++ git_buf buf = GIT_BUF_INIT; ++ ++ /* Switch to a small repository, so that `packbuilder_cancel_after_n_calls_cb` ++ can hack the time to call the callback on every opportunity. */ ++ ++ cl_git_pass(git_packbuilder_set_callbacks(_packbuilder, &packbuilder_cancel_after_n_calls_cb, &calls_remaining)); ++ err = seed_packbuilder(); ++ if (!err) ++ err = git_packbuilder_write_buf(&buf, _packbuilder); ++ ++ cl_assert_equal_i(GIT_EUSER, err); ++} ++void test_pack_cancel__cancel_after_add_0(void) ++{ ++ fail_stage = GIT_PACKBUILDER_ADDING_OBJECTS; ++ test_cancel(0); ++} ++ ++void test_pack_cancel__cancel_after_add_1(void) ++{ ++ cl_skip(); ++ fail_stage = GIT_PACKBUILDER_ADDING_OBJECTS; ++ test_cancel(1); ++} ++ ++void test_pack_cancel__cancel_after_delta_0(void) ++{ ++ fail_stage = GIT_PACKBUILDER_DELTAFICATION; ++ test_cancel(0); ++} ++ ++void test_pack_cancel__cancel_after_delta_1(void) ++{ ++ fail_stage = GIT_PACKBUILDER_DELTAFICATION; ++ test_cancel(1); ++} ++ ++void test_pack_cancel__cancel_after_delta_0_threaded(void) ++{ ++#ifdef GIT_THREADS ++ git_packbuilder_set_threads(_packbuilder, 8); ++ fail_stage = GIT_PACKBUILDER_DELTAFICATION; ++ test_cancel(0); ++#else ++ cl_skip(); ++#endif ++} ++ ++void test_pack_cancel__cancel_after_delta_1_threaded(void) ++{ ++#ifdef GIT_THREADS ++ git_packbuilder_set_threads(_packbuilder, 8); ++ fail_stage = GIT_PACKBUILDER_DELTAFICATION; ++ test_cancel(1); ++#else ++ cl_skip(); ++#endif ++} ++ ++static int foreach_cb(void *buf, size_t len, void *payload) ++{ ++ git_indexer *idx = (git_indexer *) payload; ++ cl_git_pass(git_indexer_append(idx, buf, len, &_stats)); ++ return 0; ++} ++ ++void test_pack_cancel__foreach(void) ++{ ++ git_indexer *idx; ++ ++ seed_packbuilder(); ++ ++#ifdef GIT_EXPERIMENTAL_SHA256 ++ cl_git_pass(git_indexer_new(&idx, ".", GIT_OID_SHA1, NULL)); ++#else ++ cl_git_pass(git_indexer_new(&idx, ".", 0, NULL, NULL)); ++#endif ++ ++ cl_git_pass(git_packbuilder_foreach(_packbuilder, foreach_cb, idx)); ++ cl_git_pass(git_indexer_commit(idx, &_stats)); ++ git_indexer_free(idx); ++} ++ ++static int foreach_cancel_cb(void *buf, size_t len, void *payload) ++{ ++ git_indexer *idx = (git_indexer *)payload; ++ cl_git_pass(git_indexer_append(idx, buf, len, &_stats)); ++ return (_stats.total_objects > 2) ? 
-1111 : 0; ++} ++ ++void test_pack_cancel__foreach_with_cancel(void) ++{ ++ git_indexer *idx; ++ ++ seed_packbuilder(); ++ ++#ifdef GIT_EXPERIMENTAL_SHA256 ++ cl_git_pass(git_indexer_new(&idx, ".", GIT_OID_SHA1, NULL)); ++#else ++ cl_git_pass(git_indexer_new(&idx, ".", 0, NULL, NULL)); ++#endif ++ ++ cl_git_fail_with( ++ git_packbuilder_foreach(_packbuilder, foreach_cancel_cb, idx), -1111); ++ git_indexer_free(idx); ++} ++ ++void test_pack_cancel__keep_file_check(void) ++{ ++ assert(!git_disable_pack_keep_file_checks); ++ cl_git_pass(git_libgit2_opts(GIT_OPT_DISABLE_PACK_KEEP_FILE_CHECKS, true)); ++ assert(git_disable_pack_keep_file_checks); ++} +diff --git a/tests/resources/small.git/HEAD b/tests/resources/small.git/HEAD +new file mode 100644 +index 0000000000000000000000000000000000000000..cb089cd89a7d7686d284d8761201649346b5aa1c +GIT binary patch +literal 23 +ecmXR)O|w!cN=+-)&qz&7Db~+TEG|hc;sO9;xClW2 + +literal 0 +HcmV?d00001 + +diff --git a/tests/resources/small.git/config b/tests/resources/small.git/config +new file mode 100644 +index 0000000000000000000000000000000000000000..07d359d07cf1ed0c0074fdad71ffff5942f0adfa +GIT binary patch +literal 66 +zcmaz}&M!)h<>D+#Eyypk5{uv*03B5png9R* + +literal 0 +HcmV?d00001 + +diff --git a/tests/resources/small.git/description b/tests/resources/small.git/description +new file mode 100644 +index 0000000000000000000000000000000000000000..498b267a8c7812490d6479839c5577eaaec79d62 +GIT binary patch +literal 73 +zcmWH|%S+5nO;IRHEyyp$t+PQ$;d2LNXyJgRZve!Elw`VEGWs$&r??@ +Q$yWgB0LrH#Y0~2Y0PnOK(EtDd + +literal 0 +HcmV?d00001 + +diff --git a/tests/resources/small.git/hooks/applypatch-msg.sample b/tests/resources/small.git/hooks/applypatch-msg.sample +new file mode 100755 +index 0000000000000000000000000000000000000000..dcbf8167fa503f96ff6a39c68409007eadc9b1f3 +GIT binary patch +literal 535 +zcmY+AX;Q;542A#a6e8^~FyI8r&I~hf2QJ{GO6(?HuvEG*+#R{4EI%zhfA8r{j%sh$ +zHE~E-UtQd8{bq4@*S%jq3@bmxwQDXGv#o!N`o3AHMw3xD)hy0#>&E&zzl%vRffomqo=v6>_2NRa#TwDdYvTVQyueO*15Nlo%=#DXgC0bhF3vTa`LQGaO9;jeD$OP?~ +za$G4Q{z+Q_{5V?5h;a-noM$P{<>Q~j4o7u%#P6^o^16{y*jU=-K8GYD_dUtdj4FSx +zSC0C!DvAnv%S!4dgk +XB^)11aoGMJPCqWs%IS0YSv(eBT&%T6 + +literal 0 +HcmV?d00001 + +diff --git a/tests/resources/small.git/hooks/commit-msg.sample b/tests/resources/small.git/hooks/commit-msg.sample +new file mode 100755 +index 0000000000000000000000000000000000000000..f3780f92349638ebe32f6baf24c7c3027675d7c9 +GIT binary patch +literal 953 +zcmaJy@-{3h^^Cx;#d0zEA@DDc$nY4ez&|=%jTg@_HU*ub=!!y$xW09TSjlj +z(`I@QCsM`!9&80$I98wsQ8yK#)Orb<8re8FjkKh630D$QUDwi~(gkX=RunYm$rDjk +zlp%RUSnzA#6yjdG5?T?2DcYKp+v_lts0ljn&bh3J0bD5@N@1UKZ190O6ZeWr-BuZ^ +zWRebCX%(%=Xoj#(xYk1Cjtr!=tyBesf@m6}8zY6Ijbz9i9ziI_jG9MvR +zDH*e>^ga9IR?2wrSrAVm;eButj4Y>7(E2?b~jsu>& +zRKCJ7bp#19sqYh627wD%D9R$8=Ml$TNlumDypl~$jBu*G>5fIR^FB0h0Ex&TGZNr> +zL5hs1_K>taRb!|ThN9ns7^@4MXKP+6aGI_UK)T-M#rcP$;kN(Vcf#P)+5GzWa{l@J +z>-E{`$1iiNVYxq27}j;uo%;)r3kJI2xCFF~Ux;$Q%) +wjbk6JlDCM`jU&P+UVOvg`|iYl<7~9k>HHB4I;pdlQ=I-^$DrHaN$@lH1?P!0U;qFB + +literal 0 +HcmV?d00001 + +diff --git a/tests/resources/small.git/hooks/fsmonitor-watchman.sample b/tests/resources/small.git/hooks/fsmonitor-watchman.sample +new file mode 100755 +index 0000000000000000000000000000000000000000..41184ebc318c159f51cd1ebe2290559805df89d8 +GIT binary patch +literal 4777 +zcmbtYYi}F368$Xwipg4lq(BeHMvzvH-4;n7DGJBPqq#tw3aed8+IU5-m)yvL>;Cqh +z8FFRGj$`9CA8aoJ?j^$%==FV``-=rhLcPW`McSytRm~mEO7_&_cAVZrf1fFy*ha@8oe%*-aBYE 
+zcjzZg>LOkgxuUr-XJnHyD;zmPnRaSc#!k_P*d_BttRdc+J6G7za5#+^Y1nkc2Oowk`ya47uUR3Feu?B(w;S{(VYzxh}q-=#zP@uxSx{wbyPUMFU;K(06)$o{07&3yI?q{GqMcQ1c_^M<0< +zF4acAV)Il-V(rCTC1(;bsZ*}bl8dmejAk~yb`B}!^0;g^(o9kGUfZfDOvyp@x4OQt +zSgWh6T|3eq;9MFs8-#z+FDM1h(IjRUP|``PxupgJ7CUHOH90gbgl^2~97`?_X{P)) +zB*$r1cDlF-%azKND}?Gv`2K8-9v5e`gQoft=j?T<&a13c^!wY_$D`5z-X1g?ty&6- +zQN50{8?bUk9AI->^W@~~nkOghHIC2YN+AXkLQG_2-{Pq3%{`3KUMeG$iIn%%^6*NYb +zn|_BdV#C)n4565VccX;uT8&z3vSi!HXGbUj2B!R +zdz~&#fk#L-&k$fLwo$4?>12g@AXOKFekuo#6EHB%gmpD?1eyh%N8s{2wGoTu +z*@6cEZ^ZW!FAF_|JL`NkV7k}0ow|-2jHwbgH0;c@Dq*o?@&c*HnGdyx6^su8Qk%2{ +z*ye(dxO*6-&>qn1+zw}tc6;=sOX{4WB=VqjTS^))y1jlX2Q;=e!qMmFA5lC$#;BxC +z=Y%tRpWxb+_uQAvAw7Q{HGV#R$xb&udLCzZ+HN?kTyB};1EJ8UlQ5!>5eGW@)RX0n +zkjj>EF!3=0Gl^8dzv$B^NMGRxJoqN4A`xq-@wCbrx*u2NmIJ1xZ%H +zh;{|4T3(!E9sY#Ni(wUJYs1MmIc9bl)(4Nl3_wD_BWB>i<1S(LX7m*{Q7PU$muMS* +zM!%0EZx-Vw=Zey;erC?SNxF;pY@^A%-krqzfLV2meBp1vWdyArFYn`DD19T)Hw(?n +z)}{NP(Lk(o*?gl#B@pP7^*r|=;PIDT4|F#{2Hzh-AL0Rv$6uT;n|WzE4=slK?on@(fZeGhRgQCu56qB +z{+n81Az96qnQjMY*-*r-KV*7;Z#4QuJRJJV$M^KdldiMhj?ImK6~FvwJ*L5a){QoM=L5TYHkGO1$UrO3`a>{?Opw|b +zG(#59NQ#jFL9v~vgOVkM@^^(^A}onOE))yWEwhIlk&{ZyseZ^O0b=w8&O=BK{k<5B +k^Q-B@eG}LeHrquz%(SVEp_N)VhYZikCW__82JXfD17`J9Qvd(} + +literal 0 +HcmV?d00001 + +diff --git a/tests/resources/small.git/hooks/pre-applypatch.sample b/tests/resources/small.git/hooks/pre-applypatch.sample +new file mode 100755 +index 0000000000000000000000000000000000000000..625837e25f91421b8809a097f4a3103dd387ef31 +GIT binary patch +literal 481 +zcmY+ATTa6;5Jms9iouO45IBJXEg&Jm9@v1LPHMM_ZR|;#6tQh$71hSXq*MxP;V& +zj0cY7SCL=x4`a46sF)C>94Gk%=3q$W2s;j6iHtB2$R0%gix4oK@&T~=ALd_o*CKxt +I-`Pv{1Bpzc>;M1& + +literal 0 +HcmV?d00001 + +diff --git a/tests/resources/small.git/hooks/pre-commit.sample b/tests/resources/small.git/hooks/pre-commit.sample +new file mode 100755 +index 0000000000000000000000000000000000000000..10b39b2e26981b8f87ea424e735ef87359066dbb +GIT binary patch +literal 1706 +zcmZuxU2ohr5PY_N#pZ0-F<{-v&v-X^RA+u>k}E$4d&uD7=g_fA8+pNNV=4s0|iD3p<=DTXClTS +zXV23tJ;ECmN@M0j@zUAKEYW@3bv!SeYZ8ZH`YQNTApFVNc;F|9r5p4TqGs=>8E?6y +zi|gY{iM#PG1nL?UE9YCnWTk72kgZPG*Usqw!~Qd3c?~@w2?%eg@~)+VlSs6N5Yf2^ +zz;owF#K#r^&KMq1A`oqVGFpD&-!Pv|Rc +zO3KSqA@h9nSc%bm`0)Amk6*J}@14J*1-219l%%7D!Pl}UK>|lVi0Dfgu2jN3WC!uL +z0ej??b2iSehVgdnWHmZV4kUo*QL#aiIp}U=9x)IXk}JJ7VQ;CI9Rtn5e0VcjbYcVt+`x5D+svCGD;Z5hm*E$jSEQZ%SQ(}oLgslTvrKK@9Qf#b!hajVFnp9@oIix;NcI9Wk +xjnh0ya!AWet{I7YpD;y6HXyzI*lfSvH=o6*7mJZPkuaYpm>vzZ`wyGEBtOQPo|pgt + +literal 0 +HcmV?d00001 + +diff --git a/tests/resources/small.git/hooks/pre-push.sample b/tests/resources/small.git/hooks/pre-push.sample +new file mode 100755 +index 0000000000000000000000000000000000000000..02cbd80c287f959fe33975bb66c56293e3f5b396 +GIT binary patch +literal 1431 +zcmaJ>U60!~5PUX&#a1@z9B{IIZkjLT0t5kq9#8~D(I5{+8&J~9;#ndUk~-ZT`r|uG +z$#K$$J{TsKs*LP1}9!GoZ@4I4myMMG_di|of +z%?llx{O8TS-#^;(OioEmPy%kwWQBA1OMzV{hsQ8XFzS1k!~YQoLa5 +zhtP1fA$q6VmMbbAC_9)4I628k*O5J$NR19uHe4QYDK<==I~SQk)Nu%xQ~KH +z53w=!ke(FGb_PpnZfd*+hnXDTn;2*`u^~;?+5C~cn?bRka7NR%06%e6O91{MAgN6J +zmlO8{Biw4&wr&&(z4p3eln`E}XR9m9bNYZ7Ibrg(4yZIXrfgD7N*AFD7L3YSM#j}% +zo__rOS5fr;@8UM<6cl+cv_$YB$PQ&9dv($eM*))g!_cu!QcSh-mqE9i#QDZT)=o#` +z?8!RtE?w6p?GkGZ-6yt_p~5~4ecu|Sf^)6096%h*q-eNiEA1;Xwg)p~Q&iGSG7-IQ +z9aII&`ps$WOojFA`*bjGkFk|E@sHHuD}W^d`7YJ3YE^zrQnqR +zGoq?;YGKe)93o|_=^f%3U1KYZGPOXRRxK7w`UUbMMa3<86OmVH!EKP$8RCrn9mWX+ +zC?9yF!fRVLmud3hF<}x;;sR}f(*r}6Gap3fR6zLHR~kbMgD{98N`L+r&?3p~*0+FX +zcAL%j=(SO}xTJUTvA`&Lf`2mv4koPG9&|;2+68$XxiXKL@ma;l5d2^5Ba_rPh_DHI-u1#&_upttZXp;no03$20|NFiM 
+zK#D#xQ>!Z3JkX8T-LDVm!B5j7y_{;JDmmTTef+K1oIiPzeEr+Ai*<2PUgnG4^ZB>p +z_fkAvoR1emuf~ri^K$-px=4#D-vY9w& +z`bCv#2zVn=YnJyeNey(Y +zRh`9vtLw~A+5zsjp|W0Nsa|29Rm!B>OoG5a+vi;ari8O>KkU!KAWg_fa3btK2x*_@ +z0bEc7J;Ubghm}n9bOi(Sv_B66nQ7U)J7f0fO}8Wuf*uorcIgEG +zOHc|-V6+HlRhOP}?Cn?@5iwSl43abmBA^2lyL$+cpabCGVES+v^j^FO_}?FIp%En%Ll?Z*7*}TwrZyg5OSZ9rY-`aU~Mc-jjv{Ll)FLMgtB4ujktfQ`Xhqrka +zT=P!A;9w^;Z?PqpLwOLu=cj3L>TdUKw2;DMu)`oVkj}#bcDx4tYg=j%D`+i{W~fVM +zVmZ>W9VMyin9c-0KzI_;iZ-g|OyzuG`Yq%(%dvl;ifnVr0;jWE&S`z|rQu=!yHBBO +zx`OJ;oOQ(KKM<$(bC38o>pD0%|HA(E0TRw7qj$fJ_pRN+7Nm>dSC(gLg{(`t+5Z=?o+}wXU4tHy+&%F&aRhFebeEhR2R5|$#Ycbp^w@t +zTl%=f1t=w+WpJzF<|CE@?SCNAz)%9?w33lQ8vrHJqPfH9@}qs*QXOG71W=ylx;wOB +zcx!Bj^)Yy6WX$a^vBkBJ5CobqlaDx_B0c<3b+8)f84LCrt;e;qxc+7>VbwVK{skNv!wvBiTa^9Iu +zkwP;VK)jH$WJ{`MRwAA9fal!y0dtV;FWg8PTkWU>CwnqD>1ZX2B@;$DlX%C5MI+}{ +z9xQVnffR*~v2KAUj*hCdgul~`bk#mk`o>zk9)<2Uc8?hUZAEvd!`9em)~$Z)zev>w^8 +zyAgCP_$&Y)7HSQ84`xG}OeTavaEswwF|8Xpi5iZzZa@hCiv(J-%bfFC&)HLlO+Rhw +zG6g?9eL5&A!SuJnQ6}LxG%tU+@vZ`i+!+Rz6iYvsTdhnPo7lW{m-}{hya@viX4)XZ +zngaw+j;gloB#|UwI@8sOmQpc`h+bicQJnQIB5eifIMQNgD2+oai33m!34~xU|0Azj +zhu$8z+T5^;Pxx@d{N)pzOJLSa^e;aDf$W%N5XcOf!mGC9l9j$Ev2h6N+6ZQC+CJzl +zaM7?S!SrFLS2DASjj(h6y1WN3N?|bmqmyzm!&nLoE|`rKBOc_yDF$a#FsUn!IQf(t +zdC&Us(kQz*7mvH^j*^MC@>wTDb}g%~sx*ng#>{@lR=XG-Z5_ +z#<9*Oh0joMzt;nS)ObAp)347`D=}r-;nV!TbIq&xrGRGsF6fZg+!VkfUei@_&l-M& +zPqQ+Dw)RV}+)I8RuqAxa`Pv8e&!_gXS=e2-un>=Ktn}-;%lLZxaVn?Q>yZCb2R3Wk +z77zr%;Rq&h|2ncqyKYmFI0148JVY7Q$V5p=dWj+Qqpu%i|xp2C=WaOb2Wudn^h0EcD%$p9YVU1fnoRV9`(cy(vv6K>FXS!2jY>1GnU--7)4usH&K +zao*&P^@9~YmUe|ZdLW@C>H;!*Vt3>Nw4M*;=?j(TBD#O@XCv0|MEhA;z}kTFRv@`tPHhp=&Yh +zg%Zhg4i7o_k{a5i&f5;tZ==%}^Sn4aD_6%qs_XAuJt&EumdH4Yu`UjT<-+XHTuHss+b +YOmM2;hq8Egm*4=7_P9T{21QBYH*F=mfB*mh + +literal 0 +HcmV?d00001 + +diff --git a/tests/resources/small.git/hooks/prepare-commit-msg.sample b/tests/resources/small.git/hooks/prepare-commit-msg.sample +new file mode 100755 +index 0000000000000000000000000000000000000000..b1970da1b6d3f42f00069fd17c325de72cda812e +GIT binary patch +literal 1702 +zcmb_cTW{Mo6n>t6#i?x6xmZ$SFLf{QfG*3r0L?Pg?px55l8$UTGO3bO;spKi{V3XX +z))weX0X>M9bNMcZ-6yG%>(n}JI2|25dr}WZBP@ih?JX^+@ +zu#5O48P>yRX(mfDIhYP)doc1&TADZa@ZGpusJ$6G+e$ZMcmC +zoOosDQPS}l{H?YPsq(4;0SGkATa9eeqAaDcjq8n2wALbFwU@2i@FAaRV!=uw-nwx1gKn2SvY +z>Ff>;2sg!+Hxfkwv1lsiii=p6WenF=5)6LZcQaZ=aS_}+-4Y&?!@HWh|<^gJ21!|T@+%On#w6azxPHV}XsRbe*w +zR_TZ2XEsQa1lPK~biYqg@0-RW@5J1@=<87cFzEUABdCoFH2CZo?}l(Z*!OFqUxo>K +z_d`l#4d9|H6;VPT{X?^{VJ>oL|D7K{BJwwqB>`YcPoGk+9hbvHnoQ{EM|kPgD_`wk +zKm4#2xu;-y`RAm!=L_BnLvJ8$AZm8@?)v<%vwvsw8AF2x6!mTT;c72A_~U9nIq0ST +zv)N0!I!^1p=g8-RQfx5)E_Mb_4I2vtQpI30XZ&t-9h5!Hn + +literal 0 +HcmV?d00001 + +diff --git a/tests/resources/small.git/hooks/push-to-checkout.sample b/tests/resources/small.git/hooks/push-to-checkout.sample +new file mode 100755 +index 0000000000000000000000000000000000000000..a80611e18896f212c390d845e49a3f6d5693b41d +GIT binary patch +literal 2840 +zcmai0U31$u5PXh)#YOS7cE^-rw@uolNhe9&aUS|HtvhX>G$45tVUYj>fRdF?|9kfU +zNR~aG=E)WbEbeyq7JTw}ZuHIE2kUtL<AoeCNptd-NM1aZLhESzC;I`+Ns +zfmNNjdAp^W8#Q*}l>CT7RB9F5(BbI8ly2l~+E};JW|>&d1)=epZ-8vm8ppkbEVn#R +zt30a5A-c(YQR8eM5%;|UAnO>rt!&@x@G@yp+92%w-}%(5P_+P&Wf_zb$f-Qrl5(7z +z2ah(bkE;!DK(&aAMuQ%1TS>ai?wSXCOCSj=_}8x4IbCx^$}9q)whwv)SBt| +zg#MX4;;Oau`m=MI9(^&zPbueY@~>3*ixX%mvR5m_1&nAg@ZKvY1E$O}&EtLiG;mhV +z1xhMIm~fGjmf_#{62f`y;09?I7M1W2tWQvz<}i9lR>OpQyUJi45_&*pQus&EkwY<> +zI|ZAx=*3i9a-)g)hXkvO7>UJ5MNgL(Z+-wpXVcgbSgpmFmbf1~DPA(OVGI&FNLeIE +zNH!_aiH$vsif$_j7=T2{cS(!DOI`~bn@)vSd-0d7xL=DF;UNP|tW}4ih>DvHtu9tY_pbJ6x(6E*hxgC 
+zzNDao%qlr-IE%YGbS4hF!n!on7#W3$bX-_hbZAaws^nHu#)Dx=WzdbJ>AKzAy@T$x +zSWE^x9+|TEHVEPyaPYa0DOChp?AeHSBBDbZNokQpAY{lE!7geZI=jV)G^2@l)&91Zb1+`T+oq9wWF +zRV~kGTGce0O~p^6mj{kT5kL(pv>r;Lvd7VDX*P>A^Th`$3cWO0L81p4Ysdo3ZP1(SrR-peEdTo;-@bkB((G +zPHYQXUL!@Q$e(OQ;R9r%@Afz+50I7>*^^c&&|E*r-jN)LH=pM4AqMwWxSv|nqjddE +Z4{_hwv8!W(T +zYw`X3V>TCdnSD1ru8&`j=2DIPbCT@SnIgUw>$+lEYP}+x8(BMYnr=iT3*ndq)xzaV +z>I+qjv}vC#8_9M+b1p#uNS0M0)q

8!3p_LRQ0MA3M`!2foxzRUjbFY@}O~(ki=S +zqscnq8cU*dY)D$$cqE}n)V0yIk>CNKHCrndOtSP*HbOb;nbwAHSb;R+gs^?^Dve%) +zoW}t(*D}$>O3ab0TS^-;J|u&sb-PkZzo#kn*#xYt(;FGuwzSb^g&RDiGcOz9TB;Hu`nJh)$W=C=XCSm2AY=$w3G3P-V#Oo+N*;#2 +z4ijJ-pBZ=;T(RTgp_HYrD!uW-dTMfkuqY5jwOy)~gM;#=P^i{!l7`pXTS^s(&^{RU +zydaw}OpS#^D1cXM8?FW+fh`t7D(g;yr6|}fdaNtZBx3hlK~IpkTu3!Qq%R+zAo#t}Bs8^3$vHD+-TGT@`F>H1Cc#WAVW;&$S6%fE2d6@kLS0g&ihIM{}0z +z8#XhD>b>3{(BH|Px7}&lJ4%y1v(CihZJx@8MPoGdl*BJGD;usf*iS7%;{Joe; +zNFuBa>*~o&qETDPo~u&~$FxE1xb^x&(CbE`Y3GfsibL2rl+L;>P6j&Y3U>K$mkp*6 +zd`Q{<^+^&;GskGjwD-%!boR&i-TCA9UOR|@=GYb5x#+dhd7fkaVIR^pol`Mv+rUbmZ43dVL6^S7g3{NsPiG$iy$5EDB% +z6KIgnb$H(n&t3e4E6d4V7w^B?JS}JkG)PM6+X3Co`SQs($O*AA+MG~{S7RJ=cy-l& +z>~%3y`tjfx2>uOutB_^s +ziwG=e=ch|FQ0IkN91US7rhdQkXhwwt$gU0WEVDjo=IPb+?6PC=s8}J*ua(Ms))`UL +fi$|vMHn?H_tSE3ettp-hLlsZCxaLX8(nU;bVRB;Ce6@s#eu2|WvLz>- +zvy(&>Gyfp@+BtKnpqWkKi^+v{4jn_pNw_zeuxETifiGO|)w}OANj2n2D^K=o3j6P6uOL70#cbA{uzWXDlk1wr9GV1X(2W{RuTvjXV +zCmd8u +zH%V`94=q3)Dk)PHNrnFC(T1)Om6f{Usj;u1R->&XoCYVK2V3ZlgZuF?N}1+33OER*x +z*9Z=L=zI8CN>A_^jYjt0F$psO$sL=38q5q|SG)qCN6{^>RFh5E&l5GZ$pEahnF&d+ +z5c>64t}uJPkf~_!VUj#&N%nC-gUMj%=@B=!V>&}xtj2%@-mOm#rQUSJ3(ccmc+fza +znZ#uxF>N?QN5UrIEd!5RgHEfW#;(nKYF+D<*rdshJ$X-z2OZ2X;)nn@KSVdVhaA?}@3;6gZxb4v +zozoWSr{{+!h}zGpumG3H`=AvWpm^9kW;J$Jp^Xl*?8ckr`fqN%c|Z;VC0|cM4vSrk +zH_O8Yvh85nvJp^;``wo8=z0f`FWg?`>gO#y1hjX1{}rTlg9rwIKia8eyGexA3GnuR +z`Rg~XZoW;0pA)vI8=p5!+6sIn#C^FCvR>ffv39h6SCNi9v);%WD;WZ`of_MgwyRWy +z-yY%n*Y>X89W-v4`Ff%bx$Vkn}$!Ay}rnY6F$m-Kg*KD_+;Lx#g4|^&N +I02NaX#p`nv=Kufz + +literal 0 +HcmV?d00001 + +diff --git a/tests/resources/small.git/objects/af/5626b4a114abcb82d63db7c8082c3c4756e51b b/tests/resources/small.git/objects/af/5626b4a114abcb82d63db7c8082c3c4756e51b +new file mode 100644 +index 0000000000000000000000000000000000000000..822bc151862ec3763cf2d3fa2372b93bbd3a4b65 +GIT binary patch +literal 30 +mcmb>0i}&W3IZ_@1U=^!a~EV1casc=c+{&un1qQN*i9hD|0|m(2n|iwp*q%W +z%N;b$hu%cM`$TMo*~EnC1BFP&Pfj~;jZVKXQ96s_PhV<-XAROi+@-v8dBLUa`!;GB +k^iXlEv8$>R)1G>9th&t3j;s7J{?^9n|7U^`%mXoWC24Q^m!3%@{ + +literal 0 +HcmV?d00001 + diff --git a/scripts/bigsur-nixbld-user-migration.sh b/scripts/bigsur-nixbld-user-migration.sh index 0eb312e07..bd9eeb920 100755 --- a/scripts/bigsur-nixbld-user-migration.sh +++ b/scripts/bigsur-nixbld-user-migration.sh @@ -1,8 +1,8 @@ #!/usr/bin/env bash -((NEW_NIX_FIRST_BUILD_UID=301)) +((NEW_NIX_FIRST_BUILD_UID=351)) -id_available(){ +id_unavailable(){ dscl . list /Users UniqueID | grep -E '\b'"$1"'\b' >/dev/null } @@ -15,7 +15,7 @@ change_nixbld_names_and_ids(){ while read -r name uid; do echo " Checking $name (uid: $uid)" # iterate for a clean ID - while id_available "$next_id"; do + while id_unavailable "$next_id"; do ((next_id++)) if ((next_id >= 400)); then echo "We've hit UID 400 without placing all of your users :(" diff --git a/scripts/flake-regressions.sh b/scripts/flake-regressions.sh deleted file mode 100755 index d76531134..000000000 --- a/scripts/flake-regressions.sh +++ /dev/null @@ -1,27 +0,0 @@ -#! /usr/bin/env bash - -set -e - -echo "Nix version:" -nix --version - -cd flake-regressions - -status=0 - -flakes=$(find tests -mindepth 3 -maxdepth 3 -type d -not -path '*/.*' | sort | head -n25) - -echo "Running flake tests..." - -for flake in $flakes; do - - if ! 
REGENERATE=0 ./eval-flake.sh "$flake"; then - status=1 - echo "❌ $flake" - else - echo "✅ $flake" - fi - -done - -exit "$status" diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index 24c9052f9..89c66b8f4 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -4,7 +4,17 @@ set -eu set -o pipefail # System specific settings -export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-301}" +# Notes: +# - up to macOS Big Sur we used the same GID/UIDs as Linux (30000:30001-32) +# - we changed UID to 301 because Big Sur updates failed into recovery mode +# we're targeting the 200-400 UID range for role users mentioned in the +# usage note for sysadminctl +# - we changed UID to 351 because Sequoia now uses UIDs 300-304 for its own +# daemon users +# - we changed GID to 350 alongside above just because it hides the nixbld +# group from the Users & Groups settings panel :) +export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-351}" +export NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-350}" export NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d" readonly NIX_DAEMON_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index 6aee073e3..a487d459f 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -23,10 +23,10 @@ readonly RED='\033[31m' # installer allows overriding build user count to speed up installation # as creating each user takes non-trivial amount of time on macos readonly NIX_USER_COUNT=${NIX_USER_COUNT:-32} -readonly NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-30000}" readonly NIX_BUILD_GROUP_NAME="nixbld" # each system specific installer must set these: # NIX_FIRST_BUILD_UID +# NIX_BUILD_GROUP_ID # NIX_BUILD_USER_NAME_TEMPLATE # Please don't change this. We don't support it, because the # default shell profile that comes with Nix doesn't support it. @@ -530,9 +530,7 @@ It seems the build group $NIX_BUILD_GROUP_NAME already exists, but with the UID $primary_group_id. This script can't really handle that right now, so I'm going to give up. -You can fix this by editing this script and changing the -NIX_BUILD_GROUP_ID variable near the top to from $NIX_BUILD_GROUP_ID -to $primary_group_id and re-run. +You can export NIX_BUILD_GROUP_ID=$primary_group_id and re-run. 
EOF else row " Exists" "Yes" diff --git a/scripts/install-systemd-multi-user.sh b/scripts/install-systemd-multi-user.sh index a62ed7e3a..dc373f4db 100755 --- a/scripts/install-systemd-multi-user.sh +++ b/scripts/install-systemd-multi-user.sh @@ -5,6 +5,7 @@ set -o pipefail # System specific settings export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}" +export NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-30000}" export NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d" readonly SERVICE_SRC=/lib/systemd/system/nix-daemon.service @@ -95,6 +96,9 @@ poly_configure_nix_daemon_service() { if [ -e /run/systemd/system ]; then task "Setting up the nix-daemon systemd service" + _sudo "to create parent of the nix-daemon tmpfiles config" \ + mkdir -p "$(dirname "$TMPFILES_DEST")" + _sudo "to create the nix-daemon tmpfiles config" \ ln -sfn "/nix/var/nix/profiles/default$TMPFILES_SRC" "$TMPFILES_DEST" diff --git a/scripts/meson.build b/scripts/meson.build new file mode 100644 index 000000000..777da42b1 --- /dev/null +++ b/scripts/meson.build @@ -0,0 +1,20 @@ +configure_file( + input : 'nix-profile.sh.in', + output : 'nix-profile.sh', + configuration : { + 'localstatedir': localstatedir, + } +) + +foreach rc : [ '.sh', '.fish', '-daemon.sh', '-daemon.fish' ] + configure_file( + input : 'nix-profile' + rc + '.in', + output : 'nix' + rc, + install : true, + install_dir : get_option('profile-dir'), + install_mode : 'rw-r--r--', + configuration : { + 'localstatedir': localstatedir, + }, + ) +endforeach diff --git a/scripts/nix-profile-daemon.sh.in b/scripts/nix-profile-daemon.sh.in index eb124c0b5..59c00d491 100644 --- a/scripts/nix-profile-daemon.sh.in +++ b/scripts/nix-profile-daemon.sh.in @@ -52,7 +52,7 @@ elif [ -e /etc/pki/tls/certs/ca-bundle.crt ]; then # Fedora, CentOS else # Fall back to what is in the nix profiles, favouring whatever is defined last. check_nix_profiles() { - if [ -n "$ZSH_VERSION" ]; then + if [ -n "${ZSH_VERSION:-}" ]; then # Zsh by default doesn't split words in unquoted parameter expansion. # Set local_options for these options to be reverted at the end of the function # and shwordsplit to force splitting words in $NIX_PROFILES below. diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in index e868399b1..2d6bf6e95 100644 --- a/scripts/nix-profile.sh.in +++ b/scripts/nix-profile.sh.in @@ -1,31 +1,35 @@ # This file is tested by tests/installer/default.nix. -if [ -n "$HOME" ] && [ -n "$USER" ]; then +if [ -n "${HOME-}" ] && [ -n "${USER-}" ]; then # Set up the per-user profile. - NIX_LINK="$HOME/.nix-profile" - if [ -n "${XDG_STATE_HOME-}" ]; then - NIX_LINK_NEW="$XDG_STATE_HOME/nix/profile" + if [ -n "${NIX_STATE_HOME-}" ]; then + NIX_LINK="$NIX_STATE_HOME/profile" else - NIX_LINK_NEW="$HOME/.local/state/nix/profile" - fi - if [ -e "$NIX_LINK_NEW" ]; then - if [ -t 2 ] && [ -e "$NIX_LINK" ]; then - warning="\033[1;35mwarning:\033[0m" - printf "$warning Both %s and legacy %s exist; using the former.\n" "$NIX_LINK_NEW" "$NIX_LINK" 1>&2 - if [ "$(realpath "$NIX_LINK")" = "$(realpath "$NIX_LINK_NEW")" ]; then - printf " Since the profiles match, you can safely delete either of them.\n" 1>&2 - else - # This should be an exceptionally rare occasion: the only way to get it would be to - # 1. Update to newer Nix; - # 2. Remove .nix-profile; - # 3. Set the $NIX_LINK_NEW to something other than the default user profile; - # 4. Roll back to older Nix. - # If someone did all that, they can probably figure out how to migrate the profile. - printf "$warning Profiles do not match. 
You should manually migrate from %s to %s.\n" "$NIX_LINK" "$NIX_LINK_NEW" 1>&2 - fi + NIX_LINK="$HOME/.nix-profile" + if [ -n "${XDG_STATE_HOME-}" ]; then + NIX_LINK_NEW="$XDG_STATE_HOME/nix/profile" + else + NIX_LINK_NEW="$HOME/.local/state/nix/profile" + fi + if [ -e "$NIX_LINK_NEW" ]; then + if [ -t 2 ] && [ -e "$NIX_LINK" ]; then + warning="\033[1;35mwarning:\033[0m" + printf "$warning Both %s and legacy %s exist; using the former.\n" "$NIX_LINK_NEW" "$NIX_LINK" 1>&2 + if [ "$(realpath "$NIX_LINK")" = "$(realpath "$NIX_LINK_NEW")" ]; then + printf " Since the profiles match, you can safely delete either of them.\n" 1>&2 + else + # This should be an exceptionally rare occasion: the only way to get it would be to + # 1. Update to newer Nix; + # 2. Remove .nix-profile; + # 3. Set the $NIX_LINK_NEW to something other than the default user profile; + # 4. Roll back to older Nix. + # If someone did all that, they can probably figure out how to migrate the profile. + printf "$warning Profiles do not match. You should manually migrate from %s to %s.\n" "$NIX_LINK" "$NIX_LINK_NEW" 1>&2 + fi + fi + NIX_LINK="$NIX_LINK_NEW" fi - NIX_LINK="$NIX_LINK_NEW" fi # Set up environment. diff --git a/scripts/sequoia-nixbld-user-migration.sh b/scripts/sequoia-nixbld-user-migration.sh new file mode 100755 index 000000000..88e801706 --- /dev/null +++ b/scripts/sequoia-nixbld-user-migration.sh @@ -0,0 +1,169 @@ +#!/usr/bin/env bash + +set -eo pipefail + +((NEW_NIX_FIRST_BUILD_UID=351)) +((TEMP_NIX_FIRST_BUILD_UID=31000)) + +nix_user_n() { + printf "_nixbld%d" "$1" +} + +id_unavailable(){ + dscl . list /Users UniqueID | grep -E '\b'"$1"'\b' >/dev/null +} + +any_nixbld(){ + dscl . list /Users UniqueID | grep -E '\b_nixbld' >/dev/null +} + +dsclattr() { + dscl . -read "$1" | awk "/$2/ { print \$2 }" +} + +re_create_nixbld_user(){ + local name uid + + name="$1" + uid="$2" + gid="$3" + + sudo /usr/bin/dscl . -create "/Users/$name" "UniqueID" "$uid" + sudo /usr/bin/dscl . -create "/Users/$name" "IsHidden" "1" + sudo /usr/bin/dscl . -create "/Users/$name" "NFSHomeDirectory" "/var/empty" + sudo /usr/bin/dscl . -create "/Users/$name" "RealName" "Nix build user $name" + sudo /usr/bin/dscl . -create "/Users/$name" "UserShell" "/sbin/nologin" + sudo /usr/bin/dscl . -create "/Users/$name" "PrimaryGroupID" "$gid" +} + +hit_id_cap(){ + echo "We've hit UID 400 without placing all of your users :(" + echo "You should use the commands in this script as a starting" + echo "point to review your UID-space and manually move the" + echo "remaining users (or delete them, if you don't need them)." +} + +# evacuate the role-uid space to simplify final placement logic +temporarily_move_existing_nixbld_uids(){ + local name uid next_id user_n + + ((next_id=TEMP_NIX_FIRST_BUILD_UID)) + + echo "" + echo "Step 1: move existing _nixbld users out of the destination UID range." + + while read -r name uid; do + # iterate for a clean ID + while id_unavailable "$next_id"; do + ((next_id++)) + # We really want to get these all placed, but I guess there's + # some risk we iterate forever--so we'll give up after 9k uids. + if ((next_id >= 40000)); then + echo "We've hit UID 40000 without temporarily placing all of your users :(" + echo "You should use the commands in this script as a starting" + echo "point to review your UID-space and manually move the" + echo "remaining users to any open UID over 1000." + exit 1 + fi + done + sudo dscl . 
-create "/Users/$name" UniqueID "$next_id" + echo " Temporarily moved $name from uid $uid -> $next_id" + + done < <(dscl . list /Users UniqueID | grep _nixbld | sort -n -k2) +} + +change_nixbld_uids(){ + local existing_gid name next_id user_n + + ((next_id=NEW_NIX_FIRST_BUILD_UID)) + ((user_n=1)) + name="$(nix_user_n "$user_n")" + existing_gid="$(dsclattr "/Groups/nixbld" "PrimaryGroupID")" + + # we know that we have *some* nixbld users, but macOS may have + # already clobbered the first few users if this system has been + # upgraded + + echo "" + echo "Step 2: re-create missing early _nixbld# users." + + until dscl . read "/Users/$name" &>/dev/null; do + # iterate for a clean ID + while id_unavailable "$next_id"; do + ((next_id++)) + if ((next_id >= 400)); then + hit_id_cap + exit 1 + fi + done + + re_create_nixbld_user "$name" "$next_id" "$existing_gid" + echo " $name was missing; created with uid: $next_id" + + ((user_n++)) + name="$(nix_user_n "$user_n")" + done + + echo "" + echo "Step 3: relocate remaining _nixbld# UIDs to $next_id+" + + # start at first _nixbld# not re-created above and increment + # until _nixbld doesn't exist + while dscl . read "/Users/$name" &>/dev/null; do + # iterate for a clean ID + while id_unavailable "$next_id"; do + ((next_id++)) + if ((next_id >= 400)); then + hit_id_cap + exit 1 + fi + done + + sudo dscl . -create "/Users/$name" UniqueID "$next_id" + echo " $name migrated to uid: $next_id" + + ((user_n++)) + name="$(nix_user_n "$user_n")" + done + + if ((user_n == 1)); then + echo "Didn't find _nixbld1. Perhaps you have single-user Nix?" + exit 1 + else + echo "Migrated $((user_n - 1)) users. If you want to double-check, try:" + echo "dscl . list /Users UniqueID | grep _nixbld | sort -n -k2" + fi +} +needs_migration(){ + local name uid next_id user_n + + ((next_id=NEW_NIX_FIRST_BUILD_UID)) + ((user_n=1)) + + while read -r name uid; do + expected_name="$(nix_user_n "$user_n")" + if [[ "$expected_name" != "$name" ]]; then + return 0 + fi + if [[ "$next_id" != "$uid" ]]; then + return 0 + fi + ((next_id++)) + ((user_n++)) + done < <(dscl . list /Users UniqueID | grep _nixbld | sort -n -k2) + return 1 +} + + +if any_nixbld; then + if needs_migration; then + echo "Attempting to migrate _nixbld users." + temporarily_move_existing_nixbld_uids + change_nixbld_uids + else + echo "_nixbld users already appear to be migrated." + fi +else + echo "Didn't find any _nixbld users. Perhaps you have single-user Nix?" 
+ exit 1 +fi diff --git a/src/external-api-docs/doxygen.cfg.in b/src/external-api-docs/doxygen.cfg.in index 1be71d895..7ae4c83df 100644 --- a/src/external-api-docs/doxygen.cfg.in +++ b/src/external-api-docs/doxygen.cfg.in @@ -56,3 +56,5 @@ GENERATE_TREEVIEW = YES OPTIMIZE_OUTPUT_FOR_C = YES USE_MDFILE_AS_MAINPAGE = doc/external-api/README.md + +QUIET = YES diff --git a/src/external-api-docs/package.nix b/src/external-api-docs/package.nix index 743b3e9b7..0c592955a 100644 --- a/src/external-api-docs/package.nix +++ b/src/external-api-docs/package.nix @@ -1,8 +1,6 @@ { lib , mkMesonDerivation -, meson -, ninja , doxygen # Configuration Options @@ -37,8 +35,6 @@ mkMesonDerivation (finalAttrs: { ]; nativeBuildInputs = [ - meson - ninja doxygen ]; diff --git a/src/internal-api-docs/doxygen.cfg.in b/src/internal-api-docs/doxygen.cfg.in index f1ef75b38..bf4c42d11 100644 --- a/src/internal-api-docs/doxygen.cfg.in +++ b/src/internal-api-docs/doxygen.cfg.in @@ -41,21 +41,21 @@ INPUT = \ @src@/libcmd \ @src@/libexpr \ @src@/libexpr/flake \ - @src@/nix-expr-tests \ - @src@/nix-expr-tests/value \ - @src@/nix-expr-test-support/test \ - @src@/nix-expr-test-support/test/value \ + @src@/libexpr-tests \ + @src@/libexpr-tests/value \ + @src@/libexpr-test-support/test \ + @src@/libexpr-test-support/test/value \ @src@/libexpr/value \ @src@/libfetchers \ @src@/libmain \ @src@/libstore \ @src@/libstore/build \ @src@/libstore/builtins \ - @src@/nix-store-tests \ - @src@/nix-store-test-support/test \ + @src@/libstore-tests \ + @src@/libstore-test-support/test \ @src@/libutil \ - @src@/nix-util-tests \ - @src@/nix-util-test-support/test \ + @src@/libutil-tests \ + @src@/libutil-test-support/test \ @src@/nix \ @src@/nix-env \ @src@/nix-store @@ -97,3 +97,6 @@ EXPAND_AS_DEFINED = \ DECLARE_WORKER_SERIALISER \ DECLARE_SERVE_SERIALISER \ LENGTH_PREFIXED_PROTO_HELPER + +WARN_IF_UNDOCUMENTED = NO +QUIET = YES diff --git a/src/internal-api-docs/package.nix b/src/internal-api-docs/package.nix index 07ca6d4d9..993a257a6 100644 --- a/src/internal-api-docs/package.nix +++ b/src/internal-api-docs/package.nix @@ -1,8 +1,6 @@ { lib , mkMesonDerivation -, meson -, ninja , doxygen # Configuration Options @@ -32,8 +30,6 @@ mkMesonDerivation (finalAttrs: { ]; nativeBuildInputs = [ - meson - ninja doxygen ]; diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index 67fef1909..1a4c76ec5 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -1,3 +1,4 @@ +#include #include #include "command.hh" @@ -9,8 +10,7 @@ #include "profiles.hh" #include "repl.hh" #include "strings.hh" - -extern char * * environ __attribute__((weak)); +#include "environment-variables.hh" namespace nix { @@ -23,7 +23,8 @@ nix::Commands RegisterCommand::getCommandsFor(const std::vector & p if (name.size() == prefix.size() + 1) { bool equal = true; for (size_t i = 0; i < prefix.size(); ++i) - if (name[i] != prefix[i]) equal = false; + if (name[i] != prefix[i]) + equal = false; if (equal) res.insert_or_assign(name[prefix.size()], command); } @@ -42,16 +43,16 @@ void NixMultiCommand::run() std::set subCommandTextLines; for (auto & [name, _] : commands) subCommandTextLines.insert(fmt("- `%s`", name)); - std::string markdownError = fmt("`nix %s` requires a sub-command. Available sub-commands:\n\n%s\n", - commandName, concatStringsSep("\n", subCommandTextLines)); + std::string markdownError = + fmt("`nix %s` requires a sub-command. 
Available sub-commands:\n\n%s\n", + commandName, + concatStringsSep("\n", subCommandTextLines)); throw UsageError(renderMarkdownToTerminal(markdownError)); } command->second->run(); } -StoreCommand::StoreCommand() -{ -} +StoreCommand::StoreCommand() {} ref StoreCommand::getStore() { @@ -126,15 +127,8 @@ ref EvalCommand::getEvalStore() ref EvalCommand::getEvalState() { if (!evalState) { - evalState = - #if HAVE_BOEHMGC - std::allocate_shared( - traceable_allocator(), - #else - std::make_shared( - #endif - lookupPath, getEvalStore(), fetchSettings, evalSettings, getStore()) - ; + evalState = std::allocate_shared( + traceable_allocator(), lookupPath, getEvalStore(), fetchSettings, evalSettings, getStore()); evalState->repair = repair; @@ -149,7 +143,8 @@ MixOperateOnOptions::MixOperateOnOptions() { addFlag({ .longName = "derivation", - .description = "Operate on the [store derivation](@docroot@/glossary.md#gloss-store-derivation) rather than its outputs.", + .description = + "Operate on the [store derivation](@docroot@/glossary.md#gloss-store-derivation) rather than its outputs.", .category = installablesCategory, .handler = {&operateOn, OperateOn::Derivation}, }); @@ -238,46 +233,48 @@ void StorePathCommand::run(ref store, StorePaths && storePaths) MixProfile::MixProfile() { - addFlag({ - .longName = "profile", - .description = "The profile to operate on.", - .labels = {"path"}, - .handler = {&profile}, - .completer = completePath - }); + addFlag( + {.longName = "profile", + .description = "The profile to operate on.", + .labels = {"path"}, + .handler = {&profile}, + .completer = completePath}); } void MixProfile::updateProfile(const StorePath & storePath) { - if (!profile) return; + if (!profile) + return; auto store = getStore().dynamic_pointer_cast(); - if (!store) throw Error("'--profile' is not supported for this Nix store"); + if (!store) + throw Error("'--profile' is not supported for this Nix store"); auto profile2 = absPath(*profile); - switchLink(profile2, - createGeneration(*store, profile2, storePath)); + switchLink(profile2, createGeneration(*store, profile2, storePath)); } void MixProfile::updateProfile(const BuiltPaths & buildables) { - if (!profile) return; + if (!profile) + return; StorePaths result; for (auto & buildable : buildables) { - std::visit(overloaded { - [&](const BuiltPath::Opaque & bo) { - result.push_back(bo.path); + std::visit( + overloaded{ + [&](const BuiltPath::Opaque & bo) { result.push_back(bo.path); }, + [&](const BuiltPath::Built & bfd) { + for (auto & output : bfd.outputs) { + result.push_back(output.second); + } + }, }, - [&](const BuiltPath::Built & bfd) { - for (auto & output : bfd.outputs) { - result.push_back(output.second); - } - }, - }, buildable.raw()); + buildable.raw()); } if (result.size() != 1) - throw UsageError("'--profile' requires that the arguments produce a single store path, but there are %d", result.size()); + throw UsageError( + "'--profile' requires that the arguments produce a single store path, but there are %d", result.size()); updateProfile(result[0]); } @@ -287,51 +284,85 @@ MixDefaultProfile::MixDefaultProfile() profile = getDefaultProfile(); } -MixEnvironment::MixEnvironment() : ignoreEnvironment(false) +MixEnvironment::MixEnvironment() + : ignoreEnvironment(false) { addFlag({ - .longName = "ignore-environment", + .longName = "ignore-env", + .aliases = {"ignore-environment"}, .shortName = 'i', - .description = "Clear the entire environment (except those specified with `--keep`).", + .description = "Clear the entire 
environment, except for those specified with `--keep-env-var`.", + .category = environmentVariablesCategory, .handler = {&ignoreEnvironment, true}, }); addFlag({ - .longName = "keep", + .longName = "keep-env-var", + .aliases = {"keep"}, .shortName = 'k', - .description = "Keep the environment variable *name*.", + .description = "Keep the environment variable *name*, when using `--ignore-env`.", + .category = environmentVariablesCategory, .labels = {"name"}, - .handler = {[&](std::string s) { keep.insert(s); }}, + .handler = {[&](std::string s) { keepVars.insert(s); }}, }); addFlag({ - .longName = "unset", + .longName = "unset-env-var", + .aliases = {"unset"}, .shortName = 'u', .description = "Unset the environment variable *name*.", + .category = environmentVariablesCategory, .labels = {"name"}, - .handler = {[&](std::string s) { unset.insert(s); }}, + .handler = {[&](std::string name) { + if (setVars.contains(name)) + throw UsageError("Cannot unset environment variable '%s' that is set with '%s'", name, "--set-env-var"); + + unsetVars.insert(name); + }}, + }); + + addFlag({ + .longName = "set-env-var", + .shortName = 's', + .description = "Sets an environment variable *name* with *value*.", + .category = environmentVariablesCategory, + .labels = {"name", "value"}, + .handler = {[&](std::string name, std::string value) { + if (unsetVars.contains(name)) + throw UsageError( + "Cannot set environment variable '%s' that is unset with '%s'", name, "--unset-env-var"); + + if (setVars.contains(name)) + throw UsageError( + "Duplicate definition of environment variable '%s' with '%s' is ambiguous", name, "--set-env-var"); + + setVars.insert_or_assign(name, value); + }}, }); } -void MixEnvironment::setEnviron() { - if (ignoreEnvironment) { - if (!unset.empty()) - throw UsageError("--unset does not make sense with --ignore-environment"); +void MixEnvironment::setEnviron() +{ + if (ignoreEnvironment && !unsetVars.empty()) + throw UsageError("--unset-env-var does not make sense with --ignore-env"); - for (const auto & var : keep) { - auto val = getenv(var.c_str()); - if (val) stringsEnv.emplace_back(fmt("%s=%s", var.c_str(), val)); - } + if (!ignoreEnvironment && !keepVars.empty()) + throw UsageError("--keep-env-var does not make sense without --ignore-env"); - vectorEnv = stringsToCharPtrs(stringsEnv); - environ = vectorEnv.data(); - } else { - if (!keep.empty()) - throw UsageError("--keep does not make sense without --ignore-environment"); + auto env = getEnv(); - for (const auto & var : unset) - unsetenv(var.c_str()); - } + if (ignoreEnvironment) + std::erase_if(env, [&](const auto & var) { return !keepVars.contains(var.first); }); + + for (const auto & [name, value] : setVars) + env[name] = value; + + if (!unsetVars.empty()) + std::erase_if(env, [&](const auto & var) { return unsetVars.contains(var.first); }); + + replaceEnv(env); + + return; } } diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh index 37b3ca4a7..0efe91a50 100644 --- a/src/libcmd/command.hh +++ b/src/libcmd/command.hh @@ -14,7 +14,7 @@ namespace nix { extern std::string programPath; -extern char * * savedArgv; +extern char ** savedArgv; class EvalState; struct Pos; @@ -25,7 +25,8 @@ static constexpr Command::Category catSecondary = 100; static constexpr Command::Category catUtility = 101; static constexpr Command::Category catNixInstallation = 102; -static constexpr auto installablesCategory = "Options that change the interpretation of [installables](@docroot@/command-ref/new-cli/nix.md#installables)"; +static constexpr 
auto installablesCategory = + "Options that change the interpretation of [installables](@docroot@/command-ref/new-cli/nix.md#installables)"; struct NixMultiCommand : MultiCommand, virtual Command { @@ -113,7 +114,9 @@ struct MixFlakeOptions : virtual Args, EvalCommand * arguments) so that the completions for these flags can use them. */ virtual std::vector getFlakeRefsForCompletion() - { return {}; } + { + return {}; + } }; struct SourceExprCommand : virtual Args, MixFlakeOptions @@ -289,10 +292,10 @@ struct RegisterCommand typedef std::map, std::function()>> Commands; static Commands * commands; - RegisterCommand(std::vector && name, - std::function()> command) + RegisterCommand(std::vector && name, std::function()> command) { - if (!commands) commands = new Commands; + if (!commands) + commands = new Commands; commands->emplace(name, command); } @@ -302,13 +305,13 @@ struct RegisterCommand template static RegisterCommand registerCommand(const std::string & name) { - return RegisterCommand({name}, [](){ return make_ref(); }); + return RegisterCommand({name}, []() { return make_ref(); }); } template static RegisterCommand registerCommand2(std::vector && name) { - return RegisterCommand(std::move(name), [](){ return make_ref(); }); + return RegisterCommand(std::move(name), []() { return make_ref(); }); } struct MixProfile : virtual StoreCommand @@ -330,19 +333,21 @@ struct MixDefaultProfile : MixProfile MixDefaultProfile(); }; -struct MixEnvironment : virtual Args { +struct MixEnvironment : virtual Args +{ - StringSet keep, unset; - Strings stringsEnv; - std::vector vectorEnv; + StringSet keepVars; + StringSet unsetVars; + std::map setVars; bool ignoreEnvironment; MixEnvironment(); /*** - * Modify global environ based on `ignoreEnvironment`, `keep`, and - * `unset`. It's expected that exec will be called before this class - * goes out of scope, otherwise `environ` will become invalid. + * Modify global environ based on `ignoreEnvironment`, `keep`, + * `unset`, and `added`. It's expected that exec will be called + * before this class goes out of scope, otherwise `environ` will + * become invalid. 
*/ void setEnviron(); }; @@ -398,9 +403,6 @@ void completeFlakeRefWithFragment( std::string showVersions(const std::set & versions); void printClosureDiff( - ref store, - const StorePath & beforePath, - const StorePath & afterPath, - std::string_view indent); + ref store, const StorePath & beforePath, const StorePath & afterPath, std::string_view indent); } diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index fcef92487..ccbf957d9 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -18,6 +18,8 @@ namespace nix { +namespace fs { using namespace std::filesystem; } + fetchers::Settings fetchSettings; static GlobalConfig::Register rFetchSettings(&fetchSettings); @@ -119,8 +121,8 @@ MixEvalArgs::MixEvalArgs() .category = category, .labels = {"original-ref", "resolved-ref"}, .handler = {[&](std::string _from, std::string _to) { - auto from = parseFlakeRef(fetchSettings, _from, absPath(".")); - auto to = parseFlakeRef(fetchSettings, _to, absPath(".")); + auto from = parseFlakeRef(fetchSettings, _from, fs::current_path().string()); + auto to = parseFlakeRef(fetchSettings, _to, fs::current_path().string()); fetchers::Attrs extraAttrs; if (to.subdir != "") extraAttrs["dir"] = to.subdir; fetchers::overrideRegistry(from.input, to.input, extraAttrs); @@ -171,7 +173,9 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * bas { if (EvalSettings::isPseudoUrl(s)) { auto accessor = fetchers::downloadTarball( - EvalSettings::resolvePseudoUrl(s)).accessor; + state.store, + state.fetchSettings, + EvalSettings::resolvePseudoUrl(s)); auto storePath = fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy); return state.rootPath(CanonPath(state.store->toRealPath(storePath))); } diff --git a/src/libcmd/installable-flake.hh b/src/libcmd/installable-flake.hh index 8e0a232ef..212403dd4 100644 --- a/src/libcmd/installable-flake.hh +++ b/src/libcmd/installable-flake.hh @@ -26,7 +26,7 @@ struct ExtraPathInfoFlake : ExtraPathInfoValue Flake flake; ExtraPathInfoFlake(Value && v, Flake && f) - : ExtraPathInfoValue(std::move(v)), flake(f) + : ExtraPathInfoValue(std::move(v)), flake(std::move(f)) { } }; diff --git a/src/libcmd/installable-value.hh b/src/libcmd/installable-value.hh index 60207cd23..4b6dbd306 100644 --- a/src/libcmd/installable-value.hh +++ b/src/libcmd/installable-value.hh @@ -59,7 +59,7 @@ struct ExtraPathInfoValue : ExtraPathInfo Value value; ExtraPathInfoValue(Value && v) - : value(v) + : value(std::move(v)) { } virtual ~ExtraPathInfoValue() = default; diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 1c655a47e..49756a122 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -31,6 +31,8 @@ namespace nix { +namespace fs { using namespace std::filesystem; } + const static std::regex attrPathRegex( R"((?:[a-zA-Z0-9_"-][a-zA-Z0-9_".,^\*-]*))", std::regex::ECMAScript); @@ -90,7 +92,7 @@ MixFlakeOptions::MixFlakeOptions() > **DEPRECATED** > - > Use [`--no-use-registries`](#opt-no-use-registries) instead. + > Use [`--no-use-registries`](@docroot@/command-ref/conf-file.md#conf-use-registries) instead. )", .category = category, .handler = {[&]() { @@ -415,7 +417,7 @@ void completeFlakeRefWithFragment( : std::string(prefix.substr(0, hash)); // TODO: ideally this would use the command base directory instead of assuming ".". 
- auto flakeRef = parseFlakeRef(fetchSettings, expandTilde(flakeRefS), absPath(".")); + auto flakeRef = parseFlakeRef(fetchSettings, expandTilde(flakeRefS), fs::current_path().string()); auto evalCache = openEvalCache(*evalState, std::make_shared(lockFlake( @@ -1001,6 +1003,7 @@ std::vector RawInstallablesCommand::getFlakeRefsForCompletion() { applyDefaultInstallables(rawInstallables); std::vector res; + res.reserve(rawInstallables.size()); for (auto i : rawInstallables) res.push_back(parseFlakeRefWithFragment( fetchSettings, diff --git a/src/libcmd/package.nix b/src/libcmd/package.nix index cde494901..244179ee4 100644 --- a/src/libcmd/package.nix +++ b/src/libcmd/package.nix @@ -1,11 +1,6 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonLibrary , nix-util , nix-store @@ -38,7 +33,7 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-cmd"; inherit version; @@ -54,14 +49,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) ]; - outputs = [ "out" "dev" ]; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - buildInputs = [ ({ inherit editline readline; }.${readlineFlavor}) ] ++ lib.optional enableMarkdown lowdown; @@ -93,10 +80,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index dbce4ac50..c8258ca5f 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -2,6 +2,7 @@ #include #include +#include "error.hh" #include "repl-interacter.hh" #include "repl.hh" @@ -28,11 +29,6 @@ #include "ref.hh" #include "value.hh" -#if HAVE_BOEHMGC -#define GC_INCLUDE_NEW -#include -#endif - #include "strings.hh" namespace nix { @@ -61,9 +57,7 @@ enum class ProcessLineResult { struct NixRepl : AbstractNixRepl , detail::ReplCompleterMixin - #if HAVE_BOEHMGC , gc - #endif { size_t debugTraceIndex; @@ -134,7 +128,7 @@ NixRepl::NixRepl(const LookupPath & lookupPath, nix::ref store, refstaticBaseEnv.get())) , runNixPtr{runNix} - , interacter(make_unique(getDataDir() + "/nix/repl-history")) + , interacter(make_unique(getDataDir() + "/repl-history")) { } @@ -622,7 +616,7 @@ ProcessLineResult NixRepl::processLine(std::string line) // When missing, trigger the normal exception // e.g. :doc builtins.foo // behaves like - // nix-repl> builtins.foo + // nix-repl> builtins.foo // error: attribute 'foo' missing evalString(arg, v); assert(false); @@ -651,7 +645,7 @@ ProcessLineResult NixRepl::processLine(std::string line) logger->cout(trim(renderMarkdownToTerminal(markdown))); } else if (fallbackPos) { - std::stringstream ss; + std::ostringstream ss; ss << "Attribute `" << fallbackName << "`\n\n"; ss << " … defined at " << state->positions[fallbackPos] << "\n\n"; if (fallbackDoc) { @@ -660,7 +654,7 @@ ProcessLineResult NixRepl::processLine(std::string line) ss << "No documentation found.\n\n"; } - auto markdown = ss.str(); + auto markdown = toView(ss); logger->cout(trim(renderMarkdownToTerminal(markdown))); } else @@ -720,7 +714,14 @@ void NixRepl::loadFlake(const std::string & flakeRefS) if (flakeRefS.empty()) throw Error("cannot use ':load-flake' without a path specified. (Use '.' 
for the current working directory.)"); - auto flakeRef = parseFlakeRef(fetchSettings, flakeRefS, absPath("."), true); + std::filesystem::path cwd; + try { + cwd = std::filesystem::current_path(); + } catch (std::filesystem::filesystem_error & e) { + throw SysError("cannot determine current working directory"); + } + + auto flakeRef = parseFlakeRef(fetchSettings, flakeRefS, cwd.string(), true); if (evalSettings.pureEval && !flakeRef.input.isLocked()) throw Error("cannot use ':load-flake' on locked flake reference '%s' (use --impure to override)", flakeRefS); @@ -825,7 +826,7 @@ void NixRepl::runNix(Path program, const Strings & args, const std::optional -# define GC_INCLUDE_NEW 1 -# include "gc_cpp.h" #endif nix_err nix_libexpr_init(nix_c_context * context) diff --git a/src/libexpr-c/nix_api_external.cc b/src/libexpr-c/nix_api_external.cc index fa78eb5df..d673bcb0b 100644 --- a/src/libexpr-c/nix_api_external.cc +++ b/src/libexpr-c/nix_api_external.cc @@ -14,12 +14,6 @@ #include -#if HAVE_BOEHMGC -# include "gc/gc.h" -# define GC_INCLUDE_NEW 1 -# include "gc_cpp.h" -#endif - void nix_set_string_return(nix_string_return * str, const char * c) { str->str = c; diff --git a/src/libexpr-c/nix_api_value.cc b/src/libexpr-c/nix_api_value.cc index fa2a9cbe2..bae078d31 100644 --- a/src/libexpr-c/nix_api_value.cc +++ b/src/libexpr-c/nix_api_value.cc @@ -14,12 +14,6 @@ #include "nix_api_value.h" #include "value/context.hh" -#if HAVE_BOEHMGC -# include "gc/gc.h" -# define GC_INCLUDE_NEW 1 -# include "gc_cpp.h" -#endif - // Internal helper functions to check [in] and [out] `Value *` parameters static const nix::Value & check_value_not_null(const nix_value * value) { diff --git a/src/libexpr-c/package.nix b/src/libexpr-c/package.nix index eb42195a4..df49a8bdc 100644 --- a/src/libexpr-c/package.nix +++ b/src/libexpr-c/package.nix @@ -1,10 +1,6 @@ { lib , stdenv -, mkMesonDerivation - -, meson -, ninja -, pkg-config +, mkMesonLibrary , nix-store-c , nix-expr @@ -18,7 +14,7 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-expr-c"; inherit version; @@ -35,14 +31,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "h") ./.) 
]; - outputs = [ "out" "dev" ]; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - propagatedBuildInputs = [ nix-store-c nix-expr @@ -63,10 +51,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libexpr-test-support/.version b/src/libexpr-test-support/.version new file mode 120000 index 000000000..b7badcd0c --- /dev/null +++ b/src/libexpr-test-support/.version @@ -0,0 +1 @@ +../../.version \ No newline at end of file diff --git a/src/libexpr-test-support/build-utils-meson b/src/libexpr-test-support/build-utils-meson new file mode 120000 index 000000000..5fff21bab --- /dev/null +++ b/src/libexpr-test-support/build-utils-meson @@ -0,0 +1 @@ +../../build-utils-meson \ No newline at end of file diff --git a/src/libexpr-test-support/local.mk b/src/libexpr-test-support/local.mk new file mode 100644 index 000000000..0501de33c --- /dev/null +++ b/src/libexpr-test-support/local.mk @@ -0,0 +1,23 @@ +libraries += libexpr-test-support + +libexpr-test-support_NAME = libnixexpr-test-support + +libexpr-test-support_DIR := $(d) + +ifeq ($(INSTALL_UNIT_TESTS), yes) + libexpr-test-support_INSTALL_DIR := $(checklibdir) +else + libexpr-test-support_INSTALL_DIR := +endif + +libexpr-test-support_SOURCES := \ + $(wildcard $(d)/tests/*.cc) \ + $(wildcard $(d)/tests/value/*.cc) + +libexpr-test-support_CXXFLAGS += $(libexpr-tests_EXTRA_INCLUDES) + +libexpr-test-support_LIBS = \ + libstore-test-support libutil-test-support \ + libexpr libstore libutil + +libexpr-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck diff --git a/tests/unit/libexpr-support/meson.build b/src/libexpr-test-support/meson.build similarity index 97% rename from tests/unit/libexpr-support/meson.build rename to src/libexpr-test-support/meson.build index 4f50478aa..b9e7f390d 100644 --- a/tests/unit/libexpr-support/meson.build +++ b/src/libexpr-test-support/meson.build @@ -56,6 +56,7 @@ headers = files( ) subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') this_library = library( 'nix-expr-test-support', diff --git a/tests/unit/libexpr-support/package.nix b/src/libexpr-test-support/package.nix similarity index 69% rename from tests/unit/libexpr-support/package.nix rename to src/libexpr-test-support/package.nix index f53aa842f..bcf6118e0 100644 --- a/tests/unit/libexpr-support/package.nix +++ b/src/libexpr-test-support/package.nix @@ -1,11 +1,6 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonLibrary , nix-store-test-support , nix-expr @@ -21,15 +16,15 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-util-test-support"; inherit version; workDir = ./.; fileset = fileset.unions [ - ../../../build-utils-meson + ../../build-utils-meson ./build-utils-meson - ../../../.version + ../../.version ./.version ./meson.build # ./meson.options @@ -37,14 +32,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) ]; - outputs = [ "out" "dev" ]; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - propagatedBuildInputs = [ nix-store-test-support nix-expr @@ -56,7 +43,7 @@ mkMesonDerivation (finalAttrs: { # Do the meson utils, without modification. 
'' chmod u+w ./.version - echo ${version} > ../../../.version + echo ${version} > ../../.version ''; mesonFlags = [ @@ -66,10 +53,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/tests/unit/libexpr-support/tests/libexpr.hh b/src/libexpr-test-support/tests/libexpr.hh similarity index 100% rename from tests/unit/libexpr-support/tests/libexpr.hh rename to src/libexpr-test-support/tests/libexpr.hh diff --git a/tests/unit/libexpr-support/tests/nix_api_expr.hh b/src/libexpr-test-support/tests/nix_api_expr.hh similarity index 100% rename from tests/unit/libexpr-support/tests/nix_api_expr.hh rename to src/libexpr-test-support/tests/nix_api_expr.hh diff --git a/tests/unit/libexpr-support/tests/value/context.cc b/src/libexpr-test-support/tests/value/context.cc similarity index 100% rename from tests/unit/libexpr-support/tests/value/context.cc rename to src/libexpr-test-support/tests/value/context.cc diff --git a/tests/unit/libexpr-support/tests/value/context.hh b/src/libexpr-test-support/tests/value/context.hh similarity index 100% rename from tests/unit/libexpr-support/tests/value/context.hh rename to src/libexpr-test-support/tests/value/context.hh diff --git a/src/libexpr-tests/.version b/src/libexpr-tests/.version new file mode 120000 index 000000000..b7badcd0c --- /dev/null +++ b/src/libexpr-tests/.version @@ -0,0 +1 @@ +../../.version \ No newline at end of file diff --git a/src/libexpr-tests/build-utils-meson b/src/libexpr-tests/build-utils-meson new file mode 120000 index 000000000..5fff21bab --- /dev/null +++ b/src/libexpr-tests/build-utils-meson @@ -0,0 +1 @@ +../../build-utils-meson \ No newline at end of file diff --git a/tests/unit/libexpr/data/.gitkeep b/src/libexpr-tests/data/.gitkeep similarity index 100% rename from tests/unit/libexpr/data/.gitkeep rename to src/libexpr-tests/data/.gitkeep diff --git a/tests/unit/libexpr/derived-path.cc b/src/libexpr-tests/derived-path.cc similarity index 100% rename from tests/unit/libexpr/derived-path.cc rename to src/libexpr-tests/derived-path.cc diff --git a/tests/unit/libexpr/error_traces.cc b/src/libexpr-tests/error_traces.cc similarity index 100% rename from tests/unit/libexpr/error_traces.cc rename to src/libexpr-tests/error_traces.cc diff --git a/tests/unit/libexpr/eval.cc b/src/libexpr-tests/eval.cc similarity index 100% rename from tests/unit/libexpr/eval.cc rename to src/libexpr-tests/eval.cc diff --git a/tests/unit/libexpr/json.cc b/src/libexpr-tests/json.cc similarity index 100% rename from tests/unit/libexpr/json.cc rename to src/libexpr-tests/json.cc diff --git a/src/libexpr-tests/local.mk b/src/libexpr-tests/local.mk new file mode 100644 index 000000000..79583a9ee --- /dev/null +++ b/src/libexpr-tests/local.mk @@ -0,0 +1,45 @@ +check: libexpr-tests_RUN + +programs += libexpr-tests + +libexpr-tests_NAME := libnixexpr-tests + +libexpr-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libexpr-tests.xml + +libexpr-tests_DIR := $(d) + +ifeq ($(INSTALL_UNIT_TESTS), yes) + libexpr-tests_INSTALL_DIR := $(checkbindir) +else + libexpr-tests_INSTALL_DIR := +endif + +libexpr-tests_SOURCES := \ + $(wildcard $(d)/*.cc) \ + $(wildcard $(d)/value/*.cc) \ + $(wildcard $(d)/flake/*.cc) + +libexpr-tests_EXTRA_INCLUDES = \ + -I src/libexpr-test-support \ + -I src/libstore-test-support \ + -I 
src/libutil-test-support \ + $(INCLUDE_libexpr) \ + $(INCLUDE_libexprc) \ + $(INCLUDE_libfetchers) \ + $(INCLUDE_libstore) \ + $(INCLUDE_libstorec) \ + $(INCLUDE_libutil) \ + $(INCLUDE_libutilc) + +libexpr-tests_CXXFLAGS += $(libexpr-tests_EXTRA_INCLUDES) + +libexpr-tests_LIBS = \ + libexpr-test-support libstore-test-support libutil-test-support \ + libexpr libexprc libfetchers libstore libstorec libutil libutilc + +libexpr-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) -lgmock + +ifdef HOST_WINDOWS + # Increase the default reserved stack size to 65 MB so Nix doesn't run out of space + libexpr-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024))) +endif diff --git a/tests/unit/libexpr/main.cc b/src/libexpr-tests/main.cc similarity index 100% rename from tests/unit/libexpr/main.cc rename to src/libexpr-tests/main.cc diff --git a/tests/unit/libexpr/meson.build b/src/libexpr-tests/meson.build similarity index 97% rename from tests/unit/libexpr/meson.build rename to src/libexpr-tests/meson.build index 21c321334..5a5c9f1d4 100644 --- a/tests/unit/libexpr/meson.build +++ b/src/libexpr-tests/meson.build @@ -28,6 +28,7 @@ subdir('build-utils-meson/subprojects') subdir('build-utils-meson/threads') subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') rapidcheck = dependency('rapidcheck') deps_private += rapidcheck diff --git a/tests/unit/libexpr/nix_api_expr.cc b/src/libexpr-tests/nix_api_expr.cc similarity index 99% rename from tests/unit/libexpr/nix_api_expr.cc rename to src/libexpr-tests/nix_api_expr.cc index 8b97d6923..b37ac44b3 100644 --- a/tests/unit/libexpr/nix_api_expr.cc +++ b/src/libexpr-tests/nix_api_expr.cc @@ -8,7 +8,7 @@ #include "tests/nix_api_expr.hh" #include "tests/string_callback.hh" -#include "gmock/gmock.h" +#include #include namespace nixC { diff --git a/tests/unit/libexpr/nix_api_external.cc b/src/libexpr-tests/nix_api_external.cc similarity index 100% rename from tests/unit/libexpr/nix_api_external.cc rename to src/libexpr-tests/nix_api_external.cc diff --git a/tests/unit/libexpr/nix_api_value.cc b/src/libexpr-tests/nix_api_value.cc similarity index 100% rename from tests/unit/libexpr/nix_api_value.cc rename to src/libexpr-tests/nix_api_value.cc diff --git a/tests/unit/libexpr/package.nix b/src/libexpr-tests/package.nix similarity index 80% rename from tests/unit/libexpr/package.nix rename to src/libexpr-tests/package.nix index e70ed7836..959d6b84e 100644 --- a/tests/unit/libexpr/package.nix +++ b/src/libexpr-tests/package.nix @@ -1,12 +1,7 @@ { lib , buildPackages , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonExecutable , nix-expr , nix-expr-c @@ -26,15 +21,15 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonExecutable (finalAttrs: { pname = "nix-expr-tests"; inherit version; workDir = ./.; fileset = fileset.unions [ - ../../../build-utils-meson + ../../build-utils-meson ./build-utils-meson - ../../../.version + ../../.version ./.version ./meson.build # ./meson.options @@ -42,12 +37,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) ]; - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - buildInputs = [ nix-expr nix-expr-c @@ -61,7 +50,7 @@ mkMesonDerivation (finalAttrs: { # Do the meson utils, without modification. 
'' chmod u+w ./.version - echo ${version} > ../../../.version + echo ${version} > ../../.version ''; mesonFlags = [ @@ -71,10 +60,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - passthru = { tests = { run = runCommand "${finalAttrs.pname}-run" { diff --git a/tests/unit/libexpr/primops.cc b/src/libexpr-tests/primops.cc similarity index 100% rename from tests/unit/libexpr/primops.cc rename to src/libexpr-tests/primops.cc diff --git a/tests/unit/libexpr/search-path.cc b/src/libexpr-tests/search-path.cc similarity index 100% rename from tests/unit/libexpr/search-path.cc rename to src/libexpr-tests/search-path.cc diff --git a/tests/unit/libexpr/trivial.cc b/src/libexpr-tests/trivial.cc similarity index 100% rename from tests/unit/libexpr/trivial.cc rename to src/libexpr-tests/trivial.cc diff --git a/tests/unit/libexpr/value/context.cc b/src/libexpr-tests/value/context.cc similarity index 100% rename from tests/unit/libexpr/value/context.cc rename to src/libexpr-tests/value/context.cc diff --git a/tests/unit/libexpr/value/print.cc b/src/libexpr-tests/value/print.cc similarity index 100% rename from tests/unit/libexpr/value/print.cc rename to src/libexpr-tests/value/print.cc diff --git a/tests/unit/libexpr/value/value.cc b/src/libexpr-tests/value/value.cc similarity index 100% rename from tests/unit/libexpr/value/value.cc rename to src/libexpr-tests/value/value.cc diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 9019edc1f..ea3319f99 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -69,7 +69,7 @@ struct AttrDb { auto state(_state->lock()); - Path cacheDir = getCacheDir() + "/nix/eval-cache-v5"; + Path cacheDir = getCacheDir() + "/eval-cache-v5"; createDirs(cacheDir); Path dbPath = cacheDir + "/" + fingerprint.to_string(HashFormat::Base16, false) + ".sqlite"; @@ -101,7 +101,7 @@ struct AttrDb state->txn->commit(); state->txn.reset(); } catch (...) { - ignoreException(); + ignoreExceptionInDestructor(); } } @@ -112,7 +112,7 @@ struct AttrDb try { return fun(); } catch (SQLiteError &) { - ignoreException(); + ignoreExceptionExceptInterrupt(); failed = true; return 0; } @@ -351,7 +351,7 @@ static std::shared_ptr makeAttrDb( try { return std::make_shared(cfg, fingerprint, symbols); } catch (SQLiteError &) { - ignoreException(); + ignoreExceptionExceptInterrupt(); return nullptr; } } diff --git a/src/libexpr/eval-gc.cc b/src/libexpr/eval-gc.cc index 2f0e8c0c9..07ce05a2c 100644 --- a/src/libexpr/eval-gc.cc +++ b/src/libexpr/eval-gc.cc @@ -32,122 +32,6 @@ static void * oomHandler(size_t requested) throw std::bad_alloc(); } -class BoehmGCStackAllocator : public StackAllocator -{ - boost::coroutines2::protected_fixedsize_stack stack{ - // We allocate 8 MB, the default max stack size on NixOS. - // A smaller stack might be quicker to allocate but reduces the stack - // depth available for source filter expressions etc. - std::max(boost::context::stack_traits::default_size(), static_cast(8 * 1024 * 1024))}; - - // This is specific to boost::coroutines2::protected_fixedsize_stack. - // The stack protection page is included in sctx.size, so we have to - // subtract one page size from the stack size. 
- std::size_t pfss_usable_stack_size(boost::context::stack_context & sctx) - { - return sctx.size - boost::context::stack_traits::page_size(); - } - -public: - boost::context::stack_context allocate() override - { - auto sctx = stack.allocate(); - - // Stacks generally start at a high address and grow to lower addresses. - // Architectures that do the opposite are rare; in fact so rare that - // boost_routine does not implement it. - // So we subtract the stack size. - GC_add_roots(static_cast(sctx.sp) - pfss_usable_stack_size(sctx), sctx.sp); - return sctx; - } - - void deallocate(boost::context::stack_context sctx) override - { - GC_remove_roots(static_cast(sctx.sp) - pfss_usable_stack_size(sctx), sctx.sp); - stack.deallocate(sctx); - } -}; - -static BoehmGCStackAllocator boehmGCStackAllocator; - -/** - * When a thread goes into a coroutine, we lose its original sp until - * control flow returns to the thread. - * While in the coroutine, the sp points outside the thread stack, - * so we can detect this and push the entire thread stack instead, - * as an approximation. - * The coroutine's stack is covered by `BoehmGCStackAllocator`. - * This is not an optimal solution, because the garbage is scanned when a - * coroutine is active, for both the coroutine and the original thread stack. - * However, the implementation is quite lean, and usually we don't have active - * coroutines during evaluation, so this is acceptable. - */ -void fixupBoehmStackPointer(void ** sp_ptr, void * _pthread_id) -{ - void *& sp = *sp_ptr; - auto pthread_id = reinterpret_cast(_pthread_id); -# ifndef __APPLE__ - pthread_attr_t pattr; -# endif - size_t osStackSize; - // The low address of the stack, which grows down. - void * osStackLimit; - void * osStackBase; - -# ifdef __APPLE__ - osStackSize = pthread_get_stacksize_np(pthread_id); - osStackLimit = pthread_get_stackaddr_np(pthread_id); -# else - if (pthread_attr_init(&pattr)) { - throw Error("fixupBoehmStackPointer: pthread_attr_init failed"); - } -# ifdef HAVE_PTHREAD_GETATTR_NP - if (pthread_getattr_np(pthread_id, &pattr)) { - throw Error("fixupBoehmStackPointer: pthread_getattr_np failed"); - } -# elif HAVE_PTHREAD_ATTR_GET_NP - if (!pthread_attr_init(&pattr)) { - throw Error("fixupBoehmStackPointer: pthread_attr_init failed"); - } - if (!pthread_attr_get_np(pthread_id, &pattr)) { - throw Error("fixupBoehmStackPointer: pthread_attr_get_np failed"); - } -# else -# error "Need one of `pthread_attr_get_np` or `pthread_getattr_np`" -# endif - if (pthread_attr_getstack(&pattr, &osStackLimit, &osStackSize)) { - throw Error("fixupBoehmStackPointer: pthread_attr_getstack failed"); - } - if (pthread_attr_destroy(&pattr)) { - throw Error("fixupBoehmStackPointer: pthread_attr_destroy failed"); - } -# endif - osStackBase = (char *) osStackLimit + osStackSize; - // NOTE: We assume the stack grows down, as it does on all architectures we support. - // Architectures that grow the stack up are rare. - if (sp >= osStackBase || sp < osStackLimit) { // sp is outside the os stack - sp = osStackLimit; - } -} - -/* Disable GC while this object lives. Used by CoroutineContext. - * - * Boehm keeps a count of GC_disable() and GC_enable() calls, - * and only enables GC when the count matches. - */ -class BoehmDisableGC -{ -public: - BoehmDisableGC() - { - GC_disable(); - }; - ~BoehmDisableGC() - { - GC_enable(); - }; -}; - static inline void initGCReal() { /* Initialise the Boehm garbage collector. 
*/ @@ -168,24 +52,6 @@ static inline void initGCReal() GC_set_oom_fn(oomHandler); - StackAllocator::defaultAllocator = &boehmGCStackAllocator; - -// TODO: Remove __APPLE__ condition. -// Comment suggests an implementation that works on darwin and windows -// https://github.com/ivmai/bdwgc/issues/362#issuecomment-1936672196 -# if GC_VERSION_MAJOR >= 8 && GC_VERSION_MINOR >= 2 && GC_VERSION_MICRO >= 4 && !defined(__APPLE__) - GC_set_sp_corrector(&fixupBoehmStackPointer); - - if (!GC_get_sp_corrector()) { - printTalkative("BoehmGC on this platform does not support sp_corrector; will disable GC inside coroutines"); - /* Used to disable GC when entering coroutines on macOS */ - create_coro_gc_hook = []() -> std::shared_ptr { return std::make_shared(); }; - } -# else -# warning \ - "BoehmGC version does not support GC while coroutine exists. GC will be disabled inside coroutines. Consider updating bdw-gc to 8.2.4 or later." -# endif - /* Set the initial heap size to something fairly big (25% of physical RAM, up to a maximum of 384 MiB) so that in most cases we don't need to garbage collect at all. (Collection has a diff --git a/src/libexpr/eval-gc.hh b/src/libexpr/eval-gc.hh index 005175eb7..f3b699b54 100644 --- a/src/libexpr/eval-gc.hh +++ b/src/libexpr/eval-gc.hh @@ -3,6 +3,34 @@ #include +#if HAVE_BOEHMGC + +# define GC_INCLUDE_NEW + +# include +# include +# include + +#else + +# include + +/* Some dummy aliases for Boehm GC definitions to reduce the number of + #ifdefs. */ + +template +using traceable_allocator = std::allocator; + +template +using gc_allocator = std::allocator; + +# define GC_MALLOC_ATOMIC std::malloc + +struct gc +{}; + +#endif + namespace nix { /** diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index 6fa34b062..d5ce238b2 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -4,6 +4,7 @@ #include "print.hh" #include "eval.hh" #include "eval-error.hh" +#include "eval-settings.hh" namespace nix { @@ -138,5 +139,12 @@ inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view e } } +[[gnu::always_inline]] +inline CallDepth EvalState::addCallDepth(const PosIdx pos) { + if (callDepth > settings.maxCallDepth) + error("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow(); + + return CallDepth(callDepth); +}; } diff --git a/src/libexpr/eval-settings.cc b/src/libexpr/eval-settings.cc index 2846eccbc..4cbcb39b9 100644 --- a/src/libexpr/eval-settings.cc +++ b/src/libexpr/eval-settings.cc @@ -99,7 +99,7 @@ const std::string & EvalSettings::getCurrentSystem() const Path getNixDefExpr() { return settings.useXDGBaseDirectories - ? getStateDir() + "/nix/defexpr" + ? getStateDir() + "/defexpr" : getHome() + "/.nix-defexpr"; } diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh index 0cfc14c1b..115e3ee50 100644 --- a/src/libexpr/eval-settings.hh +++ b/src/libexpr/eval-settings.hh @@ -87,10 +87,19 @@ struct EvalSettings : Config If the respective paths are accessible, the default values are: - `$HOME/.nix-defexpr/channels` + + The [user channel link](@docroot@/command-ref/files/default-nix-expression.md#user-channel-link), pointing to the current state of [channels](@docroot@/command-ref/files/channels.md) for the current user. + - `nixpkgs=$NIX_STATE_DIR/profiles/per-user/root/channels/nixpkgs` + + The current state of the `nixpkgs` channel for the `root` user. 
+ - `$NIX_STATE_DIR/profiles/per-user/root/channels` - See [`NIX_STATE_DIR`](@docroot@/command-ref/env-common.md#env-NIX_STATE_DIR) for details. + The current state of all channels for the `root` user. + + These files are set up by the [Nix installer](@docroot@/installation/installing-binary.md). + See [`NIX_STATE_DIR`](@docroot@/command-ref/env-common.md#env-NIX_STATE_DIR) for details on the environment variable. > **Note** > @@ -185,7 +194,11 @@ struct EvalSettings : Config )"}; Setting useEvalCache{this, true, "eval-cache", - "Whether to use the flake evaluation cache."}; + R"( + Whether to use the flake evaluation cache. + Certain commands won't have to evaluate when invoked for the second time with a particular version of a flake. + Intermediate results are not cached. + )"}; Setting ignoreExceptionsDuringTry{this, false, "ignore-try", R"( diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index c9101678c..e4937735b 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1,5 +1,4 @@ #include "eval.hh" -#include "eval-gc.hh" #include "eval-settings.hh" #include "primops.hh" #include "print-options.hh" @@ -39,16 +38,6 @@ # include #endif -#if HAVE_BOEHMGC - -# define GC_INCLUDE_NEW - -# include -# include -# include - -#endif - #include "strings-inline.hh" using json = nlohmann::json; @@ -58,24 +47,7 @@ namespace nix { static char * allocString(size_t size) { char * t; -#if HAVE_BOEHMGC t = (char *) GC_MALLOC_ATOMIC(size); -#else - t = (char *) malloc(size); -#endif - if (!t) throw std::bad_alloc(); - return t; -} - - -static char * dupString(const char * s) -{ - char * t; -#if HAVE_BOEHMGC - t = GC_STRDUP(s); -#else - t = strdup(s); -#endif if (!t) throw std::bad_alloc(); return t; } @@ -99,11 +71,7 @@ static const char * makeImmutableString(std::string_view s) RootValue allocRootValue(Value * v) { -#if HAVE_BOEHMGC return std::allocate_shared(traceable_allocator(), v); -#else - return std::make_shared(v); -#endif } // Pretty print types for assertion errors @@ -571,7 +539,7 @@ std::optional EvalState::getDoc(Value & v) if (v.isLambda()) { auto exprLambda = v.payload.lambda.fun; - std::stringstream s(std::ios_base::out); + std::ostringstream s; std::string name; auto pos = positions[exprLambda->getPos()]; std::string docStr; @@ -603,19 +571,33 @@ std::optional EvalState::getDoc(Value & v) s << docStr; - s << '\0'; // for making a c string below - std::string ss = s.str(); - return Doc { .pos = pos, .name = name, .arity = 0, // FIXME: figure out how deep by syntax only? It's not semantically useful though... .args = {}, - .doc = - // FIXME: this leaks; make the field std::string? - strdup(ss.data()), + .doc = makeImmutableString(toView(s)), // NOTE: memory leak when compiled without GC }; } + if (isFunctor(v)) { + try { + Value & functor = *v.attrs()->find(sFunctor)->value; + Value * vp = &v; + Value partiallyApplied; + // The first paramater is not user-provided, and may be + // handled by code that is opaque to the user, like lib.const = x: y: y; + // So preferably we show docs that are relevant to the + // "partially applied" function returned by e.g. `const`. 
+ // We apply the first argument: + callFunction(functor, 1, &vp, partiallyApplied, noPos); + auto _level = addCallDepth(noPos); + return getDoc(partiallyApplied); + } + catch (Error & e) { + e.addTrace(nullptr, "while partially calling '%1%' to retrieve documentation", "__functor"); + throw; + } + } return {}; } @@ -836,9 +818,10 @@ static const char * * encodeContext(const NixStringContext & context) size_t n = 0; auto ctx = (const char * *) allocBytes((context.size() + 1) * sizeof(char *)); - for (auto & i : context) - ctx[n++] = dupString(i.to_string().c_str()); - ctx[n] = 0; + for (auto & i : context) { + ctx[n++] = makeImmutableString({i.to_string()}); + } + ctx[n] = nullptr; return ctx; } else return nullptr; @@ -1471,26 +1454,9 @@ void ExprLambda::eval(EvalState & state, Env & env, Value & v) v.mkLambda(&env, this); } -namespace { -/** Increments a count on construction and decrements on destruction. - */ -class CallDepth { - size_t & count; -public: - CallDepth(size_t & count) : count(count) { - ++count; - } - ~CallDepth() { - --count; - } -}; -}; - void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos) { - if (callDepth > settings.maxCallDepth) - error("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow(); - CallDepth _level(callDepth); + auto _level = addCallDepth(pos); auto trace = settings.traceFunctionCalls ? std::make_unique(positions[pos]) @@ -1834,11 +1800,9 @@ void ExprIf::eval(EvalState & state, Env & env, Value & v) void ExprAssert::eval(EvalState & state, Env & env, Value & v) { if (!state.evalBool(env, cond, pos, "in the condition of the assert statement")) { - auto exprStr = ({ - std::ostringstream out; - cond->show(state.symbols, out); - out.str(); - }); + std::ostringstream out; + cond->show(state.symbols, out); + auto exprStr = toView(out); if (auto eq = dynamic_cast(cond)) { try { @@ -2870,7 +2834,9 @@ void EvalState::printStatistics() #endif #if HAVE_BOEHMGC {GC_is_incremental_mode() ? "gcNonIncremental" : "gc", gcFullOnlyTime}, +#ifndef _WIN32 // TODO implement {GC_is_incremental_mode() ? "gcNonIncrementalFraction" : "gcFraction", gcFullOnlyTime / cpuTime}, +#endif #endif }; topObj["envs"] = { @@ -3088,7 +3054,9 @@ std::optional EvalState::resolveLookupPathPath(const LookupPath::Pa if (EvalSettings::isPseudoUrl(value)) { try { auto accessor = fetchers::downloadTarball( - EvalSettings::resolvePseudoUrl(value)).accessor; + store, + fetchSettings, + EvalSettings::resolvePseudoUrl(value)); auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy); return finish(store->toRealPath(storePath)); } catch (Error & e) { diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index ddf5dcf94..f7ed6be83 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -41,6 +41,21 @@ namespace eval_cache { class EvalCache; } +/** + * Increments a count on construction and decrements on destruction. + */ +class CallDepth { + size_t & count; + +public: + CallDepth(size_t & count) : count(count) { + ++count; + } + ~CallDepth() { + --count; + } +}; + /** * Function that implements a primop. */ @@ -124,11 +139,7 @@ struct Constant bool impureOnly = false; }; -#if HAVE_BOEHMGC - typedef std::map, traceable_allocator > > ValMap; -#else - typedef std::map ValMap; -#endif +typedef std::map, traceable_allocator > > ValMap; typedef std::unordered_map DocCommentMap; @@ -314,21 +325,13 @@ private: /** * A cache from path names to parse trees. 
*/ -#if HAVE_BOEHMGC typedef std::unordered_map, std::equal_to, traceable_allocator>> FileParseCache; -#else - typedef std::unordered_map FileParseCache; -#endif FileParseCache fileParseCache; /** * A cache from path names to values. */ -#if HAVE_BOEHMGC typedef std::unordered_map, std::equal_to, traceable_allocator>> FileEvalCache; -#else - typedef std::unordered_map FileEvalCache; -#endif FileEvalCache fileEvalCache; /** @@ -625,6 +628,12 @@ public: const char * doc; }; + /** + * Retrieve the documentation for a value. This will evaluate the value if + * it is a thunk, and it will partially apply __functor if applicable. + * + * @param v The value to get the documentation for. + */ std::optional getDoc(Value & v); private: @@ -649,6 +658,11 @@ private: public: + /** + * Check that the call depth is within limits, and increment it, until the returned object is destroyed. + */ + inline CallDepth addCallDepth(const PosIdx pos); + /** * Do a deep equality test between two values. That is, list * elements and attributes are compared recursively. diff --git a/src/libexpr/gc-small-vector.hh b/src/libexpr/gc-small-vector.hh index 7f4f08fc7..8330dd2dc 100644 --- a/src/libexpr/gc-small-vector.hh +++ b/src/libexpr/gc-small-vector.hh @@ -2,28 +2,15 @@ #include -#if HAVE_BOEHMGC - -#include -#include -#include - -#endif +#include "value.hh" namespace nix { -struct Value; - /** * A GC compatible vector that may used a reserved portion of `nItems` on the stack instead of allocating on the heap. */ -#if HAVE_BOEHMGC template using SmallVector = boost::container::small_vector>; -#else -template -using SmallVector = boost::container::small_vector; -#endif /** * A vector of value pointers. See `SmallVector`. @@ -39,4 +26,4 @@ using SmallValueVector = SmallVector; template using SmallTemporaryValueVector = SmallVector; -} \ No newline at end of file +} diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 20963ec91..1ac13fcd2 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -374,11 +374,12 @@ static void getDerivations(EvalState & state, Value & vIn, bound to the attribute with the "lower" name should take precedence). 
*/ for (auto & i : v.attrs()->lexicographicOrder(state.symbols)) { + std::string_view symbol{state.symbols[i->name]}; try { - debug("evaluating attribute '%1%'", state.symbols[i->name]); - if (!std::regex_match(std::string(state.symbols[i->name]), attrRegex)) + debug("evaluating attribute '%1%'", symbol); + if (!std::regex_match(symbol.begin(), symbol.end(), attrRegex)) continue; - std::string pathPrefix2 = addToPath(pathPrefix, state.symbols[i->name]); + std::string pathPrefix2 = addToPath(pathPrefix, symbol); if (combineChannels) getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures); else if (getDerivation(state, *i->value, pathPrefix2, drvs, done, ignoreAssertionFailures)) { @@ -392,7 +393,7 @@ static void getDerivations(EvalState & state, Value & vIn, } } } catch (Error & e) { - e.addTrace(state.positions[i->pos], "while evaluating the attribute '%s'", state.symbols[i->name]); + e.addTrace(state.positions[i->pos], "while evaluating the attribute '%s'", symbol); throw; } } diff --git a/src/libexpr/get-drvs.hh b/src/libexpr/get-drvs.hh index db3eedb05..e4e277af8 100644 --- a/src/libexpr/get-drvs.hh +++ b/src/libexpr/get-drvs.hh @@ -83,11 +83,7 @@ public: }; -#if HAVE_BOEHMGC typedef std::list> PackageInfos; -#else -typedef std::list PackageInfos; -#endif /** diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index dbc74faf9..063ff0753 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -663,4 +663,32 @@ std::string DocComment::getInnerText(const PosTable & positions) const { return docStr; } + + +/* ‘Cursed or’ handling. + * + * In parser.y, every use of expr_select in a production must call one of the + * two below functions. + * + * To be removed by https://github.com/NixOS/nix/pull/11121 + */ + +void ExprCall::resetCursedOr() +{ + cursedOrEndPos.reset(); +} + +void ExprCall::warnIfCursedOr(const SymbolTable & symbols, const PosTable & positions) +{ + if (cursedOrEndPos.has_value()) { + std::ostringstream out; + out << "at " << positions[pos] << ": " + "This expression uses `or` as an identifier in a way that will change in a future Nix release.\n" + "Wrap this entire expression in parentheses to preserve its current meaning:\n" + " (" << positions[pos].getSnippetUpTo(positions[*cursedOrEndPos]).value_or("could not read expression") << ")\n" + "Give feedback at https://github.com/NixOS/nix/pull/11121"; + warn(out.str()); + } +} + } diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 7868834f1..bdf4e214a 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -96,6 +96,10 @@ struct Expr virtual void setName(Symbol name); virtual void setDocComment(DocComment docComment) { }; virtual PosIdx getPos() const { return noPos; } + + // These are temporary methods to be used only in parser.y + virtual void resetCursedOr() { }; + virtual void warnIfCursedOr(const SymbolTable & symbols, const PosTable & positions) { }; }; #define COMMON_METHODS \ @@ -354,10 +358,16 @@ struct ExprCall : Expr Expr * fun; std::vector args; PosIdx pos; + std::optional cursedOrEndPos; // used during parsing to warn about https://github.com/NixOS/nix/issues/11118 ExprCall(const PosIdx & pos, Expr * fun, std::vector && args) - : fun(fun), args(args), pos(pos) + : fun(fun), args(args), pos(pos), cursedOrEndPos({}) + { } + ExprCall(const PosIdx & pos, Expr * fun, std::vector && args, PosIdx && cursedOrEndPos) + : fun(fun), args(args), pos(pos), cursedOrEndPos(cursedOrEndPos) { } PosIdx getPos() const override { return pos; } + 
virtual void resetCursedOr() override; + virtual void warnIfCursedOr(const SymbolTable & symbols, const PosTable & positions) override; COMMON_METHODS }; diff --git a/src/libexpr/package.nix b/src/libexpr/package.nix index 4d10079ff..ca1f8bf21 100644 --- a/src/libexpr/package.nix +++ b/src/libexpr/package.nix @@ -1,11 +1,7 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools +, mkMesonLibrary -, meson -, ninja -, pkg-config , bison , flex , cmake # for resolving toml11 dep @@ -38,7 +34,7 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-expr"; inherit version; @@ -55,15 +51,13 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) ./lexer.l ./parser.y - (fileset.fileFilter (file: file.hasExt "nix") ./.) + (fileset.difference + (fileset.fileFilter (file: file.hasExt "nix") ./.) + ./package.nix + ) ]; - outputs = [ "out" "dev" ]; - nativeBuildInputs = [ - meson - ninja - pkg-config bison flex cmake @@ -102,10 +96,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 1ee2a2e41..45a8d3f2e 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -265,19 +265,28 @@ expr_op ; expr_app - : expr_app expr_select { $$ = makeCall(CUR_POS, $1, $2); } - | expr_select + : expr_app expr_select { $$ = makeCall(CUR_POS, $1, $2); $2->warnIfCursedOr(state->symbols, state->positions); } + | /* Once a ‘cursed or’ reaches this nonterminal, it is no longer cursed, + because the uncursed parse would also produce an expr_app. But we need + to remove the cursed status in order to prevent valid things like + `f (g or)` from triggering the warning. */ + expr_select { $$ = $1; $$->resetCursedOr(); } ; expr_select : expr_simple '.' attrpath { $$ = new ExprSelect(CUR_POS, $1, std::move(*$3), nullptr); delete $3; } | expr_simple '.' attrpath OR_KW expr_select - { $$ = new ExprSelect(CUR_POS, $1, std::move(*$3), $5); delete $3; } - | /* Backwards compatibility: because Nixpkgs has a rarely used - function named ‘or’, allow stuff like ‘map or [...]’. */ + { $$ = new ExprSelect(CUR_POS, $1, std::move(*$3), $5); delete $3; $5->warnIfCursedOr(state->symbols, state->positions); } + | /* Backwards compatibility: because Nixpkgs has a function named ‘or’, + allow stuff like ‘map or [...]’. This production is problematic (see + https://github.com/NixOS/nix/issues/11118) and will be refactored in the + future by treating `or` as a regular identifier. The refactor will (in + very rare cases, we think) change the meaning of expressions, so we mark + the ExprCall with data (establishing that it is a ‘cursed or’) that can + be used to emit a warning when an affected expression is parsed. 
*/ expr_simple OR_KW - { $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, state->s.or_)}); } + { $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, state->s.or_)}, state->positions.add(state->origin, @$.endOffset)); } | expr_simple ; @@ -351,7 +360,7 @@ string_parts_interpolated path_start : PATH { - Path path(absPath({$1.p, $1.l}, state->basePath.path.abs())); + Path path(absPath(std::string_view{$1.p, $1.l}, state->basePath.path.abs())); /* add back in the trailing '/' to the first segment */ if ($1.p[$1.l-1] == '/' && $1.l > 1) path += "/"; @@ -473,7 +482,7 @@ string_attr ; expr_list - : expr_list expr_select { $$ = $1; $1->elems.push_back($2); /* !!! dangerous */ } + : expr_list expr_select { $$ = $1; $1->elems.push_back($2); /* !!! dangerous */; $2->warnIfCursedOr(state->symbols, state->positions); } | { $$ = new ExprList; } ; diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index ffc8f1f63..7a0d1ce2a 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -40,6 +40,13 @@ namespace nix { * Miscellaneous *************************************************************/ +static inline Value * mkString(EvalState & state, const std::csub_match & match) +{ + Value * v = state.allocValue(); + v->mkString({match.first, match.second}); + return v; +} + StringMap EvalState::realiseContext(const NixStringContext & context, StorePathSet * maybePathsOut, bool isIFD) { std::vector drvs; @@ -84,6 +91,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context, StorePathS /* Build/substitute the context. */ std::vector buildReqs; + buildReqs.reserve(drvs.size()); for (auto & d : drvs) buildReqs.emplace_back(DerivedPath { d }); buildStore->buildPaths(buildReqs, bmNormal, store); @@ -171,6 +179,78 @@ static void mkOutputString( o.second.path(*state.store, Derivation::nameFromPath(drvPath), o.first)); } +/** + * `import` will parse a derivation when it imports a `.drv` file from the store. + * + * @param state The evaluation state. + * @param pos The position of the `import` call. + * @param path The path to the `.drv` to import. + * @param storePath The path to the `.drv` to import. 
+ * @param v Return value + */ +void derivationToValue(EvalState & state, const PosIdx pos, const SourcePath & path, const StorePath & storePath, Value & v) { + auto path2 = path.path.abs(); + Derivation drv = state.store->readDerivation(storePath); + auto attrs = state.buildBindings(3 + drv.outputs.size()); + attrs.alloc(state.sDrvPath).mkString(path2, { + NixStringContextElem::DrvDeep { .drvPath = storePath }, + }); + attrs.alloc(state.sName).mkString(drv.env["name"]); + + auto list = state.buildList(drv.outputs.size()); + for (const auto & [i, o] : enumerate(drv.outputs)) { + mkOutputString(state, attrs, storePath, o); + (list[i] = state.allocValue())->mkString(o.first); + } + attrs.alloc(state.sOutputs).mkList(list); + + auto w = state.allocValue(); + w->mkAttrs(attrs); + + if (!state.vImportedDrvToDerivation) { + state.vImportedDrvToDerivation = allocRootValue(state.allocValue()); + state.eval(state.parseExprFromString( + #include "imported-drv-to-derivation.nix.gen.hh" + , state.rootPath(CanonPath::root)), **state.vImportedDrvToDerivation); + } + + state.forceFunction(**state.vImportedDrvToDerivation, pos, "while evaluating imported-drv-to-derivation.nix.gen.hh"); + v.mkApp(*state.vImportedDrvToDerivation, w); + state.forceAttrs(v, pos, "while calling imported-drv-to-derivation.nix.gen.hh"); +} + +/** + * Import a Nix file with an alternate base scope, as `builtins.scopedImport` does. + * + * @param state The evaluation state. + * @param pos The position of the import call. + * @param path The path to the file to import. + * @param vScope The base scope to use for the import. + * @param v Return value + */ +static void scopedImport(EvalState & state, const PosIdx pos, SourcePath & path, Value * vScope, Value & v) { + state.forceAttrs(*vScope, pos, "while evaluating the first argument passed to builtins.scopedImport"); + + Env * env = &state.allocEnv(vScope->attrs()->size()); + env->up = &state.baseEnv; + + auto staticEnv = std::make_shared(nullptr, state.staticBaseEnv.get(), vScope->attrs()->size()); + + unsigned int displ = 0; + for (auto & attr : *vScope->attrs()) { + staticEnv->vars.emplace_back(attr.name, displ); + env->values[displ++] = attr.value; + } + + // No need to call staticEnv.sort(), because + // args[0]->attrs is already sorted. + + printTalkative("evaluating file '%1%'", path); + Expr * e = state.parseExprFromFile(resolveExprPath(path), staticEnv); + + e->eval(state, *env, v); +} + /* Load and evaluate an expression from path specified by the argument. 
*/ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * vScope, Value & v) @@ -189,60 +269,13 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v }; if (auto storePath = isValidDerivationInStore()) { - Derivation drv = state.store->readDerivation(*storePath); - auto attrs = state.buildBindings(3 + drv.outputs.size()); - attrs.alloc(state.sDrvPath).mkString(path2, { - NixStringContextElem::DrvDeep { .drvPath = *storePath }, - }); - attrs.alloc(state.sName).mkString(drv.env["name"]); - - auto list = state.buildList(drv.outputs.size()); - for (const auto & [i, o] : enumerate(drv.outputs)) { - mkOutputString(state, attrs, *storePath, o); - (list[i] = state.allocValue())->mkString(o.first); - } - attrs.alloc(state.sOutputs).mkList(list); - - auto w = state.allocValue(); - w->mkAttrs(attrs); - - if (!state.vImportedDrvToDerivation) { - state.vImportedDrvToDerivation = allocRootValue(state.allocValue()); - state.eval(state.parseExprFromString( - #include "imported-drv-to-derivation.nix.gen.hh" - , state.rootPath(CanonPath::root)), **state.vImportedDrvToDerivation); - } - - state.forceFunction(**state.vImportedDrvToDerivation, pos, "while evaluating imported-drv-to-derivation.nix.gen.hh"); - v.mkApp(*state.vImportedDrvToDerivation, w); - state.forceAttrs(v, pos, "while calling imported-drv-to-derivation.nix.gen.hh"); + derivationToValue(state, pos, path, *storePath, v); + } + else if (vScope) { + scopedImport(state, pos, path, vScope, v); } - else { - if (!vScope) - state.evalFile(path, v); - else { - state.forceAttrs(*vScope, pos, "while evaluating the first argument passed to builtins.scopedImport"); - - Env * env = &state.allocEnv(vScope->attrs()->size()); - env->up = &state.baseEnv; - - auto staticEnv = std::make_shared(nullptr, state.staticBaseEnv.get(), vScope->attrs()->size()); - - unsigned int displ = 0; - for (auto & attr : *vScope->attrs()) { - staticEnv->vars.emplace_back(attr.name, displ); - env->values[displ++] = attr.value; - } - - // No need to call staticEnv.sort(), because - // args[0]->attrs is already sorted. - - printTalkative("evaluating file '%1%'", path); - Expr * e = state.parseExprFromFile(resolveExprPath(path), staticEnv); - - e->eval(state, *env, v); - } + state.evalFile(path, v); } } @@ -631,11 +664,7 @@ struct CompareValues }; -#if HAVE_BOEHMGC typedef std::list> ValueList; -#else -typedef std::list ValueList; -#endif static Bindings::const_iterator getAttr( @@ -941,6 +970,9 @@ static RegisterPrimOp primop_tryEval({ `let e = { x = throw ""; }; in (builtins.tryEval (builtins.deepSeq e e)).success` will be `false`. + + `tryEval` intentionally does not return the error message, because that risks bringing non-determinism into the evaluation result, and it would become very difficult to improve error reporting without breaking existing expressions. + Instead, use [`builtins.addErrorContext`](@docroot@/language/builtins.md#builtins-addErrorContext) to add context to the error message, and use a Nix unit testing tool for testing. 
)", .fun = prim_tryEval, }); @@ -2133,7 +2165,7 @@ static void prim_toXML(EvalState & state, const PosIdx pos, Value * * args, Valu std::ostringstream out; NixStringContext context; printValueAsXML(state, true, false, *args[0], out, context, pos); - v.mkString(out.str(), context); + v.mkString(toView(out), context); } static RegisterPrimOp primop_toXML({ @@ -2241,7 +2273,7 @@ static void prim_toJSON(EvalState & state, const PosIdx pos, Value * * args, Val std::ostringstream out; NixStringContext context; printValueAsJSON(state, true, *args[0], pos, out, context); - v.mkString(out.str(), context); + v.mkString(toView(out), context); } static RegisterPrimOp primop_toJSON({ @@ -3136,7 +3168,7 @@ static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value * * arg std::optional list; }; - std::map attrsSeen; + std::map, traceable_allocator>> attrsSeen; state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.zipAttrsWith"); state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.zipAttrsWith"); @@ -4272,7 +4304,7 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v) if (!match[i + 1].matched) v2 = &state.vNull; else - (v2 = state.allocValue())->mkString(match[i + 1].str()); + v2 = mkString(state, match[i + 1]); v.mkList(list); } catch (std::regex_error & e) { @@ -4356,7 +4388,7 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v) auto match = *i; // Add a string for non-matched characters. - (list[idx++] = state.allocValue())->mkString(match.prefix().str()); + list[idx++] = mkString(state, match.prefix()); // Add a list for matched substrings. const size_t slen = match.size() - 1; @@ -4367,14 +4399,14 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v) if (!match[si + 1].matched) v2 = &state.vNull; else - (v2 = state.allocValue())->mkString(match[si + 1].str()); + v2 = mkString(state, match[si + 1]); } (list[idx++] = state.allocValue())->mkList(list2); // Add a string for non-matched suffix characters. if (idx == 2 * len) - (list[idx++] = state.allocValue())->mkString(match.suffix().str()); + list[idx++] = mkString(state, match.suffix()); } assert(idx == 2 * len + 1); diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index 8c3f1b4e8..02683b173 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -86,7 +86,7 @@ static RegisterPrimOp primop_unsafeDiscardOutputDependency({ This is the opposite of [`builtins.addDrvOutputDependencies`](#builtins-addDrvOutputDependencies). - This is unsafe because it allows us to "forget" store objects we would have otherwise refered to with the string context, + This is unsafe because it allows us to "forget" store objects we would have otherwise referred to with the string context, whereas Nix normally tracks all dependencies consistently. Safe operations "grow" but never "shrink" string contexts. [`builtins.addDrvOutputDependencies`] in contrast is safe because "derivation deep" string context element always refers to the underlying derivation (among many more things). 
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 78328701d..d2266e2bc 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -11,6 +11,8 @@ #include "value-to-json.hh" #include "fetch-to-store.hh" +#include + #include #include #include @@ -246,7 +248,7 @@ static RegisterPrimOp primop_fetchTree({ The following source types and associated input attributes are supported. - `"file"` @@ -507,7 +509,11 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v // https://github.com/NixOS/nix/issues/4313 auto storePath = unpack - ? fetchToStore(*state.store, fetchers::downloadTarball(*url).accessor, FetchMode::Copy, name) + ? fetchToStore( + *state.store, + fetchers::downloadTarball(state.store, state.fetchSettings, *url), + FetchMode::Copy, + name) : fetchers::downloadFile(state.store, *url, name).storePath; if (expectedHash) { diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index b4f1df7a8..264046711 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -66,7 +66,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V attrs.alloc("_type").mkString("timestamp"); std::ostringstream s; s << t; - attrs.alloc("value").mkString(s.str()); + attrs.alloc("value").mkString(toView(s)); v.mkAttrs(attrs); } else { throw std::runtime_error("Dates and times are not supported"); diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 4d1a6868c..d62aaf25f 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -460,7 +460,7 @@ private: std::ostringstream s; s << state.positions[v.payload.lambda.fun->pos]; - output << " @ " << filterANSIEscapes(s.str()); + output << " @ " << filterANSIEscapes(toView(s)); } } else if (v.isPrimOp()) { if (v.primOp()) diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index f68befe0e..d98161488 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -4,15 +4,13 @@ #include #include +#include "eval-gc.hh" #include "symbol-table.hh" #include "value/context.hh" #include "source-path.hh" #include "print-options.hh" #include "checked-arithmetic.hh" -#if HAVE_BOEHMGC -#include -#endif #include namespace nix { @@ -143,7 +141,9 @@ public: Value * * elems; ListBuilder(EvalState & state, size_t size); - ListBuilder(ListBuilder && x) + // NOTE: Can be noexcept because we are just copying integral values and + // raw pointers. + ListBuilder(ListBuilder && x) noexcept : size(x.size) , inlineElems{x.inlineElems[0], x.inlineElems[1]} , elems(size <= 2 ? 
inlineElems : x.elems) @@ -498,15 +498,9 @@ void Value::mkBlackhole() } -#if HAVE_BOEHMGC typedef std::vector> ValueVector; typedef std::unordered_map, std::equal_to, traceable_allocator>> ValueMap; typedef std::map, traceable_allocator>> ValueVectorMap; -#else -typedef std::vector ValueVector; -typedef std::unordered_map ValueMap; -typedef std::map ValueVectorMap; -#endif /** diff --git a/src/libfetchers-tests/.version b/src/libfetchers-tests/.version new file mode 120000 index 000000000..b7badcd0c --- /dev/null +++ b/src/libfetchers-tests/.version @@ -0,0 +1 @@ +../../.version \ No newline at end of file diff --git a/src/libfetchers-tests/build-utils-meson b/src/libfetchers-tests/build-utils-meson new file mode 120000 index 000000000..5fff21bab --- /dev/null +++ b/src/libfetchers-tests/build-utils-meson @@ -0,0 +1 @@ +../../build-utils-meson \ No newline at end of file diff --git a/tests/unit/libfetchers/data/public-key/defaultType.json b/src/libfetchers-tests/data/public-key/defaultType.json similarity index 100% rename from tests/unit/libfetchers/data/public-key/defaultType.json rename to src/libfetchers-tests/data/public-key/defaultType.json diff --git a/tests/unit/libfetchers/data/public-key/noRoundTrip.json b/src/libfetchers-tests/data/public-key/noRoundTrip.json similarity index 100% rename from tests/unit/libfetchers/data/public-key/noRoundTrip.json rename to src/libfetchers-tests/data/public-key/noRoundTrip.json diff --git a/tests/unit/libfetchers/data/public-key/simple.json b/src/libfetchers-tests/data/public-key/simple.json similarity index 100% rename from tests/unit/libfetchers/data/public-key/simple.json rename to src/libfetchers-tests/data/public-key/simple.json diff --git a/tests/unit/libfetchers/git-utils.cc b/src/libfetchers-tests/git-utils.cc similarity index 98% rename from tests/unit/libfetchers/git-utils.cc rename to src/libfetchers-tests/git-utils.cc index de5110cc3..0bf3076dc 100644 --- a/tests/unit/libfetchers/git-utils.cc +++ b/src/libfetchers-tests/git-utils.cc @@ -77,7 +77,7 @@ TEST_F(GitUtilsTest, sink_basic) // sink->createHardlink("foo-1.1/links/foo-2", CanonPath("foo-1.1/hello")); - auto result = repo->dereferenceSingletonDirectory(sink->sync()); + auto result = repo->dereferenceSingletonDirectory(sink->flush()); auto accessor = repo->getAccessor(result, false); auto entries = accessor->readDirectory(CanonPath::root); ASSERT_EQ(entries.size(), 5); diff --git a/src/libfetchers-tests/local.mk b/src/libfetchers-tests/local.mk new file mode 100644 index 000000000..5c90f1fc7 --- /dev/null +++ b/src/libfetchers-tests/local.mk @@ -0,0 +1,37 @@ +check: libfetchers-tests_RUN + +programs += libfetchers-tests + +libfetchers-tests_NAME = libnixfetchers-tests + +libfetchers-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libfetchers-tests.xml + +libfetchers-tests_DIR := $(d) + +ifeq ($(INSTALL_UNIT_TESTS), yes) + libfetchers-tests_INSTALL_DIR := $(checkbindir) +else + libfetchers-tests_INSTALL_DIR := +endif + +libfetchers-tests_SOURCES := $(wildcard $(d)/*.cc) + +libfetchers-tests_EXTRA_INCLUDES = \ + -I src/libstore-test-support \ + -I src/libutil-test-support \ + $(INCLUDE_libfetchers) \ + $(INCLUDE_libstore) \ + $(INCLUDE_libutil) + +libfetchers-tests_CXXFLAGS += $(libfetchers-tests_EXTRA_INCLUDES) + +libfetchers-tests_LIBS = \ + libstore-test-support libutil-test-support \ + libfetchers libstore libutil + +libfetchers-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) $(LIBGIT2_LIBS) + +ifdef HOST_WINDOWS + # Increase the default reserved stack 
size to 65 MB so Nix doesn't run out of space + libfetchers-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024))) +endif diff --git a/tests/unit/libfetchers/meson.build b/src/libfetchers-tests/meson.build similarity index 97% rename from tests/unit/libfetchers/meson.build rename to src/libfetchers-tests/meson.build index dc9818e27..d948dbad6 100644 --- a/tests/unit/libfetchers/meson.build +++ b/src/libfetchers-tests/meson.build @@ -27,6 +27,7 @@ subdir('build-utils-meson/subprojects') subdir('build-utils-meson/threads') subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') rapidcheck = dependency('rapidcheck') deps_private += rapidcheck diff --git a/tests/unit/libfetchers/package.nix b/src/libfetchers-tests/package.nix similarity index 80% rename from tests/unit/libfetchers/package.nix rename to src/libfetchers-tests/package.nix index ad512f562..7b2ba8f2c 100644 --- a/tests/unit/libfetchers/package.nix +++ b/src/libfetchers-tests/package.nix @@ -1,12 +1,7 @@ { lib , buildPackages , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonExecutable , nix-fetchers , nix-store-test-support @@ -25,15 +20,15 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonExecutable (finalAttrs: { pname = "nix-fetchers-tests"; inherit version; workDir = ./.; fileset = fileset.unions [ - ../../../build-utils-meson + ../../build-utils-meson ./build-utils-meson - ../../../.version + ../../.version ./.version ./meson.build # ./meson.options @@ -41,12 +36,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) ]; - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - buildInputs = [ nix-fetchers nix-store-test-support @@ -59,7 +48,7 @@ mkMesonDerivation (finalAttrs: { # Do the meson utils, without modification. 
'' chmod u+w ./.version - echo ${version} > ../../../.version + echo ${version} > ../../.version ''; mesonFlags = [ @@ -69,10 +58,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - passthru = { tests = { run = runCommand "${finalAttrs.pname}-run" { diff --git a/tests/unit/libfetchers/public-key.cc b/src/libfetchers-tests/public-key.cc similarity index 91% rename from tests/unit/libfetchers/public-key.cc rename to src/libfetchers-tests/public-key.cc index 8a639da9f..80796bd0f 100644 --- a/tests/unit/libfetchers/public-key.cc +++ b/src/libfetchers-tests/public-key.cc @@ -10,11 +10,11 @@ using nlohmann::json; class PublicKeyTest : public CharacterizationTest { - Path unitTestData = getUnitTestData() + "/public-key"; + std::filesystem::path unitTestData = getUnitTestData() / "public-key"; public: - Path goldenMaster(std::string_view testStem) const override { - return unitTestData + "/" + testStem; + std::filesystem::path goldenMaster(std::string_view testStem) const override { + return unitTestData / testStem; } }; diff --git a/src/libfetchers/cache.cc b/src/libfetchers/cache.cc index 7019b0325..b0b6cb887 100644 --- a/src/libfetchers/cache.cc +++ b/src/libfetchers/cache.cc @@ -36,7 +36,7 @@ struct CacheImpl : Cache { auto state(_state.lock()); - auto dbPath = getCacheDir() + "/nix/fetcher-cache-v2.sqlite"; + auto dbPath = getCacheDir() + "/fetcher-cache-v2.sqlite"; createDirs(dirOf(dbPath)); state->db = SQLite(dbPath); diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index dee1f687b..b07e8cb6e 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -170,24 +170,6 @@ std::pair Input::fetchToStore(ref store) const if (!scheme) throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs())); - /* The tree may already be in the Nix store, or it could be - substituted (which is often faster than fetching from the - original source). So check that. */ - if (getNarHash()) { - try { - auto storePath = computeStorePath(*store); - - store->ensurePath(storePath); - - debug("using substituted/cached input '%s' in '%s'", - to_string(), store->printStorePath(storePath)); - - return {std::move(storePath), *this}; - } catch (Error & e) { - debug("substitution of input '%s' failed: %s", to_string(), e.what()); - } - } - auto [storePath, input] = [&]() -> std::pair { try { auto [accessor, final] = getAccessorUnchecked(store); diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 114aa4ec0..74e68fe12 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -13,13 +13,17 @@ #include #include #include +#include #include +#include #include #include #include #include #include #include +#include +#include #include #include @@ -76,6 +80,9 @@ typedef std::unique_ptr> StatusLi typedef std::unique_ptr> Remote; typedef std::unique_ptr> GitConfig; typedef std::unique_ptr> ConfigIterator; +typedef std::unique_ptr> ObjectDb; +typedef std::unique_ptr> PackBuilder; +typedef std::unique_ptr> Indexer; // A helper to ensure that we don't leak objects returned by libgit2. 
template @@ -159,24 +166,105 @@ static Object peelToTreeOrBlob(git_object * obj) return peelObject(obj, GIT_OBJECT_TREE); } +struct PackBuilderContext { + std::exception_ptr exception; + + void handleException(const char * activity, int errCode) + { + switch (errCode) { + case GIT_OK: + break; + case GIT_EUSER: + if (!exception) + panic("PackBuilderContext::handleException: user error, but exception was not set"); + + std::rethrow_exception(exception); + default: + throw Error("%s: %i, %s", Uncolored(activity), errCode, git_error_last()->message); + } + } +}; + +extern "C" { + +/** + * A `git_packbuilder_progress` implementation that aborts the pack building if needed. + */ +static int packBuilderProgressCheckInterrupt(int stage, uint32_t current, uint32_t total, void *payload) +{ + PackBuilderContext & args = * (PackBuilderContext *) payload; + try { + checkInterrupt(); + return GIT_OK; + } catch (const std::exception & e) { + args.exception = std::current_exception(); + return GIT_EUSER; + } +}; +static git_packbuilder_progress PACKBUILDER_PROGRESS_CHECK_INTERRUPT = &packBuilderProgressCheckInterrupt; + +} // extern "C" + +static void initRepoAtomically(std::filesystem::path &path, bool bare) { + if (pathExists(path.string())) return; + + Path tmpDir = createTempDir(os_string_to_string(PathViewNG { std::filesystem::path(path).parent_path() })); + AutoDelete delTmpDir(tmpDir, true); + Repository tmpRepo; + + if (git_repository_init(Setter(tmpRepo), tmpDir.c_str(), bare)) + throw Error("creating Git repository %s: %s", path, git_error_last()->message); + try { + std::filesystem::rename(tmpDir, path); + } catch (std::filesystem::filesystem_error & e) { + // Someone may race us to create the repository. + if (e.code() == std::errc::file_exists + // `path` may be attempted to be deleted by s::f::rename, in which case the code is: + || e.code() == std::errc::directory_not_empty) { + return; + } + else + throw SysError("moving temporary git repository from %s to %s", tmpDir, path); + } + // we successfully moved the repository, so the temporary directory no longer exists. + delTmpDir.cancel(); +} + struct GitRepoImpl : GitRepo, std::enable_shared_from_this { /** Location of the repository on disk. */ std::filesystem::path path; + /** + * libgit2 repository. Note that new objects are not written to disk, + * because we are using a mempack backend. For writing to disk, see + * `flush()`, which is also called by `GitFileSystemObjectSink::sync()`. + */ Repository repo; + /** + * In-memory object store for efficient batched writing to packfiles. + * Owned by `repo`. + */ + git_odb_backend * mempack_backend; GitRepoImpl(std::filesystem::path _path, bool create, bool bare) : path(std::move(_path)) { initLibGit2(); - if (pathExists(path.string())) { - if (git_repository_open(Setter(repo), path.string().c_str())) - throw Error("opening Git repository '%s': %s", path, git_error_last()->message); - } else { - if (git_repository_init(Setter(repo), path.string().c_str(), bare)) - throw Error("creating Git repository '%s': %s", path, git_error_last()->message); - } + initRepoAtomically(path, bare); + if (git_repository_open(Setter(repo), path.string().c_str())) + throw Error("opening Git repository %s: %s", path, git_error_last()->message); + + ObjectDb odb; + if (git_repository_odb(Setter(odb), repo.get())) + throw Error("getting Git object database: %s", git_error_last()->message); + + // mempack_backend will be owned by the repository, so we are not expected to free it ourselves. 
+ if (git_mempack_new(&mempack_backend)) + throw Error("creating mempack backend: %s", git_error_last()->message); + + if (git_odb_add_backend(odb.get(), mempack_backend, 999)) + throw Error("adding mempack backend to Git object database: %s", git_error_last()->message); } operator git_repository * () @@ -184,6 +272,62 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this return repo.get(); } + void flush() override { + checkInterrupt(); + + git_buf buf = GIT_BUF_INIT; + Finally _disposeBuf { [&] { git_buf_dispose(&buf); } }; + PackBuilder packBuilder; + PackBuilderContext packBuilderContext; + git_packbuilder_new(Setter(packBuilder), *this); + git_packbuilder_set_callbacks(packBuilder.get(), PACKBUILDER_PROGRESS_CHECK_INTERRUPT, &packBuilderContext); + git_packbuilder_set_threads(packBuilder.get(), 0 /* autodetect */); + + packBuilderContext.handleException( + "preparing packfile", + git_mempack_write_thin_pack(mempack_backend, packBuilder.get()) + ); + checkInterrupt(); + packBuilderContext.handleException( + "writing packfile", + git_packbuilder_write_buf(&buf, packBuilder.get()) + ); + checkInterrupt(); + + std::string repo_path = std::string(git_repository_path(repo.get())); + while (!repo_path.empty() && repo_path.back() == '/') + repo_path.pop_back(); + std::string pack_dir_path = repo_path + "/objects/pack"; + + // TODO (performance): could the indexing be done in a separate thread? + // we'd need a more streaming variation of + // git_packbuilder_write_buf, or incur the cost of + // copying parts of the buffer to a separate thread. + // (synchronously on the git_packbuilder_write_buf thread) + Indexer indexer; + git_indexer_progress stats; + if (git_indexer_new(Setter(indexer), pack_dir_path.c_str(), 0, nullptr, nullptr)) + throw Error("creating git packfile indexer: %s", git_error_last()->message); + + // TODO: provide index callback for checkInterrupt() termination + // though this is about an order of magnitude faster than the packbuilder + // expect up to 1 sec latency due to uninterruptible git_indexer_append. 
+ constexpr size_t chunkSize = 128 * 1024; + for (size_t offset = 0; offset < buf.size; offset += chunkSize) { + if (git_indexer_append(indexer.get(), buf.ptr + offset, std::min(chunkSize, buf.size - offset), &stats)) + throw Error("appending to git packfile index: %s", git_error_last()->message); + checkInterrupt(); + } + + if (git_indexer_commit(indexer.get(), &stats)) + throw Error("committing git packfile index: %s", git_error_last()->message); + + if (git_mempack_reset(mempack_backend)) + throw Error("resetting git mempack backend: %s", git_error_last()->message); + + checkInterrupt(); + } + uint64_t getRevCount(const Hash & rev) override { std::unordered_set done; @@ -460,7 +604,13 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this std::string re = R"(Good "git" signature for \* with .* key SHA256:[)"; for (const fetchers::PublicKey & k : publicKeys){ // Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally - auto fingerprint = trim(hashString(HashAlgorithm::SHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "="); + std::string keyDecoded; + try { + keyDecoded = base64Decode(k.key); + } catch (Error & e) { + e.addTrace({}, "while decoding public key '%s' used for git signature", k.key); + } + auto fingerprint = trim(hashString(HashAlgorithm::SHA256, keyDecoded).to_string(nix::HashFormat::Base64, false), "="); auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" ); re += "(" + escaped_fingerprint + ")"; } @@ -601,12 +751,16 @@ struct GitSourceAccessor : SourceAccessor return readBlob(path, true); } - Hash getSubmoduleRev(const CanonPath & path) + /** + * If `path` exists and is a submodule, return its + * revision. Otherwise return nothing. + */ + std::optional getSubmoduleRev(const CanonPath & path) { - auto entry = need(path); + auto entry = lookup(path); - if (git_tree_entry_type(entry) != GIT_OBJECT_COMMIT) - throw Error("'%s' is not a submodule", showPath(path)); + if (!entry || git_tree_entry_type(entry) != GIT_OBJECT_COMMIT) + return std::nullopt; return toHash(*git_tree_entry_id(entry)); } @@ -827,8 +981,24 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink void pushBuilder(std::string name) { + const git_tree_entry * entry; + Tree prevTree = nullptr; + + if (!pendingDirs.empty() && + (entry = git_treebuilder_get(pendingDirs.back().builder.get(), name.c_str()))) + { + /* Clone a tree that we've already finished. This happens + if a tarball has directory entries that are not + contiguous. 
*/ + if (git_tree_entry_type(entry) != GIT_OBJECT_TREE) + throw Error("parent of '%s' is not a directory", name); + + if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(prevTree), *repo, entry)) + throw Error("looking up parent of '%s': %s", name, git_error_last()->message); + } + git_treebuilder * b; - if (git_treebuilder_new(&b, *repo, nullptr)) + if (git_treebuilder_new(&b, *repo, prevTree.get())) throw Error("creating a tree builder: %s", git_error_last()->message); pendingDirs.push_back({ .name = std::move(name), .builder = TreeBuilder(b) }); }; @@ -1002,12 +1172,14 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink git_tree_entry_filemode(entry)); } - Hash sync() override + Hash flush() override { updateBuilders({}); auto [oid, _name] = popBuilder(); + repo->flush(); + return toHash(oid); } }; @@ -1074,8 +1246,10 @@ std::vector> GitRepoImpl::getSubmodules auto rawAccessor = getRawAccessor(rev); for (auto & submodule : parseSubmodules(pathTemp)) { - auto rev = rawAccessor->getSubmoduleRev(submodule.path); - result.push_back({std::move(submodule), rev}); + /* Filter out .gitmodules entries that don't exist or are not + submodules. */ + if (auto rev = rawAccessor->getSubmoduleRev(submodule.path)) + result.push_back({std::move(submodule), *rev}); } return result; @@ -1083,7 +1257,7 @@ std::vector> GitRepoImpl::getSubmodules ref getTarballCache() { - static auto repoDir = std::filesystem::path(getCacheDir()) / "nix" / "tarball-cache"; + static auto repoDir = std::filesystem::path(getCacheDir()) / "tarball-cache"; return GitRepo::openRepo(repoDir, true, true); } diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh index 915252868..f45b5a504 100644 --- a/src/libfetchers/git-utils.hh +++ b/src/libfetchers/git-utils.hh @@ -7,12 +7,16 @@ namespace nix { namespace fetchers { struct PublicKey; } +/** + * A sink that writes into a Git repository. Note that nothing may be written + * until `flush()` is called. + */ struct GitFileSystemObjectSink : ExtendedFileSystemObjectSink { /** * Flush builder and return a final Git hash. */ - virtual Hash sync() = 0; + virtual Hash flush() = 0; }; struct GitRepo @@ -80,6 +84,8 @@ struct GitRepo virtual ref getFileSystemObjectSink() = 0; + virtual void flush() = 0; + virtual void fetch( const std::string & url, const std::string & refspec, diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 13939e8c3..f7d7c2e1e 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -13,8 +13,8 @@ #include "git-utils.hh" #include "logging.hh" #include "finally.hh" - #include "fetch-settings.hh" +#include "json-utils.hh" #include #include @@ -44,7 +44,7 @@ bool isCacheFileWithinTtl(time_t now, const struct stat & st) Path getCachePath(std::string_view key, bool shallow) { return getCacheDir() - + "/nix/gitv3/" + + "/gitv3/" + hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false) + (shallow ? 
"-shallow" : ""); } @@ -585,9 +585,10 @@ struct GitInputScheme : InputScheme } try { - setWriteTime(localRefFile, now, now); + if (!input.getRev()) + setWriteTime(localRefFile, now, now); } catch (Error & e) { - warn("could not update mtime for file '%s': %s", localRefFile, e.msg()); + warn("could not update mtime for file '%s': %s", localRefFile, e.info().msg); } if (!originalRef && !storeCachedHead(repoInfo.url, ref)) warn("could not update cached head '%s' for '%s'", ref, repoInfo.url); diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 2e914164a..308cff33a 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -261,11 +261,12 @@ struct GitArchiveInputScheme : InputScheme auto tarballCache = getTarballCache(); auto parseSink = tarballCache->getFileSystemObjectSink(); auto lastModified = unpackTarfileToSink(archive, *parseSink); + auto tree = parseSink->flush(); act.reset(); TarballInfo tarballInfo { - .treeHash = tarballCache->dereferenceSingletonDirectory(parseSink->sync()), + .treeHash = tarballCache->dereferenceSingletonDirectory(tree), .lastModified = lastModified }; diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 3feb3cb19..2c987f79d 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -263,7 +263,7 @@ struct MercurialInputScheme : InputScheme return makeResult(res->value, res->storePath); } - Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false)); + Path cacheDir = fmt("%s/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false)); /* If this is a commit hash that we already have, we don't have to pull again. */ diff --git a/src/libfetchers/package.nix b/src/libfetchers/package.nix index 9b5d8bff7..70973bdb2 100644 --- a/src/libfetchers/package.nix +++ b/src/libfetchers/package.nix @@ -1,17 +1,11 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonLibrary , nix-util , nix-store , nlohmann_json , libgit2 -, man # Configuration Options @@ -22,7 +16,7 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-fetchers"; inherit version; @@ -37,14 +31,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) ]; - outputs = [ "out" "dev" ]; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - buildInputs = [ libgit2 ]; @@ -67,10 +53,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index fca0df84b..fe1534aba 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -157,7 +157,11 @@ struct PathInputScheme : InputScheme }); storePath = store->addToStoreFromDump(*src, "source"); } - input.attrs.insert_or_assign("lastModified", uint64_t(mtime)); + + /* Trust the lastModified value supplied by the user, if + any. It's not a "secure" attribute so we don't care. 
*/ + if (!input.getLastModified()) + input.attrs.insert_or_assign("lastModified", uint64_t(mtime)); return {makeStorePathAccessor(store, *storePath), std::move(input)}; } diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc index 3c893c8ea..7f7a09053 100644 --- a/src/libfetchers/registry.cc +++ b/src/libfetchers/registry.cc @@ -116,7 +116,7 @@ static std::shared_ptr getSystemRegistry(const Settings & settings) Path getUserRegistryPath() { - return getConfigDir() + "/nix/registry.json"; + return getConfigDir() + "/registry.json"; } std::shared_ptr getUserRegistry(const Settings & settings) @@ -159,7 +159,7 @@ static std::shared_ptr getGlobalRegistry(const Settings & settings, re if (!hasPrefix(path, "/")) { auto storePath = downloadFile(store, path, "flake-registry.json").storePath; if (auto store2 = store.dynamic_pointer_cast()) - store2->addPermRoot(storePath, getCacheDir() + "/nix/flake-registry.json"); + store2->addPermRoot(storePath, getCacheDir() + "/flake-registry.json"); path = store->toRealPath(storePath); } diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index 457210542..28574e7b1 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -90,6 +90,7 @@ DownloadFileResult downloadFile( /* Cache metadata for all URLs in the redirect chain. */ for (auto & url : res.urls) { key.second.insert_or_assign("url", url); + assert(!res.urls.empty()); infoAttrs.insert_or_assign("url", *res.urls.rbegin()); getCache()->upsert(key, *store, infoAttrs, *storePath); } @@ -102,7 +103,7 @@ DownloadFileResult downloadFile( }; } -DownloadTarballResult downloadTarball( +static DownloadTarballResult downloadTarball_( const std::string & url, const Headers & headers) { @@ -170,6 +171,7 @@ DownloadTarballResult downloadTarball( auto tarballCache = getTarballCache(); auto parseSink = tarballCache->getFileSystemObjectSink(); auto lastModified = unpackTarfileToSink(archive, *parseSink); + auto tree = parseSink->flush(); act.reset(); @@ -184,7 +186,7 @@ DownloadTarballResult downloadTarball( } else { infoAttrs.insert_or_assign("etag", res->etag); infoAttrs.insert_or_assign("treeHash", - tarballCache->dereferenceSingletonDirectory(parseSink->sync()).gitRev()); + tarballCache->dereferenceSingletonDirectory(tree).gitRev()); infoAttrs.insert_or_assign("lastModified", uint64_t(lastModified)); if (res->immutableUrl) infoAttrs.insert_or_assign("immutableUrl", *res->immutableUrl); @@ -202,6 +204,22 @@ DownloadTarballResult downloadTarball( return attrsToResult(infoAttrs); } +ref downloadTarball( + ref store, + const Settings & settings, + const std::string & url) +{ + /* Go through Input::getAccessor() to ensure that the resulting + accessor has a fingerprint. */ + fetchers::Attrs attrs; + attrs.insert_or_assign("type", "tarball"); + attrs.insert_or_assign("url", url); + + auto input = Input::fromAttrs(settings, std::move(attrs)); + + return input.getAccessor(store).first; +} + // An input scheme corresponding to a curl-downloadable resource. 
struct CurlInputScheme : InputScheme { @@ -353,7 +371,7 @@ struct TarballInputScheme : CurlInputScheme { auto input(_input); - auto result = downloadTarball(getStrAttr(input.attrs, "url"), {}); + auto result = downloadTarball_(getStrAttr(input.attrs, "url"), {}); result.accessor->setPathDisplay("«" + input.to_string() + "»"); diff --git a/src/libfetchers/tarball.hh b/src/libfetchers/tarball.hh index d9bdd123d..2042041d5 100644 --- a/src/libfetchers/tarball.hh +++ b/src/libfetchers/tarball.hh @@ -14,6 +14,8 @@ struct SourceAccessor; namespace nix::fetchers { +struct Settings; + struct DownloadFileResult { StorePath storePath; @@ -40,8 +42,9 @@ struct DownloadTarballResult * Download and import a tarball into the Git cache. The result is the * Git tree hash of the root directory. */ -DownloadTarballResult downloadTarball( - const std::string & url, - const Headers & headers = {}); +ref downloadTarball( + ref store, + const Settings & settings, + const std::string & url); } diff --git a/src/libflake-tests/.version b/src/libflake-tests/.version new file mode 120000 index 000000000..b7badcd0c --- /dev/null +++ b/src/libflake-tests/.version @@ -0,0 +1 @@ +../../.version \ No newline at end of file diff --git a/src/libflake-tests/build-utils-meson b/src/libflake-tests/build-utils-meson new file mode 120000 index 000000000..5fff21bab --- /dev/null +++ b/src/libflake-tests/build-utils-meson @@ -0,0 +1 @@ +../../build-utils-meson \ No newline at end of file diff --git a/tests/unit/libflake/data/.gitkeep b/src/libflake-tests/data/.gitkeep similarity index 100% rename from tests/unit/libflake/data/.gitkeep rename to src/libflake-tests/data/.gitkeep diff --git a/tests/unit/libflake/flakeref.cc b/src/libflake-tests/flakeref.cc similarity index 100% rename from tests/unit/libflake/flakeref.cc rename to src/libflake-tests/flakeref.cc diff --git a/src/libflake-tests/local.mk b/src/libflake-tests/local.mk new file mode 100644 index 000000000..8599b43f6 --- /dev/null +++ b/src/libflake-tests/local.mk @@ -0,0 +1,43 @@ +check: libflake-tests_RUN + +programs += libflake-tests + +libflake-tests_NAME := libnixflake-tests + +libflake-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libflake-tests.xml + +libflake-tests_DIR := $(d) + +ifeq ($(INSTALL_UNIT_TESTS), yes) + libflake-tests_INSTALL_DIR := $(checkbindir) +else + libflake-tests_INSTALL_DIR := +endif + +libflake-tests_SOURCES := \ + $(wildcard $(d)/*.cc) \ + $(wildcard $(d)/value/*.cc) \ + $(wildcard $(d)/flake/*.cc) + +libflake-tests_EXTRA_INCLUDES = \ + -I src/libflake-test-support \ + -I src/libstore-test-support \ + -I src/libutil-test-support \ + $(INCLUDE_libflake) \ + $(INCLUDE_libexpr) \ + $(INCLUDE_libfetchers) \ + $(INCLUDE_libstore) \ + $(INCLUDE_libutil) \ + +libflake-tests_CXXFLAGS += $(libflake-tests_EXTRA_INCLUDES) + +libflake-tests_LIBS = \ + libexpr-test-support libstore-test-support libutil-test-support \ + libflake libexpr libfetchers libstore libutil + +libflake-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) -lgmock + +ifdef HOST_WINDOWS + # Increase the default reserved stack size to 65 MB so Nix doesn't run out of space + libflake-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024))) +endif diff --git a/tests/unit/libflake/meson.build b/src/libflake-tests/meson.build similarity index 97% rename from tests/unit/libflake/meson.build rename to src/libflake-tests/meson.build index c022d7f41..592a7493b 100644 --- a/tests/unit/libflake/meson.build +++ b/src/libflake-tests/meson.build @@ -27,6 +27,7 @@ 
subdir('build-utils-meson/subprojects') subdir('build-utils-meson/threads') subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') rapidcheck = dependency('rapidcheck') deps_private += rapidcheck diff --git a/tests/unit/libflake/package.nix b/src/libflake-tests/package.nix similarity index 80% rename from tests/unit/libflake/package.nix rename to src/libflake-tests/package.nix index 0d63d2ff7..67e716979 100644 --- a/tests/unit/libflake/package.nix +++ b/src/libflake-tests/package.nix @@ -1,12 +1,7 @@ { lib , buildPackages , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonExecutable , nix-flake , nix-expr-test-support @@ -25,15 +20,15 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonExecutable (finalAttrs: { pname = "nix-flake-tests"; inherit version; workDir = ./.; fileset = fileset.unions [ - ../../../build-utils-meson + ../../build-utils-meson ./build-utils-meson - ../../../.version + ../../.version ./.version ./meson.build # ./meson.options @@ -41,12 +36,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) ]; - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - buildInputs = [ nix-flake nix-expr-test-support @@ -59,7 +48,7 @@ mkMesonDerivation (finalAttrs: { # Do the meson utils, without modification. '' chmod u+w ./.version - echo ${version} > ../../../.version + echo ${version} > ../../.version ''; mesonFlags = [ @@ -69,10 +58,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - passthru = { tests = { run = runCommand "${finalAttrs.pname}-run" { diff --git a/tests/unit/libflake/url-name.cc b/src/libflake-tests/url-name.cc similarity index 100% rename from tests/unit/libflake/url-name.cc rename to src/libflake-tests/url-name.cc diff --git a/src/libflake/flake/config.cc b/src/libflake/flake/config.cc index 52f541543..5d655de21 100644 --- a/src/libflake/flake/config.cc +++ b/src/libflake/flake/config.cc @@ -12,7 +12,7 @@ typedef std::map> TrustedList; Path trustedListPath() { - return getDataDir() + "/nix/trusted-settings.json"; + return getDataDir() + "/trusted-settings.json"; } static TrustedList readTrustedList() diff --git a/src/libflake/flake/flake.cc b/src/libflake/flake/flake.cc index f7fe4df54..9b1066a1e 100644 --- a/src/libflake/flake/flake.cc +++ b/src/libflake/flake/flake.cc @@ -13,6 +13,8 @@ #include "value-to-json.hh" #include "local-fs-store.hh" +#include + namespace nix { using namespace flake; @@ -750,6 +752,21 @@ LockedFlake lockFlake( } } +std::pair sourcePathToStorePath( + ref store, + const SourcePath & _path) +{ + auto path = _path.path.abs(); + + if (auto store2 = store.dynamic_pointer_cast()) { + auto realStoreDir = store2->getRealStoreDir(); + if (isInDir(path, realStoreDir)) + path = store2->storeDir + path.substr(realStoreDir.size()); + } + + return store->toStorePath(path); +} + void callFlake(EvalState & state, const LockedFlake & lockedFlake, Value & vRes) @@ -767,17 +784,7 @@ void callFlake(EvalState & state, auto lockedNode = node.dynamic_pointer_cast(); - // FIXME: This is a hack to support chroot stores. Remove this - // once we can pass a sourcePath rather than a storePath to - // call-flake.nix. 
- auto path = sourcePath.path.abs(); - if (auto store = state.store.dynamic_pointer_cast()) { - auto realStoreDir = store->getRealStoreDir(); - if (isInDir(path, realStoreDir)) - path = store->storeDir + path.substr(realStoreDir.size()); - } - - auto [storePath, subdir] = state.store->toStorePath(path); + auto [storePath, subdir] = sourcePathToStorePath(state.store, sourcePath); emitTreeAttrs( state, diff --git a/src/libflake/flake/flake.hh b/src/libflake/flake/flake.hh index cce17009c..496e18673 100644 --- a/src/libflake/flake/flake.hh +++ b/src/libflake/flake/flake.hh @@ -214,6 +214,16 @@ void callFlake( const LockedFlake & lockedFlake, Value & v); +/** + * Map a `SourcePath` to the corresponding store path. This is a + * temporary hack to support chroot stores while we don't have full + * lazy trees. FIXME: Remove this once we can pass a sourcePath rather + * than a storePath to call-flake.nix. + */ +std::pair sourcePathToStorePath( + ref store, + const SourcePath & path); + } void emitTreeAttrs( diff --git a/src/libflake/flake/flakeref.cc b/src/libflake/flake/flakeref.cc index a57fce9f3..01fe747f9 100644 --- a/src/libflake/flake/flakeref.cc +++ b/src/libflake/flake/flakeref.cc @@ -88,7 +88,7 @@ std::pair parsePathFlakeRefWithFragment( if (fragmentStart != std::string::npos) { fragment = percentDecode(url.substr(fragmentStart+1)); } - if (pathEnd != std::string::npos && fragmentStart != std::string::npos) { + if (pathEnd != std::string::npos && fragmentStart != std::string::npos && url[pathEnd] == '?') { query = decodeQuery(url.substr(pathEnd+1, fragmentStart-pathEnd-1)); } diff --git a/src/libflake/flake/lockfile.cc b/src/libflake/flake/lockfile.cc index 80f14ff6f..70b60716f 100644 --- a/src/libflake/flake/lockfile.cc +++ b/src/libflake/flake/lockfile.cc @@ -54,12 +54,13 @@ StorePath LockedNode::computeStorePath(Store & store) const } -static std::shared_ptr doFind(const ref& root, const InputPath & path, std::vector& visited) { +static std::shared_ptr doFind(const ref & root, const InputPath & path, std::vector & visited) +{ auto pos = root; auto found = std::find(visited.cbegin(), visited.cend(), path); - if(found != visited.end()) { + if (found != visited.end()) { std::vector cycle; std::transform(found, visited.cend(), std::back_inserter(cycle), printInputPath); cycle.push_back(printInputPath(path)); diff --git a/src/libflake/package.nix b/src/libflake/package.nix index 851adf07e..fff481720 100644 --- a/src/libflake/package.nix +++ b/src/libflake/package.nix @@ -1,19 +1,12 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonLibrary , nix-util , nix-store , nix-fetchers , nix-expr , nlohmann_json -, libgit2 -, man # Configuration Options @@ -24,7 +17,7 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-flake"; inherit version; @@ -39,14 +32,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) 
]; - outputs = [ "out" "dev" ]; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - propagatedBuildInputs = [ nix-store nix-util @@ -67,10 +52,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libmain-c/meson.build b/src/libmain-c/meson.build index 345382712..0ec0e3f6d 100644 --- a/src/libmain-c/meson.build +++ b/src/libmain-c/meson.build @@ -68,6 +68,7 @@ headers = [config_h] + files( ) subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') this_library = library( 'nixmainc', diff --git a/src/libmain-c/package.nix b/src/libmain-c/package.nix index ce6f67300..5522037f3 100644 --- a/src/libmain-c/package.nix +++ b/src/libmain-c/package.nix @@ -1,11 +1,6 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonLibrary , nix-util-c , nix-store @@ -21,7 +16,7 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-main-c"; inherit version; @@ -38,14 +33,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "h") ./.) ]; - outputs = [ "out" "dev" ]; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - propagatedBuildInputs = [ nix-util-c nix-store @@ -68,10 +55,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libmain/package.nix b/src/libmain/package.nix index 47513dbdc..7e7b80472 100644 --- a/src/libmain/package.nix +++ b/src/libmain/package.nix @@ -1,11 +1,6 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonLibrary , openssl @@ -21,7 +16,7 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-main"; inherit version; @@ -36,14 +31,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) ]; - outputs = [ "out" "dev" ]; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - propagatedBuildInputs = [ nix-util nix-store @@ -62,10 +49,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc index bb4c52ef7..fa0b73ebe 100644 --- a/src/libmain/progress-bar.cc +++ b/src/libmain/progress-bar.cc @@ -77,6 +77,9 @@ private: bool haveUpdate = true; }; + /** Helps avoid unnecessary redraws, see `redraw()` */ + Sync lastOutput_; + Sync state_; std::thread updateThread; @@ -155,10 +158,10 @@ public: { auto state(state_.lock()); - std::stringstream oss; + std::ostringstream oss; showErrorInfo(oss, ei, loggerSettings.showTrace.get()); - log(*state, ei.level, oss.str()); + log(*state, ei.level, toView(oss)); } void log(State & state, Verbosity lvl, std::string_view s) @@ -359,6 +362,22 @@ public: updateCV.notify_one(); } + /** + * Redraw, if the output has changed. 
+ * + * Excessive redrawing is noticeable on slow terminals, and it interferes + * with text selection in some terminals, including libvte-based terminal + * emulators. + */ + void redraw(std::string newOutput) + { + auto lastOutput(lastOutput_.lock()); + if (newOutput != *lastOutput) { + writeToStderr(newOutput); + *lastOutput = std::move(newOutput); + } + } + std::chrono::milliseconds draw(State & state) { auto nextWakeup = std::chrono::milliseconds::max(); @@ -412,7 +431,7 @@ public: auto width = getWindowSize().second; if (width <= 0) width = std::numeric_limits::max(); - writeToStderr("\r" + filterANSIEscapes(line, false, width) + ANSI_NORMAL + "\e[K"); + redraw("\r" + filterANSIEscapes(line, false, width) + ANSI_NORMAL + "\e[K"); return nextWakeup; } @@ -524,7 +543,7 @@ public: auto state(state_.lock()); if (!state->active) return {}; std::cerr << fmt("\r\e[K%s ", msg); - auto s = trim(readLine(STDIN_FILENO)); + auto s = trim(readLine(getStandardInput(), true)); if (s.size() != 1) return {}; draw(*state); return s[0]; diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 8ba1af2d3..3c026ee9d 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -419,7 +419,7 @@ RunPager::~RunPager() } #endif } catch (...) { - ignoreException(); + ignoreExceptionInDestructor(); } } diff --git a/src/libstore-c/meson.build b/src/libstore-c/meson.build index 4bfd944c6..d4f86eeff 100644 --- a/src/libstore-c/meson.build +++ b/src/libstore-c/meson.build @@ -67,6 +67,7 @@ headers = [config_h] + files( ) headers += files('nix_api_store_internal.h') subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') this_library = library( 'nixstorec', diff --git a/src/libstore-c/nix_api_store.cc b/src/libstore-c/nix_api_store.cc index 79841ca49..fb7391276 100644 --- a/src/libstore-c/nix_api_store.cc +++ b/src/libstore-c/nix_api_store.cc @@ -144,3 +144,15 @@ StorePath * nix_store_path_clone(const StorePath * p) { return new StorePath{p->path}; } + +nix_err nix_store_copy_closure(nix_c_context * context, Store * srcStore, Store * dstStore, StorePath * path) +{ + if (context) + context->last_err_code = NIX_OK; + try { + nix::RealisedPath::Set paths; + paths.insert(path->path); + nix::copyClosure(*srcStore->ptr, *dstStore->ptr, paths); + } + NIXC_CATCH_ERRS +} diff --git a/src/libstore-c/nix_api_store.h b/src/libstore-c/nix_api_store.h index 4b2134457..93208cb7c 100644 --- a/src/libstore-c/nix_api_store.h +++ b/src/libstore-c/nix_api_store.h @@ -161,6 +161,16 @@ nix_err nix_store_realise( nix_err nix_store_get_version(nix_c_context * context, Store * store, nix_get_string_callback callback, void * user_data); +/** + * @brief Copy the closure of `path` from `srcStore` to `dstStore`.
+ * + * @param[out] context Optional, stores error information + * @param[in] srcStore nix source store reference + * @param[in] dstStore nix destination store reference + * @param[in] path Path to copy + */ +nix_err nix_store_copy_closure(nix_c_context * context, Store * srcStore, Store * dstStore, StorePath * path); + // cffi end #ifdef __cplusplus } diff --git a/src/libstore-c/package.nix b/src/libstore-c/package.nix index e4f372236..896a1a39f 100644 --- a/src/libstore-c/package.nix +++ b/src/libstore-c/package.nix @@ -1,11 +1,6 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonLibrary , nix-util-c , nix-store @@ -19,7 +14,7 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-store-c"; inherit version; @@ -36,14 +31,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "h") ./.) ]; - outputs = [ "out" "dev" ]; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - propagatedBuildInputs = [ nix-util-c nix-store @@ -64,10 +51,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libstore-test-support/.version b/src/libstore-test-support/.version new file mode 120000 index 000000000..b7badcd0c --- /dev/null +++ b/src/libstore-test-support/.version @@ -0,0 +1 @@ +../../.version \ No newline at end of file diff --git a/src/libstore-test-support/build-utils-meson b/src/libstore-test-support/build-utils-meson new file mode 120000 index 000000000..5fff21bab --- /dev/null +++ b/src/libstore-test-support/build-utils-meson @@ -0,0 +1 @@ +../../build-utils-meson \ No newline at end of file diff --git a/src/libstore-test-support/local.mk b/src/libstore-test-support/local.mk new file mode 100644 index 000000000..56dedd825 --- /dev/null +++ b/src/libstore-test-support/local.mk @@ -0,0 +1,21 @@ +libraries += libstore-test-support + +libstore-test-support_NAME = libnixstore-test-support + +libstore-test-support_DIR := $(d) + +ifeq ($(INSTALL_UNIT_TESTS), yes) + libstore-test-support_INSTALL_DIR := $(checklibdir) +else + libstore-test-support_INSTALL_DIR := +endif + +libstore-test-support_SOURCES := $(wildcard $(d)/tests/*.cc) + +libstore-test-support_CXXFLAGS += $(libstore-tests_EXTRA_INCLUDES) + +libstore-test-support_LIBS = \ + libutil-test-support \ + libstore libutil + +libstore-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck diff --git a/tests/unit/libstore-support/meson.build b/src/libstore-test-support/meson.build similarity index 97% rename from tests/unit/libstore-support/meson.build rename to src/libstore-test-support/meson.build index f09d26a31..98ec9882e 100644 --- a/tests/unit/libstore-support/meson.build +++ b/src/libstore-test-support/meson.build @@ -58,6 +58,7 @@ headers = files( ) subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') this_library = library( 'nix-store-test-support', diff --git a/tests/unit/libstore-support/package.nix b/src/libstore-test-support/package.nix similarity index 69% rename from tests/unit/libstore-support/package.nix rename to src/libstore-test-support/package.nix index f512db3ee..48f8b5e6b 100644 --- a/tests/unit/libstore-support/package.nix +++ b/src/libstore-test-support/package.nix @@ -1,11 +1,6 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools -
-, meson -, ninja -, pkg-config +, mkMesonLibrary , nix-util-test-support , nix-store @@ -21,15 +16,15 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-store-test-support"; inherit version; workDir = ./.; fileset = fileset.unions [ - ../../../build-utils-meson + ../../build-utils-meson ./build-utils-meson - ../../../.version + ../../.version ./.version ./meson.build # ./meson.options @@ -37,14 +32,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) ]; - outputs = [ "out" "dev" ]; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - propagatedBuildInputs = [ nix-util-test-support nix-store @@ -56,7 +43,7 @@ mkMesonDerivation (finalAttrs: { # Do the meson utils, without modification. '' chmod u+w ./.version - echo ${version} > ../../../.version + echo ${version} > ../../.version ''; mesonFlags = [ @@ -66,10 +53,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/tests/unit/libstore-support/tests/derived-path.cc b/src/libstore-test-support/tests/derived-path.cc similarity index 100% rename from tests/unit/libstore-support/tests/derived-path.cc rename to src/libstore-test-support/tests/derived-path.cc diff --git a/tests/unit/libstore-support/tests/derived-path.hh b/src/libstore-test-support/tests/derived-path.hh similarity index 100% rename from tests/unit/libstore-support/tests/derived-path.hh rename to src/libstore-test-support/tests/derived-path.hh diff --git a/tests/unit/libstore-support/tests/libstore.hh b/src/libstore-test-support/tests/libstore.hh similarity index 100% rename from tests/unit/libstore-support/tests/libstore.hh rename to src/libstore-test-support/tests/libstore.hh diff --git a/tests/unit/libstore-support/tests/nix_api_store.hh b/src/libstore-test-support/tests/nix_api_store.hh similarity index 75% rename from tests/unit/libstore-support/tests/nix_api_store.hh rename to src/libstore-test-support/tests/nix_api_store.hh index a2d35d083..b7d5c2c33 100644 --- a/tests/unit/libstore-support/tests/nix_api_store.hh +++ b/src/libstore-test-support/tests/nix_api_store.hh @@ -3,6 +3,7 @@ #include "tests/nix_api_util.hh" #include "file-system.hh" +#include #include "nix_api_store.h" #include "nix_api_store_internal.h" @@ -10,7 +11,7 @@ #include #include -namespace fs = std::filesystem; +namespace fs { using namespace std::filesystem; } namespace nixC { class nix_api_store_test : public nix_api_util_context @@ -47,7 +48,9 @@ protected: if (fs::create_directory(nixDir)) break; } #else - auto tmpl = nix::defaultTempDir() + "/tests_nix-store.XXXXXX"; + // resolve any symlinks in i.e. on macOS /tmp -> /private/tmp + // because this is not allowed for a nix store. 
+ auto tmpl = nix::absPath(std::filesystem::path(nix::defaultTempDir()) / "tests_nix-store.XXXXXX", true); nixDir = mkdtemp((char *) tmpl.c_str()); #endif @@ -61,6 +64,10 @@ protected: const char ** params[] = {p1, p2, p3, nullptr}; store = nix_store_open(ctx, "local", params); + if (!store) { + std::string errMsg = nix_err_msg(nullptr, ctx, nullptr); + ASSERT_NE(store, nullptr) << "Could not open store: " << errMsg; + }; } }; } diff --git a/tests/unit/libstore-support/tests/outputs-spec.cc b/src/libstore-test-support/tests/outputs-spec.cc similarity index 100% rename from tests/unit/libstore-support/tests/outputs-spec.cc rename to src/libstore-test-support/tests/outputs-spec.cc diff --git a/tests/unit/libstore-support/tests/outputs-spec.hh b/src/libstore-test-support/tests/outputs-spec.hh similarity index 100% rename from tests/unit/libstore-support/tests/outputs-spec.hh rename to src/libstore-test-support/tests/outputs-spec.hh diff --git a/tests/unit/libstore-support/tests/path.cc b/src/libstore-test-support/tests/path.cc similarity index 100% rename from tests/unit/libstore-support/tests/path.cc rename to src/libstore-test-support/tests/path.cc diff --git a/tests/unit/libstore-support/tests/path.hh b/src/libstore-test-support/tests/path.hh similarity index 100% rename from tests/unit/libstore-support/tests/path.hh rename to src/libstore-test-support/tests/path.hh diff --git a/tests/unit/libstore-support/tests/protocol.hh b/src/libstore-test-support/tests/protocol.hh similarity index 89% rename from tests/unit/libstore-support/tests/protocol.hh rename to src/libstore-test-support/tests/protocol.hh index 3c9e52c11..3f6799d1c 100644 --- a/tests/unit/libstore-support/tests/protocol.hh +++ b/src/libstore-test-support/tests/protocol.hh @@ -12,10 +12,10 @@ namespace nix { template class ProtoTest : public CharacterizationTest, public LibStoreTest { - Path unitTestData = getUnitTestData() + "/" + protocolDir; + std::filesystem::path unitTestData = getUnitTestData() / protocolDir; - Path goldenMaster(std::string_view testStem) const override { - return unitTestData + "/" + testStem + ".bin"; + std::filesystem::path goldenMaster(std::string_view testStem) const override { + return unitTestData / (std::string { testStem + ".bin" }); } }; diff --git a/src/libstore-tests/.version b/src/libstore-tests/.version new file mode 120000 index 000000000..b7badcd0c --- /dev/null +++ b/src/libstore-tests/.version @@ -0,0 +1 @@ +../../.version \ No newline at end of file diff --git a/src/libstore-tests/build-utils-meson b/src/libstore-tests/build-utils-meson new file mode 120000 index 000000000..5fff21bab --- /dev/null +++ b/src/libstore-tests/build-utils-meson @@ -0,0 +1 @@ +../../build-utils-meson \ No newline at end of file diff --git a/tests/unit/libstore/common-protocol.cc b/src/libstore-tests/common-protocol.cc similarity index 100% rename from tests/unit/libstore/common-protocol.cc rename to src/libstore-tests/common-protocol.cc diff --git a/tests/unit/libstore/content-address.cc b/src/libstore-tests/content-address.cc similarity index 100% rename from tests/unit/libstore/content-address.cc rename to src/libstore-tests/content-address.cc diff --git a/tests/unit/libstore/data/common-protocol/content-address.bin b/src/libstore-tests/data/common-protocol/content-address.bin similarity index 100% rename from tests/unit/libstore/data/common-protocol/content-address.bin rename to src/libstore-tests/data/common-protocol/content-address.bin diff --git a/tests/unit/libstore/data/common-protocol/drv-output.bin 
b/src/libstore-tests/data/common-protocol/drv-output.bin similarity index 100% rename from tests/unit/libstore/data/common-protocol/drv-output.bin rename to src/libstore-tests/data/common-protocol/drv-output.bin diff --git a/tests/unit/libstore/data/common-protocol/optional-content-address.bin b/src/libstore-tests/data/common-protocol/optional-content-address.bin similarity index 100% rename from tests/unit/libstore/data/common-protocol/optional-content-address.bin rename to src/libstore-tests/data/common-protocol/optional-content-address.bin diff --git a/tests/unit/libstore/data/common-protocol/optional-store-path.bin b/src/libstore-tests/data/common-protocol/optional-store-path.bin similarity index 100% rename from tests/unit/libstore/data/common-protocol/optional-store-path.bin rename to src/libstore-tests/data/common-protocol/optional-store-path.bin diff --git a/tests/unit/libstore/data/common-protocol/realisation.bin b/src/libstore-tests/data/common-protocol/realisation.bin similarity index 100% rename from tests/unit/libstore/data/common-protocol/realisation.bin rename to src/libstore-tests/data/common-protocol/realisation.bin diff --git a/tests/unit/libstore/data/common-protocol/set.bin b/src/libstore-tests/data/common-protocol/set.bin similarity index 100% rename from tests/unit/libstore/data/common-protocol/set.bin rename to src/libstore-tests/data/common-protocol/set.bin diff --git a/tests/unit/libstore/data/common-protocol/store-path.bin b/src/libstore-tests/data/common-protocol/store-path.bin similarity index 100% rename from tests/unit/libstore/data/common-protocol/store-path.bin rename to src/libstore-tests/data/common-protocol/store-path.bin diff --git a/tests/unit/libstore/data/common-protocol/string.bin b/src/libstore-tests/data/common-protocol/string.bin similarity index 100% rename from tests/unit/libstore/data/common-protocol/string.bin rename to src/libstore-tests/data/common-protocol/string.bin diff --git a/tests/unit/libstore/data/common-protocol/vector.bin b/src/libstore-tests/data/common-protocol/vector.bin similarity index 100% rename from tests/unit/libstore/data/common-protocol/vector.bin rename to src/libstore-tests/data/common-protocol/vector.bin diff --git a/src/libstore-tests/data/derivation/advanced-attributes-defaults.drv b/src/libstore-tests/data/derivation/advanced-attributes-defaults.drv new file mode 120000 index 000000000..f8f30ac32 --- /dev/null +++ b/src/libstore-tests/data/derivation/advanced-attributes-defaults.drv @@ -0,0 +1 @@ +../../../../tests/functional/derivation/advanced-attributes-defaults.drv \ No newline at end of file diff --git a/tests/unit/libstore/data/derivation/advanced-attributes-defaults.json b/src/libstore-tests/data/derivation/advanced-attributes-defaults.json similarity index 100% rename from tests/unit/libstore/data/derivation/advanced-attributes-defaults.json rename to src/libstore-tests/data/derivation/advanced-attributes-defaults.json diff --git a/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs-defaults.drv b/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs-defaults.drv new file mode 120000 index 000000000..837e9a0e4 --- /dev/null +++ b/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs-defaults.drv @@ -0,0 +1 @@ +../../../../tests/functional/derivation/advanced-attributes-structured-attrs-defaults.drv \ No newline at end of file diff --git a/tests/unit/libstore/data/derivation/advanced-attributes-structured-attrs-defaults.json 
b/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs-defaults.json similarity index 100% rename from tests/unit/libstore/data/derivation/advanced-attributes-structured-attrs-defaults.json rename to src/libstore-tests/data/derivation/advanced-attributes-structured-attrs-defaults.json diff --git a/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs.drv b/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs.drv new file mode 120000 index 000000000..e08bb5737 --- /dev/null +++ b/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs.drv @@ -0,0 +1 @@ +../../../../tests/functional/derivation/advanced-attributes-structured-attrs.drv \ No newline at end of file diff --git a/tests/unit/libstore/data/derivation/advanced-attributes-structured-attrs.json b/src/libstore-tests/data/derivation/advanced-attributes-structured-attrs.json similarity index 100% rename from tests/unit/libstore/data/derivation/advanced-attributes-structured-attrs.json rename to src/libstore-tests/data/derivation/advanced-attributes-structured-attrs.json diff --git a/src/libstore-tests/data/derivation/advanced-attributes.drv b/src/libstore-tests/data/derivation/advanced-attributes.drv new file mode 120000 index 000000000..1dc394a0a --- /dev/null +++ b/src/libstore-tests/data/derivation/advanced-attributes.drv @@ -0,0 +1 @@ +../../../../tests/functional/derivation/advanced-attributes.drv \ No newline at end of file diff --git a/tests/unit/libstore/data/derivation/bad-old-version-dyn-deps.drv b/src/libstore-tests/data/derivation/bad-old-version-dyn-deps.drv similarity index 100% rename from tests/unit/libstore/data/derivation/bad-old-version-dyn-deps.drv rename to src/libstore-tests/data/derivation/bad-old-version-dyn-deps.drv diff --git a/tests/unit/libstore/data/derivation/bad-version.drv b/src/libstore-tests/data/derivation/bad-version.drv similarity index 100% rename from tests/unit/libstore/data/derivation/bad-version.drv rename to src/libstore-tests/data/derivation/bad-version.drv diff --git a/tests/unit/libstore/data/derivation/dynDerivationDeps.drv b/src/libstore-tests/data/derivation/dynDerivationDeps.drv similarity index 100% rename from tests/unit/libstore/data/derivation/dynDerivationDeps.drv rename to src/libstore-tests/data/derivation/dynDerivationDeps.drv diff --git a/tests/unit/libstore/data/derivation/dynDerivationDeps.json b/src/libstore-tests/data/derivation/dynDerivationDeps.json similarity index 100% rename from tests/unit/libstore/data/derivation/dynDerivationDeps.json rename to src/libstore-tests/data/derivation/dynDerivationDeps.json diff --git a/tests/unit/libstore/data/derivation/output-caFixedFlat.json b/src/libstore-tests/data/derivation/output-caFixedFlat.json similarity index 100% rename from tests/unit/libstore/data/derivation/output-caFixedFlat.json rename to src/libstore-tests/data/derivation/output-caFixedFlat.json diff --git a/tests/unit/libstore/data/derivation/output-caFixedNAR.json b/src/libstore-tests/data/derivation/output-caFixedNAR.json similarity index 100% rename from tests/unit/libstore/data/derivation/output-caFixedNAR.json rename to src/libstore-tests/data/derivation/output-caFixedNAR.json diff --git a/tests/unit/libstore/data/derivation/output-caFixedText.json b/src/libstore-tests/data/derivation/output-caFixedText.json similarity index 100% rename from tests/unit/libstore/data/derivation/output-caFixedText.json rename to src/libstore-tests/data/derivation/output-caFixedText.json diff --git 
a/tests/unit/libstore/data/derivation/output-caFloating.json b/src/libstore-tests/data/derivation/output-caFloating.json similarity index 100% rename from tests/unit/libstore/data/derivation/output-caFloating.json rename to src/libstore-tests/data/derivation/output-caFloating.json diff --git a/tests/unit/libstore/data/derivation/output-deferred.json b/src/libstore-tests/data/derivation/output-deferred.json similarity index 100% rename from tests/unit/libstore/data/derivation/output-deferred.json rename to src/libstore-tests/data/derivation/output-deferred.json diff --git a/tests/unit/libstore/data/derivation/output-impure.json b/src/libstore-tests/data/derivation/output-impure.json similarity index 100% rename from tests/unit/libstore/data/derivation/output-impure.json rename to src/libstore-tests/data/derivation/output-impure.json diff --git a/tests/unit/libstore/data/derivation/output-inputAddressed.json b/src/libstore-tests/data/derivation/output-inputAddressed.json similarity index 100% rename from tests/unit/libstore/data/derivation/output-inputAddressed.json rename to src/libstore-tests/data/derivation/output-inputAddressed.json diff --git a/tests/unit/libstore/data/derivation/simple.drv b/src/libstore-tests/data/derivation/simple.drv similarity index 100% rename from tests/unit/libstore/data/derivation/simple.drv rename to src/libstore-tests/data/derivation/simple.drv diff --git a/tests/unit/libstore/data/derivation/simple.json b/src/libstore-tests/data/derivation/simple.json similarity index 100% rename from tests/unit/libstore/data/derivation/simple.json rename to src/libstore-tests/data/derivation/simple.json diff --git a/tests/unit/libstore/data/machines/bad_format b/src/libstore-tests/data/machines/bad_format similarity index 100% rename from tests/unit/libstore/data/machines/bad_format rename to src/libstore-tests/data/machines/bad_format diff --git a/tests/unit/libstore/data/machines/valid b/src/libstore-tests/data/machines/valid similarity index 100% rename from tests/unit/libstore/data/machines/valid rename to src/libstore-tests/data/machines/valid diff --git a/tests/unit/libstore/data/nar-info/impure.json b/src/libstore-tests/data/nar-info/impure.json similarity index 100% rename from tests/unit/libstore/data/nar-info/impure.json rename to src/libstore-tests/data/nar-info/impure.json diff --git a/tests/unit/libstore/data/nar-info/pure.json b/src/libstore-tests/data/nar-info/pure.json similarity index 100% rename from tests/unit/libstore/data/nar-info/pure.json rename to src/libstore-tests/data/nar-info/pure.json diff --git a/tests/unit/libstore/data/path-info/empty_impure.json b/src/libstore-tests/data/path-info/empty_impure.json similarity index 100% rename from tests/unit/libstore/data/path-info/empty_impure.json rename to src/libstore-tests/data/path-info/empty_impure.json diff --git a/tests/unit/libstore/data/path-info/empty_pure.json b/src/libstore-tests/data/path-info/empty_pure.json similarity index 100% rename from tests/unit/libstore/data/path-info/empty_pure.json rename to src/libstore-tests/data/path-info/empty_pure.json diff --git a/tests/unit/libstore/data/path-info/impure.json b/src/libstore-tests/data/path-info/impure.json similarity index 100% rename from tests/unit/libstore/data/path-info/impure.json rename to src/libstore-tests/data/path-info/impure.json diff --git a/tests/unit/libstore/data/path-info/pure.json b/src/libstore-tests/data/path-info/pure.json similarity index 100% rename from tests/unit/libstore/data/path-info/pure.json rename to 
src/libstore-tests/data/path-info/pure.json diff --git a/tests/unit/libstore/data/serve-protocol/build-options-2.1.bin b/src/libstore-tests/data/serve-protocol/build-options-2.1.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/build-options-2.1.bin rename to src/libstore-tests/data/serve-protocol/build-options-2.1.bin diff --git a/tests/unit/libstore/data/serve-protocol/build-options-2.2.bin b/src/libstore-tests/data/serve-protocol/build-options-2.2.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/build-options-2.2.bin rename to src/libstore-tests/data/serve-protocol/build-options-2.2.bin diff --git a/tests/unit/libstore/data/serve-protocol/build-options-2.3.bin b/src/libstore-tests/data/serve-protocol/build-options-2.3.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/build-options-2.3.bin rename to src/libstore-tests/data/serve-protocol/build-options-2.3.bin diff --git a/tests/unit/libstore/data/serve-protocol/build-options-2.7.bin b/src/libstore-tests/data/serve-protocol/build-options-2.7.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/build-options-2.7.bin rename to src/libstore-tests/data/serve-protocol/build-options-2.7.bin diff --git a/tests/unit/libstore/data/serve-protocol/build-result-2.2.bin b/src/libstore-tests/data/serve-protocol/build-result-2.2.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/build-result-2.2.bin rename to src/libstore-tests/data/serve-protocol/build-result-2.2.bin diff --git a/tests/unit/libstore/data/serve-protocol/build-result-2.3.bin b/src/libstore-tests/data/serve-protocol/build-result-2.3.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/build-result-2.3.bin rename to src/libstore-tests/data/serve-protocol/build-result-2.3.bin diff --git a/tests/unit/libstore/data/serve-protocol/build-result-2.6.bin b/src/libstore-tests/data/serve-protocol/build-result-2.6.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/build-result-2.6.bin rename to src/libstore-tests/data/serve-protocol/build-result-2.6.bin diff --git a/tests/unit/libstore/data/serve-protocol/content-address.bin b/src/libstore-tests/data/serve-protocol/content-address.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/content-address.bin rename to src/libstore-tests/data/serve-protocol/content-address.bin diff --git a/tests/unit/libstore/data/serve-protocol/drv-output.bin b/src/libstore-tests/data/serve-protocol/drv-output.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/drv-output.bin rename to src/libstore-tests/data/serve-protocol/drv-output.bin diff --git a/tests/unit/libstore/data/serve-protocol/handshake-to-client.bin b/src/libstore-tests/data/serve-protocol/handshake-to-client.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/handshake-to-client.bin rename to src/libstore-tests/data/serve-protocol/handshake-to-client.bin diff --git a/tests/unit/libstore/data/serve-protocol/optional-content-address.bin b/src/libstore-tests/data/serve-protocol/optional-content-address.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/optional-content-address.bin rename to src/libstore-tests/data/serve-protocol/optional-content-address.bin diff --git a/tests/unit/libstore/data/serve-protocol/optional-store-path.bin b/src/libstore-tests/data/serve-protocol/optional-store-path.bin similarity index 100% 
rename from tests/unit/libstore/data/serve-protocol/optional-store-path.bin rename to src/libstore-tests/data/serve-protocol/optional-store-path.bin diff --git a/tests/unit/libstore/data/serve-protocol/realisation.bin b/src/libstore-tests/data/serve-protocol/realisation.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/realisation.bin rename to src/libstore-tests/data/serve-protocol/realisation.bin diff --git a/tests/unit/libstore/data/serve-protocol/set.bin b/src/libstore-tests/data/serve-protocol/set.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/set.bin rename to src/libstore-tests/data/serve-protocol/set.bin diff --git a/tests/unit/libstore/data/serve-protocol/store-path.bin b/src/libstore-tests/data/serve-protocol/store-path.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/store-path.bin rename to src/libstore-tests/data/serve-protocol/store-path.bin diff --git a/tests/unit/libstore/data/serve-protocol/string.bin b/src/libstore-tests/data/serve-protocol/string.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/string.bin rename to src/libstore-tests/data/serve-protocol/string.bin diff --git a/tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.3.bin b/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.3.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.3.bin rename to src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.3.bin diff --git a/tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.4.bin b/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.4.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.4.bin rename to src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.4.bin diff --git a/tests/unit/libstore/data/serve-protocol/vector.bin b/src/libstore-tests/data/serve-protocol/vector.bin similarity index 100% rename from tests/unit/libstore/data/serve-protocol/vector.bin rename to src/libstore-tests/data/serve-protocol/vector.bin diff --git a/tests/unit/libstore/data/store-reference/auto.txt b/src/libstore-tests/data/store-reference/auto.txt similarity index 100% rename from tests/unit/libstore/data/store-reference/auto.txt rename to src/libstore-tests/data/store-reference/auto.txt diff --git a/tests/unit/libstore/data/store-reference/auto_param.txt b/src/libstore-tests/data/store-reference/auto_param.txt similarity index 100% rename from tests/unit/libstore/data/store-reference/auto_param.txt rename to src/libstore-tests/data/store-reference/auto_param.txt diff --git a/tests/unit/libstore/data/store-reference/local_1.txt b/src/libstore-tests/data/store-reference/local_1.txt similarity index 100% rename from tests/unit/libstore/data/store-reference/local_1.txt rename to src/libstore-tests/data/store-reference/local_1.txt diff --git a/tests/unit/libstore/data/store-reference/local_2.txt b/src/libstore-tests/data/store-reference/local_2.txt similarity index 100% rename from tests/unit/libstore/data/store-reference/local_2.txt rename to src/libstore-tests/data/store-reference/local_2.txt diff --git a/tests/unit/libstore/data/store-reference/local_shorthand_1.txt b/src/libstore-tests/data/store-reference/local_shorthand_1.txt similarity index 100% rename from tests/unit/libstore/data/store-reference/local_shorthand_1.txt rename to src/libstore-tests/data/store-reference/local_shorthand_1.txt 
diff --git a/tests/unit/libstore/data/store-reference/local_shorthand_2.txt b/src/libstore-tests/data/store-reference/local_shorthand_2.txt similarity index 100% rename from tests/unit/libstore/data/store-reference/local_shorthand_2.txt rename to src/libstore-tests/data/store-reference/local_shorthand_2.txt diff --git a/tests/unit/libstore/data/store-reference/ssh.txt b/src/libstore-tests/data/store-reference/ssh.txt similarity index 100% rename from tests/unit/libstore/data/store-reference/ssh.txt rename to src/libstore-tests/data/store-reference/ssh.txt diff --git a/tests/unit/libstore/data/store-reference/unix.txt b/src/libstore-tests/data/store-reference/unix.txt similarity index 100% rename from tests/unit/libstore/data/store-reference/unix.txt rename to src/libstore-tests/data/store-reference/unix.txt diff --git a/tests/unit/libstore/data/store-reference/unix_shorthand.txt b/src/libstore-tests/data/store-reference/unix_shorthand.txt similarity index 100% rename from tests/unit/libstore/data/store-reference/unix_shorthand.txt rename to src/libstore-tests/data/store-reference/unix_shorthand.txt diff --git a/tests/unit/libstore/data/worker-protocol/build-mode.bin b/src/libstore-tests/data/worker-protocol/build-mode.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/build-mode.bin rename to src/libstore-tests/data/worker-protocol/build-mode.bin diff --git a/tests/unit/libstore/data/worker-protocol/build-result-1.27.bin b/src/libstore-tests/data/worker-protocol/build-result-1.27.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/build-result-1.27.bin rename to src/libstore-tests/data/worker-protocol/build-result-1.27.bin diff --git a/tests/unit/libstore/data/worker-protocol/build-result-1.28.bin b/src/libstore-tests/data/worker-protocol/build-result-1.28.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/build-result-1.28.bin rename to src/libstore-tests/data/worker-protocol/build-result-1.28.bin diff --git a/tests/unit/libstore/data/worker-protocol/build-result-1.29.bin b/src/libstore-tests/data/worker-protocol/build-result-1.29.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/build-result-1.29.bin rename to src/libstore-tests/data/worker-protocol/build-result-1.29.bin diff --git a/tests/unit/libstore/data/worker-protocol/build-result-1.37.bin b/src/libstore-tests/data/worker-protocol/build-result-1.37.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/build-result-1.37.bin rename to src/libstore-tests/data/worker-protocol/build-result-1.37.bin diff --git a/tests/unit/libstore/data/worker-protocol/client-handshake-info_1_30.bin b/src/libstore-tests/data/worker-protocol/client-handshake-info_1_30.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/client-handshake-info_1_30.bin rename to src/libstore-tests/data/worker-protocol/client-handshake-info_1_30.bin diff --git a/tests/unit/libstore/data/worker-protocol/client-handshake-info_1_33.bin b/src/libstore-tests/data/worker-protocol/client-handshake-info_1_33.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/client-handshake-info_1_33.bin rename to src/libstore-tests/data/worker-protocol/client-handshake-info_1_33.bin diff --git a/tests/unit/libstore/data/worker-protocol/client-handshake-info_1_35.bin b/src/libstore-tests/data/worker-protocol/client-handshake-info_1_35.bin similarity index 100% rename from 
tests/unit/libstore/data/worker-protocol/client-handshake-info_1_35.bin rename to src/libstore-tests/data/worker-protocol/client-handshake-info_1_35.bin diff --git a/tests/unit/libstore/data/worker-protocol/content-address.bin b/src/libstore-tests/data/worker-protocol/content-address.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/content-address.bin rename to src/libstore-tests/data/worker-protocol/content-address.bin diff --git a/tests/unit/libstore/data/worker-protocol/derived-path-1.29.bin b/src/libstore-tests/data/worker-protocol/derived-path-1.29.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/derived-path-1.29.bin rename to src/libstore-tests/data/worker-protocol/derived-path-1.29.bin diff --git a/tests/unit/libstore/data/worker-protocol/derived-path-1.30.bin b/src/libstore-tests/data/worker-protocol/derived-path-1.30.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/derived-path-1.30.bin rename to src/libstore-tests/data/worker-protocol/derived-path-1.30.bin diff --git a/tests/unit/libstore/data/worker-protocol/drv-output.bin b/src/libstore-tests/data/worker-protocol/drv-output.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/drv-output.bin rename to src/libstore-tests/data/worker-protocol/drv-output.bin diff --git a/tests/unit/libstore/data/worker-protocol/handshake-to-client.bin b/src/libstore-tests/data/worker-protocol/handshake-to-client.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/handshake-to-client.bin rename to src/libstore-tests/data/worker-protocol/handshake-to-client.bin diff --git a/tests/unit/libstore/data/worker-protocol/keyed-build-result-1.29.bin b/src/libstore-tests/data/worker-protocol/keyed-build-result-1.29.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/keyed-build-result-1.29.bin rename to src/libstore-tests/data/worker-protocol/keyed-build-result-1.29.bin diff --git a/tests/unit/libstore/data/worker-protocol/optional-content-address.bin b/src/libstore-tests/data/worker-protocol/optional-content-address.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/optional-content-address.bin rename to src/libstore-tests/data/worker-protocol/optional-content-address.bin diff --git a/tests/unit/libstore/data/worker-protocol/optional-store-path.bin b/src/libstore-tests/data/worker-protocol/optional-store-path.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/optional-store-path.bin rename to src/libstore-tests/data/worker-protocol/optional-store-path.bin diff --git a/tests/unit/libstore/data/worker-protocol/optional-trusted-flag.bin b/src/libstore-tests/data/worker-protocol/optional-trusted-flag.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/optional-trusted-flag.bin rename to src/libstore-tests/data/worker-protocol/optional-trusted-flag.bin diff --git a/tests/unit/libstore/data/worker-protocol/realisation.bin b/src/libstore-tests/data/worker-protocol/realisation.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/realisation.bin rename to src/libstore-tests/data/worker-protocol/realisation.bin diff --git a/tests/unit/libstore/data/worker-protocol/set.bin b/src/libstore-tests/data/worker-protocol/set.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/set.bin rename to src/libstore-tests/data/worker-protocol/set.bin diff --git 
a/tests/unit/libstore/data/worker-protocol/store-path.bin b/src/libstore-tests/data/worker-protocol/store-path.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/store-path.bin rename to src/libstore-tests/data/worker-protocol/store-path.bin diff --git a/tests/unit/libstore/data/worker-protocol/string.bin b/src/libstore-tests/data/worker-protocol/string.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/string.bin rename to src/libstore-tests/data/worker-protocol/string.bin diff --git a/tests/unit/libstore/data/worker-protocol/unkeyed-valid-path-info-1.15.bin b/src/libstore-tests/data/worker-protocol/unkeyed-valid-path-info-1.15.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/unkeyed-valid-path-info-1.15.bin rename to src/libstore-tests/data/worker-protocol/unkeyed-valid-path-info-1.15.bin diff --git a/tests/unit/libstore/data/worker-protocol/valid-path-info-1.15.bin b/src/libstore-tests/data/worker-protocol/valid-path-info-1.15.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/valid-path-info-1.15.bin rename to src/libstore-tests/data/worker-protocol/valid-path-info-1.15.bin diff --git a/tests/unit/libstore/data/worker-protocol/valid-path-info-1.16.bin b/src/libstore-tests/data/worker-protocol/valid-path-info-1.16.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/valid-path-info-1.16.bin rename to src/libstore-tests/data/worker-protocol/valid-path-info-1.16.bin diff --git a/tests/unit/libstore/data/worker-protocol/vector.bin b/src/libstore-tests/data/worker-protocol/vector.bin similarity index 100% rename from tests/unit/libstore/data/worker-protocol/vector.bin rename to src/libstore-tests/data/worker-protocol/vector.bin diff --git a/tests/unit/libstore/derivation-advanced-attrs.cc b/src/libstore-tests/derivation-advanced-attrs.cc similarity index 98% rename from tests/unit/libstore/derivation-advanced-attrs.cc rename to src/libstore-tests/derivation-advanced-attrs.cc index 26cf947a8..9d2c64ef3 100644 --- a/tests/unit/libstore/derivation-advanced-attrs.cc +++ b/src/libstore-tests/derivation-advanced-attrs.cc @@ -1,4 +1,3 @@ -#include #include #include @@ -9,6 +8,7 @@ #include "tests/characterization.hh" #include "parsed-derivations.hh" #include "types.hh" +#include "json-utils.hh" namespace nix { @@ -16,12 +16,12 @@ using nlohmann::json; class DerivationAdvancedAttrsTest : public CharacterizationTest, public LibStoreTest { - Path unitTestData = getUnitTestData() + "/derivation"; + std::filesystem::path unitTestData = getUnitTestData() / "derivation"; public: - Path goldenMaster(std::string_view testStem) const override + std::filesystem::path goldenMaster(std::string_view testStem) const override { - return unitTestData + "/" + testStem; + return unitTestData / testStem; } }; diff --git a/tests/unit/libstore/derivation.cc b/src/libstore-tests/derivation.cc similarity index 98% rename from tests/unit/libstore/derivation.cc rename to src/libstore-tests/derivation.cc index 71979f885..14652921a 100644 --- a/tests/unit/libstore/derivation.cc +++ b/src/libstore-tests/derivation.cc @@ -13,11 +13,11 @@ using nlohmann::json; class DerivationTest : public CharacterizationTest, public LibStoreTest { - Path unitTestData = getUnitTestData() + "/derivation"; + std::filesystem::path unitTestData = getUnitTestData() / "derivation"; public: - Path goldenMaster(std::string_view testStem) const override { - return unitTestData + "/" + testStem; + 
std::filesystem::path goldenMaster(std::string_view testStem) const override { + return unitTestData / testStem; } /** diff --git a/tests/unit/libstore/derived-path.cc b/src/libstore-tests/derived-path.cc similarity index 100% rename from tests/unit/libstore/derived-path.cc rename to src/libstore-tests/derived-path.cc diff --git a/tests/unit/libstore/downstream-placeholder.cc b/src/libstore-tests/downstream-placeholder.cc similarity index 100% rename from tests/unit/libstore/downstream-placeholder.cc rename to src/libstore-tests/downstream-placeholder.cc diff --git a/tests/unit/libstore/http-binary-cache-store.cc b/src/libstore-tests/http-binary-cache-store.cc similarity index 100% rename from tests/unit/libstore/http-binary-cache-store.cc rename to src/libstore-tests/http-binary-cache-store.cc diff --git a/tests/unit/libstore/legacy-ssh-store.cc b/src/libstore-tests/legacy-ssh-store.cc similarity index 100% rename from tests/unit/libstore/legacy-ssh-store.cc rename to src/libstore-tests/legacy-ssh-store.cc diff --git a/tests/unit/libstore/local-binary-cache-store.cc b/src/libstore-tests/local-binary-cache-store.cc similarity index 100% rename from tests/unit/libstore/local-binary-cache-store.cc rename to src/libstore-tests/local-binary-cache-store.cc diff --git a/tests/unit/libstore/local-overlay-store.cc b/src/libstore-tests/local-overlay-store.cc similarity index 100% rename from tests/unit/libstore/local-overlay-store.cc rename to src/libstore-tests/local-overlay-store.cc diff --git a/tests/unit/libstore/local-store.cc b/src/libstore-tests/local-store.cc similarity index 100% rename from tests/unit/libstore/local-store.cc rename to src/libstore-tests/local-store.cc diff --git a/src/libstore-tests/local.mk b/src/libstore-tests/local.mk new file mode 100644 index 000000000..b565ff0be --- /dev/null +++ b/src/libstore-tests/local.mk @@ -0,0 +1,38 @@ +check: libstore-tests_RUN + +programs += libstore-tests + +libstore-tests_NAME = libnixstore-tests + +libstore-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libstore-tests.xml + +libstore-tests_DIR := $(d) + +ifeq ($(INSTALL_UNIT_TESTS), yes) + libstore-tests_INSTALL_DIR := $(checkbindir) +else + libstore-tests_INSTALL_DIR := +endif + +libstore-tests_SOURCES := $(wildcard $(d)/*.cc) + +libstore-tests_EXTRA_INCLUDES = \ + -I src/libstore-test-support \ + -I src/libutil-test-support \ + $(INCLUDE_libstore) \ + $(INCLUDE_libstorec) \ + $(INCLUDE_libutil) \ + $(INCLUDE_libutilc) + +libstore-tests_CXXFLAGS += $(libstore-tests_EXTRA_INCLUDES) + +libstore-tests_LIBS = \ + libstore-test-support libutil-test-support \ + libstore libstorec libutil libutilc + +libstore-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) + +ifdef HOST_WINDOWS + # Increase the default reserved stack size to 65 MB so Nix doesn't run out of space + libstore-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024))) +endif diff --git a/tests/unit/libstore/machines.cc b/src/libstore-tests/machines.cc similarity index 91% rename from tests/unit/libstore/machines.cc rename to src/libstore-tests/machines.cc index 2307f4d62..2d66e9534 100644 --- a/tests/unit/libstore/machines.cc +++ b/src/libstore-tests/machines.cc @@ -13,6 +13,8 @@ using testing::Eq; using testing::Field; using testing::SizeIs; +namespace nix::fs { using namespace std::filesystem; } + using namespace nix; TEST(machines, getMachinesWithEmptyBuilders) { @@ -135,10 +137,10 @@ TEST(machines, getMachinesWithIncorrectFormat) { } TEST(machines, getMachinesWithCorrectFileReference) { - auto 
path = absPath(getUnitTestData() + "/machines/valid"); - ASSERT_TRUE(pathExists(path)); + auto path = fs::weakly_canonical(getUnitTestData() / "machines/valid"); + ASSERT_TRUE(fs::exists(path)); - auto actual = Machine::parseConfig({}, "@" + path); + auto actual = Machine::parseConfig({}, "@" + path.string()); ASSERT_THAT(actual, SizeIs(3)); EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@scratchy.labs.cs.uu.nl")))); EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, AuthorityMatches("nix@itchy.labs.cs.uu.nl")))); @@ -146,20 +148,22 @@ TEST(machines, getMachinesWithCorrectFileReference) { } TEST(machines, getMachinesWithCorrectFileReferenceToEmptyFile) { - auto path = "/dev/null"; - ASSERT_TRUE(pathExists(path)); + fs::path path = "/dev/null"; + ASSERT_TRUE(fs::exists(path)); - auto actual = Machine::parseConfig({}, std::string{"@"} + path); + auto actual = Machine::parseConfig({}, "@" + path.string()); ASSERT_THAT(actual, SizeIs(0)); } TEST(machines, getMachinesWithIncorrectFileReference) { - auto actual = Machine::parseConfig({}, "@" + absPath("/not/a/file")); + auto path = fs::weakly_canonical("/not/a/file"); + ASSERT_TRUE(!fs::exists(path)); + auto actual = Machine::parseConfig({}, "@" + path.string()); ASSERT_THAT(actual, SizeIs(0)); } TEST(machines, getMachinesWithCorrectFileReferenceToIncorrectFile) { EXPECT_THROW( - Machine::parseConfig({}, "@" + absPath(getUnitTestData() + "/machines/bad_format")), + Machine::parseConfig({}, "@" + fs::weakly_canonical(getUnitTestData() / "machines" / "bad_format").string()), FormatError); } diff --git a/tests/unit/libstore/meson.build b/src/libstore-tests/meson.build similarity index 98% rename from tests/unit/libstore/meson.build rename to src/libstore-tests/meson.build index 3b36cd62f..f4f67d73a 100644 --- a/tests/unit/libstore/meson.build +++ b/src/libstore-tests/meson.build @@ -28,6 +28,7 @@ subdir('build-utils-meson/subprojects') subdir('build-utils-meson/threads') subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') sqlite = dependency('sqlite3', 'sqlite', version : '>=3.6.19') deps_private += sqlite diff --git a/tests/unit/libstore/nar-info-disk-cache.cc b/src/libstore-tests/nar-info-disk-cache.cc similarity index 100% rename from tests/unit/libstore/nar-info-disk-cache.cc rename to src/libstore-tests/nar-info-disk-cache.cc diff --git a/tests/unit/libstore/nar-info.cc b/src/libstore-tests/nar-info.cc similarity index 94% rename from tests/unit/libstore/nar-info.cc rename to src/libstore-tests/nar-info.cc index a6cb62de4..0d155743d 100644 --- a/tests/unit/libstore/nar-info.cc +++ b/src/libstore-tests/nar-info.cc @@ -13,10 +13,10 @@ using nlohmann::json; class NarInfoTest : public CharacterizationTest, public LibStoreTest { - Path unitTestData = getUnitTestData() + "/nar-info"; + std::filesystem::path unitTestData = getUnitTestData() / "nar-info"; - Path goldenMaster(PathView testStem) const override { - return unitTestData + "/" + testStem + ".json"; + std::filesystem::path goldenMaster(PathView testStem) const override { + return unitTestData / (testStem + ".json"); } }; diff --git a/tests/unit/libstore/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc similarity index 100% rename from tests/unit/libstore/nix_api_store.cc rename to src/libstore-tests/nix_api_store.cc diff --git a/tests/unit/libstore/outputs-spec.cc b/src/libstore-tests/outputs-spec.cc similarity index 100% rename from tests/unit/libstore/outputs-spec.cc rename to src/libstore-tests/outputs-spec.cc 
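The test hunks above replace string concatenation on `Path` with `std::filesystem::path`, so separators come from `operator/` and fixture lookups go through `fs::weakly_canonical` / `fs::exists` instead of `absPath` / `pathExists`. A minimal standalone sketch of those two `std::filesystem` facilities (not Nix code; the paths are illustrative):

```cpp
// Standalone sketch (not Nix code; paths are illustrative) of the two
// std::filesystem facilities the relocated tests now rely on.
#include <filesystem>
#include <iostream>

namespace fs = std::filesystem;

int main()
{
    // operator/ inserts the platform separator, replacing `a + "/" + b`.
    fs::path unitTestData = fs::path("data") / "derivation";
    fs::path golden = unitTestData / "simple.drv";
    std::cout << golden.string() << "\n";               // data/derivation/simple.drv

    // weakly_canonical() normalizes a path and resolves its existing prefix
    // without requiring the final components to exist, which is why it can
    // stand in for absPath() on fixture paths that may be missing.
    fs::path p = fs::current_path() / "machines" / ".." / "machines" / "valid";
    std::cout << fs::weakly_canonical(p).string() << "\n";   // <cwd>/machines/valid
    std::cout << std::boolalpha << fs::exists(p) << "\n";
}
```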
diff --git a/tests/unit/libstore/package.nix b/src/libstore-tests/package.nix similarity index 78% rename from tests/unit/libstore/package.nix rename to src/libstore-tests/package.nix index 7560a5b79..3704d8c5c 100644 --- a/tests/unit/libstore/package.nix +++ b/src/libstore-tests/package.nix @@ -1,12 +1,7 @@ { lib , buildPackages , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonExecutable , nix-store , nix-store-c @@ -27,15 +22,15 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonExecutable (finalAttrs: { pname = "nix-store-tests"; inherit version; workDir = ./.; fileset = fileset.unions [ - ../../../build-utils-meson + ../../build-utils-meson ./build-utils-meson - ../../../.version + ../../.version ./.version ./meson.build # ./meson.options @@ -43,27 +38,25 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) ]; - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - - buildInputs = [ - nix-store - nix-store-c - nix-store-test-support + # Hack for sake of the dev shell + passthru.externalBuildInputs = [ sqlite rapidcheck gtest ]; + buildInputs = finalAttrs.passthru.externalBuildInputs ++ [ + nix-store + nix-store-c + nix-store-test-support + ]; + preConfigure = # "Inline" .version so it's not a symlink, and includes the suffix. # Do the meson utils, without modification. '' chmod u+w ./.version - echo ${version} > ../../../.version + echo ${version} > ../../.version ''; mesonFlags = [ @@ -73,10 +66,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - passthru = { tests = { run = let @@ -86,7 +75,7 @@ mkMesonDerivation (finalAttrs: { root = ../..; fileset = lib.fileset.unions [ ./data - ../../functional/derivation + ../../tests/functional/derivation ]; }; in runCommand "${finalAttrs.pname}-run" { @@ -95,7 +84,7 @@ mkMesonDerivation (finalAttrs: { export HOME="$PWD/home-dir" mkdir -p "$HOME" '' + '' - export _NIX_TEST_UNIT_DATA=${data + "/unit/libstore/data"} + export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} touch $out ''); diff --git a/tests/unit/libstore/path-info.cc b/src/libstore-tests/path-info.cc similarity index 94% rename from tests/unit/libstore/path-info.cc rename to src/libstore-tests/path-info.cc index 9e9c6303d..d6c4c2a7f 100644 --- a/tests/unit/libstore/path-info.cc +++ b/src/libstore-tests/path-info.cc @@ -12,10 +12,10 @@ using nlohmann::json; class PathInfoTest : public CharacterizationTest, public LibStoreTest { - Path unitTestData = getUnitTestData() + "/path-info"; + std::filesystem::path unitTestData = getUnitTestData() / "path-info"; - Path goldenMaster(PathView testStem) const override { - return unitTestData + "/" + testStem + ".json"; + std::filesystem::path goldenMaster(PathView testStem) const override { + return unitTestData / (testStem + ".json"); } }; diff --git a/tests/unit/libstore/path.cc b/src/libstore-tests/path.cc similarity index 100% rename from tests/unit/libstore/path.cc rename to src/libstore-tests/path.cc diff --git a/tests/unit/libstore/references.cc b/src/libstore-tests/references.cc similarity index 100% rename from tests/unit/libstore/references.cc rename to src/libstore-tests/references.cc diff --git a/tests/unit/libstore/s3-binary-cache-store.cc b/src/libstore-tests/s3-binary-cache-store.cc similarity 
index 100% rename from tests/unit/libstore/s3-binary-cache-store.cc rename to src/libstore-tests/s3-binary-cache-store.cc diff --git a/tests/unit/libstore/serve-protocol.cc b/src/libstore-tests/serve-protocol.cc similarity index 97% rename from tests/unit/libstore/serve-protocol.cc rename to src/libstore-tests/serve-protocol.cc index 2505c5a9a..5171fea0f 100644 --- a/tests/unit/libstore/serve-protocol.cc +++ b/src/libstore-tests/serve-protocol.cc @@ -459,21 +459,14 @@ TEST_F(ServeProtoTest, handshake_client_truncated_replay_throws) CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { for (size_t len = 0; len < toClientLog.size(); ++len) { NullBufferedSink nullSink; - StringSource in { - // truncate - toClientLog.substr(0, len) - }; + auto substring = toClientLog.substr(0, len); + StringSource in{substring}; if (len < 8) { EXPECT_THROW( - ServeProto::BasicClientConnection::handshake( - nullSink, in, defaultVersion, "blah"), - EndOfFile); + ServeProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, "blah"), EndOfFile); } else { // Not sure why cannot keep on checking for `EndOfFile`. - EXPECT_THROW( - ServeProto::BasicClientConnection::handshake( - nullSink, in, defaultVersion, "blah"), - Error); + EXPECT_THROW(ServeProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, "blah"), Error); } } }); diff --git a/tests/unit/libstore/ssh-store.cc b/src/libstore-tests/ssh-store.cc similarity index 100% rename from tests/unit/libstore/ssh-store.cc rename to src/libstore-tests/ssh-store.cc diff --git a/tests/unit/libstore/store-reference.cc b/src/libstore-tests/store-reference.cc similarity index 93% rename from tests/unit/libstore/store-reference.cc rename to src/libstore-tests/store-reference.cc index 052cd7bed..d4c42f0fd 100644 --- a/tests/unit/libstore/store-reference.cc +++ b/src/libstore-tests/store-reference.cc @@ -13,11 +13,11 @@ using nlohmann::json; class StoreReferenceTest : public CharacterizationTest, public LibStoreTest { - Path unitTestData = getUnitTestData() + "/store-reference"; + std::filesystem::path unitTestData = getUnitTestData() / "store-reference"; - Path goldenMaster(PathView testStem) const override + std::filesystem::path goldenMaster(PathView testStem) const override { - return unitTestData + "/" + testStem + ".txt"; + return unitTestData / (testStem + ".txt"); } }; diff --git a/tests/unit/libstore/uds-remote-store.cc b/src/libstore-tests/uds-remote-store.cc similarity index 100% rename from tests/unit/libstore/uds-remote-store.cc rename to src/libstore-tests/uds-remote-store.cc diff --git a/tests/unit/libstore/worker-protocol.cc b/src/libstore-tests/worker-protocol.cc similarity index 98% rename from tests/unit/libstore/worker-protocol.cc rename to src/libstore-tests/worker-protocol.cc index bbea9ed75..1185c37f4 100644 --- a/tests/unit/libstore/worker-protocol.cc +++ b/src/libstore-tests/worker-protocol.cc @@ -725,21 +725,14 @@ TEST_F(WorkerProtoTest, handshake_client_truncated_replay_throws) CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { for (size_t len = 0; len < toClientLog.size(); ++len) { NullBufferedSink nullSink; - StringSource in { - // truncate - toClientLog.substr(0, len) - }; + auto substring = toClientLog.substr(0, len); + StringSource in{substring}; if (len < 8) { EXPECT_THROW( - WorkerProto::BasicClientConnection::handshake( - nullSink, in, defaultVersion, {}), - EndOfFile); + WorkerProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, {}), 
EndOfFile); } else { // Not sure why cannot keep on checking for `EndOfFile`. - EXPECT_THROW( - WorkerProto::BasicClientConnection::handshake( - nullSink, in, defaultVersion, {}), - Error); + EXPECT_THROW(WorkerProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, {}), Error); } } }); diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index b809e3ffe..34ed16a38 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -90,7 +90,7 @@ DerivationGoal::~DerivationGoal() { /* Careful: we should never ever throw an exception from a destructor. */ - try { closeLogFile(); } catch (...) { ignoreException(); } + try { closeLogFile(); } catch (...) { ignoreExceptionInDestructor(); } } @@ -814,7 +814,7 @@ void replaceValidPath(const Path & storePath, const Path & tmpPath) // attempt to recover movePath(oldPath, storePath); } catch (...) { - ignoreException(); + ignoreExceptionExceptInterrupt(); } throw; } diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index 4c1373bfa..3bf22320e 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -66,6 +66,7 @@ std::vector Store::buildPathsWithResults( worker.run(goals); std::vector results; + results.reserve(state.size()); for (auto & [req, goalPtr] : state) results.emplace_back(KeyedBuildResult { diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index 0152f1808..315500719 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -183,7 +183,7 @@ Goal::Co PathSubstitutionGoal::tryToRun(StorePath subPath, nix::ref sub, /* Make sure that we are allowed to start a substitution. Note that even if maxSubstitutionJobs == 0, we still allow a substituter to run. This prevents infinite waiting. */ - if (worker.getNrSubstitutions() >= std::max(1U, (unsigned int) settings.maxSubstitutionJobs)) { + while (worker.getNrSubstitutions() >= std::max(1U, (unsigned int) settings.maxSubstitutionJobs)) { worker.waitForBuildSlot(shared_from_this()); co_await Suspend{}; } @@ -294,7 +294,7 @@ void PathSubstitutionGoal::cleanup() outPipe.close(); } catch (...) { - ignoreException(); + ignoreExceptionInDestructor(); } } diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index ab0ba67b5..dbe86f43f 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -184,13 +184,13 @@ void Worker::wakeUp(GoalPtr goal) } -unsigned Worker::getNrLocalBuilds() +size_t Worker::getNrLocalBuilds() { return nrLocalBuilds; } -unsigned Worker::getNrSubstitutions() +size_t Worker::getNrSubstitutions() { return nrSubstitutions; } diff --git a/src/libstore/build/worker.hh b/src/libstore/build/worker.hh index 33a7bf015..e083dbea6 100644 --- a/src/libstore/build/worker.hh +++ b/src/libstore/build/worker.hh @@ -92,12 +92,12 @@ private: * Number of build slots occupied. This includes local builds but does not * include substitutions or remote builds via the build hook. */ - unsigned int nrLocalBuilds; + size_t nrLocalBuilds; /** * Number of substitution slots occupied. */ - unsigned int nrSubstitutions; + size_t nrSubstitutions; /** * Maps used to prevent multiple instantiations of a goal for the @@ -235,12 +235,12 @@ public: * Return the number of local build processes currently running (but not * remote builds via the build hook). 
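One detail worth calling out from the serve- and worker-protocol test hunks earlier: the truncated log is now bound to a named variable before constructing `StringSource`. Assuming `StringSource` keeps only a non-owning view of its argument (as the upstream class does), passing `substr()` directly would leave the source referring to a destroyed temporary. A reduced sketch of the hazard, with a hypothetical `ViewSource` standing in for `StringSource`:

```cpp
// Minimal sketch (ViewSource is illustrative; like Nix's StringSource it
// keeps only a non-owning std::string_view of the data it reads from).
#include <cassert>
#include <string>
#include <string_view>

struct ViewSource
{
    std::string_view s;   // non-owning: the referenced buffer must outlive the source
    explicit ViewSource(std::string_view s) : s(s) {}
};

int main()
{
    std::string log = "0123456789abcdef";

    // BAD: substr() returns a temporary std::string that dies at the end of
    // the full expression, leaving `in.s` dangling.
    //   ViewSource in{log.substr(0, 8)};

    // GOOD: bind the truncated copy to a named variable first, as the
    // protocol tests now do, so it outlives the source that views it.
    auto substring = log.substr(0, 8);
    ViewSource in{substring};
    assert(in.s == "01234567");
}
```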
*/ - unsigned int getNrLocalBuilds(); + size_t getNrLocalBuilds(); /** * Return the number of substitution processes currently running. */ - unsigned int getNrSubstitutions(); + size_t getNrSubstitutions(); /** * Registers a running child process. `inBuildSlot` means that diff --git a/src/libstore/builtins.hh b/src/libstore/builtins.hh index 93558b49e..091946e01 100644 --- a/src/libstore/builtins.hh +++ b/src/libstore/builtins.hh @@ -9,7 +9,8 @@ namespace nix { void builtinFetchurl( const BasicDerivation & drv, const std::map & outputs, - const std::string & netrcData); + const std::string & netrcData, + const std::string & caFileData); void builtinUnpackChannel( const BasicDerivation & drv, diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index b9dfeba2f..90e58dfdb 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -9,7 +9,8 @@ namespace nix { void builtinFetchurl( const BasicDerivation & drv, const std::map & outputs, - const std::string & netrcData) + const std::string & netrcData, + const std::string & caFileData) { /* Make the host's netrc data available. Too bad curl requires this to be stored in a file. It would be nice if we could just @@ -19,6 +20,9 @@ void builtinFetchurl( writeFile(settings.netrcFile, netrcData, 0600); } + settings.caFile = "ca-certificates.crt"; + writeFile(settings.caFile, caFileData, 0600); + auto out = get(drv.outputs, "out"); if (!out) throw Error("'builtin:fetchurl' requires an 'out' output"); @@ -38,10 +42,7 @@ void builtinFetchurl( auto source = sinkToSource([&](Sink & sink) { - /* No need to do TLS verification, because we check the hash of - the result anyway. */ FileTransferRequest request(url); - request.verifyTLS = false; request.decompress = false; auto decompressor = makeDecompressionSink( diff --git a/src/libstore/builtins/unpack-channel.cc b/src/libstore/builtins/unpack-channel.cc index a5f2b8e3a..a6369ee1c 100644 --- a/src/libstore/builtins/unpack-channel.cc +++ b/src/libstore/builtins/unpack-channel.cc @@ -3,31 +3,53 @@ namespace nix { +namespace fs { using namespace std::filesystem; } + void builtinUnpackChannel( const BasicDerivation & drv, const std::map & outputs) { - auto getAttr = [&](const std::string & name) { + auto getAttr = [&](const std::string & name) -> const std::string & { auto i = drv.env.find(name); if (i == drv.env.end()) throw Error("attribute '%s' missing", name); return i->second; }; - auto out = outputs.at("out"); - auto channelName = getAttr("channelName"); - auto src = getAttr("src"); + fs::path out{outputs.at("out")}; + auto & channelName = getAttr("channelName"); + auto & src = getAttr("src"); - createDirs(out); + if (fs::path{channelName}.filename().string() != channelName) { + throw Error("channelName is not allowed to contain filesystem separators, got %1%", channelName); + } + + try { + fs::create_directories(out); + } catch (fs::filesystem_error &) { + throw SysError("creating directory '%1%'", out.string()); + } unpackTarfile(src, out); - auto entries = std::filesystem::directory_iterator{out}; - auto fileName = entries->path().string(); - auto fileCount = std::distance(std::filesystem::begin(entries), std::filesystem::end(entries)); + size_t fileCount; + std::string fileName; + try { + auto entries = fs::directory_iterator{out}; + fileName = entries->path().string(); + fileCount = std::distance(fs::begin(entries), fs::end(entries)); + } catch (fs::filesystem_error &) { + throw SysError("failed to read directory %1%", out.string()); + } if 
(fileCount != 1) throw Error("channel tarball '%s' contains more than one file", src); - std::filesystem::rename(fileName, (out + "/" + channelName)); + + auto target = out / channelName; + try { + fs::rename(fileName, target); + } catch (fs::filesystem_error &) { + throw SysError("failed to rename %1% to %2%", fileName, target.string()); + } } } diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index ef3326cd6..b921dbe2d 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -90,11 +90,11 @@ struct TunnelLogger : public Logger { if (ei.level > verbosity) return; - std::stringstream oss; + std::ostringstream oss; showErrorInfo(oss, ei, false); StringSink buf; - buf << STDERR_NEXT << oss.str(); + buf << STDERR_NEXT << toView(oss); enqueueMsg(buf.s); } @@ -402,6 +402,9 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto pathInfo = [&]() { // NB: FramedSource must be out of scope before logger->stopWork(); + // FIXME: this means that if there is an error + // half-way through, the client will keep sending + // data, since we haven't sent it the error yet. auto [contentAddressMethod, hashAlgo] = ContentAddressMethod::parseWithAlgo(camStr); FramedSource source(conn.from); FileSerialisationMethod dumpMethod; diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 8f9c71851..9b6f67852 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -7,11 +7,12 @@ #include "split.hh" #include "common-protocol.hh" #include "common-protocol-impl.hh" +#include "strings-inline.hh" +#include "json-utils.hh" + #include #include -#include "strings-inline.hh" - namespace nix { std::optional DerivationOutput::path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index 58e5328a5..40740d545 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -298,6 +298,10 @@ struct BasicDerivation std::string name; BasicDerivation() = default; + BasicDerivation(BasicDerivation &&) = default; + BasicDerivation(const BasicDerivation &) = default; + BasicDerivation& operator=(BasicDerivation &&) = default; + BasicDerivation& operator=(const BasicDerivation &) = default; virtual ~BasicDerivation() { }; bool isBuiltin() const; diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 5ea8b6f96..42b93cfe0 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -54,6 +54,8 @@ struct curlFileTransfer : public FileTransfer bool done = false; // whether either the success or failure function has been called Callback callback; CURL * req = 0; + // buffer to accompany the `req` above + char errbuf[CURL_ERROR_SIZE]; bool active = false; // whether the handle has been added to the multi object std::string statusMsg; @@ -137,7 +139,7 @@ struct curlFileTransfer : public FileTransfer if (!done) fail(FileTransferError(Interrupted, {}, "download of '%s' was interrupted", request.uri)); } catch (...) 
{ - ignoreException(); + ignoreExceptionInDestructor(); } } @@ -151,7 +153,7 @@ struct curlFileTransfer : public FileTransfer template void fail(T && e) { - failEx(std::make_exception_ptr(std::move(e))); + failEx(std::make_exception_ptr(std::forward(e))); } LambdaSink finalSink; @@ -370,6 +372,9 @@ struct curlFileTransfer : public FileTransfer if (writtenToSink) curl_easy_setopt(req, CURLOPT_RESUME_FROM_LARGE, writtenToSink); + curl_easy_setopt(req, CURLOPT_ERRORBUFFER, errbuf); + errbuf[0] = 0; + result.data.clear(); result.bodySize = 0; } @@ -484,8 +489,8 @@ struct curlFileTransfer : public FileTransfer code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code))) : FileTransferError(err, std::move(response), - "unable to %s '%s': %s (%d)", - request.verb(), request.uri, curl_easy_strerror(code), code); + "unable to %s '%s': %s (%d) %s", + request.verb(), request.uri, curl_easy_strerror(code), code, errbuf); /* If this is a transient error, then maybe retry the download after a while. If we're writing to a @@ -754,12 +759,17 @@ struct curlFileTransfer : public FileTransfer S3Helper s3Helper(profile, region, scheme, endpoint); + Activity act(*logger, lvlTalkative, actFileTransfer, + fmt("downloading '%s'", request.uri), + {request.uri}, request.parentAct); + // FIXME: implement ETag auto s3Res = s3Helper.getObject(bucketName, key); FileTransferResult res; if (!s3Res.data) throw FileTransferError(NotFound, "S3 object '%s' does not exist", request.uri); res.data = std::move(*s3Res.data); + res.urls.push_back(request.uri); callback(std::move(res)); #else throw nix::Error("cannot download '%s' because Nix is not built with S3 support", request.uri); diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 1494712da..73195794a 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -333,7 +333,7 @@ static std::string quoteRegexChars(const std::string & raw) } #if __linux__ -static void readFileRoots(const char * path, UncheckedRoots & roots) +static void readFileRoots(const std::filesystem::path & path, UncheckedRoots & roots) { try { roots[readFile(path)].emplace(path); @@ -958,8 +958,8 @@ void LocalStore::autoGC(bool sync) } catch (...) { // FIXME: we could propagate the exception to the - // future, but we don't really care. - ignoreException(); + // future, but we don't really care. (what??) + ignoreExceptionInDestructor(); } }).detach(); diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 52ab35b4c..b64e73c26 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -12,6 +12,7 @@ #include #include +#include #include #ifndef _WIN32 @@ -135,7 +136,7 @@ std::vector getUserConfigFiles() std::vector files; auto dirs = getConfigDirs(); for (auto & dir : dirs) { - files.insert(files.end(), dir + "/nix/nix.conf"); + files.insert(files.end(), dir + "/nix.conf"); } return files; } @@ -363,10 +364,21 @@ void initLibStore(bool loadConfig) { preloadNSS(); + /* Because of an objc quirk[1], calling curl_global_init for the first time + after fork() will always result in a crash. + Up until now the solution has been to set OBJC_DISABLE_INITIALIZE_FORK_SAFETY + for every nix process to ignore that error. + Instead of working around that error we address it at the core - + by calling curl_global_init here, which should mean curl will already + have been initialized by the time we try to do so in a forked process. 
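Two libcurl patterns appear in the hunks above: each easy handle now carries a `CURLOPT_ERRORBUFFER` so failures report curl's detailed per-transfer message, and `curl_global_init` is moved to library start-up so it runs once, before anything can fork. A standalone sketch of both (plain libcurl, not Nix's wrapper; the URL is a placeholder):

```cpp
// Standalone libcurl sketch (not Nix's wrapper; URL is a placeholder).
#include <curl/curl.h>
#include <cstdio>

int main()
{
    // Process-wide init, done once and early, before anything can fork.
    curl_global_init(CURL_GLOBAL_ALL);

    CURL * req = curl_easy_init();
    if (!req) return 1;

    // curl only fills the buffer on failure, so start it out empty.
    char errbuf[CURL_ERROR_SIZE];
    errbuf[0] = '\0';

    curl_easy_setopt(req, CURLOPT_URL, "https://example.invalid/");
    curl_easy_setopt(req, CURLOPT_ERRORBUFFER, errbuf);

    CURLcode code = curl_easy_perform(req);
    if (code != CURLE_OK)
        // Pair the generic strerror text with the detailed per-transfer message.
        std::fprintf(stderr, "unable to download: %s (%d) %s\n",
                     curl_easy_strerror(code), code, errbuf);

    curl_easy_cleanup(req);
    curl_global_cleanup();
    return code == CURLE_OK ? 0 : 1;
}
```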
+ + [1] https://github.com/apple-oss-distributions/objc4/blob/01edf1705fbc3ff78a423cd21e03dfc21eb4d780/runtime/objc-initialize.mm#L614-L636 + */ + curl_global_init(CURL_GLOBAL_ALL); +#if __APPLE__ /* On macOS, don't use the per-session TMPDIR (as set e.g. by sshd). This breaks build users because they don't have access to the TMPDIR, in particular in ‘nix-store --serve’. */ -#if __APPLE__ if (hasPrefix(defaultTempDir(), "/var/folders/")) unsetenv("TMPDIR"); #endif diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 65c7bf3bc..ff3df46ba 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -399,10 +399,18 @@ public: default is `true`. )"}; + Setting fsyncStorePaths{this, false, "fsync-store-paths", + R"( + Whether to call `fsync()` on store paths before registering them, to + flush them to disk. This improves robustness in case of system crashes, + but reduces performance. The default is `false`. + )"}; + Setting useSQLiteWAL{this, !isWSL1(), "use-sqlite-wal", "Whether SQLite should use WAL mode."}; #ifndef _WIN32 + // FIXME: remove this option, `fsync-store-paths` is faster. Setting syncBeforeRegistering{this, false, "sync-before-registering", "Whether to call `sync()` before registering a path as valid."}; #endif @@ -1196,7 +1204,7 @@ public: If the user is trusted (see `trusted-users` option), when building a fixed-output derivation, environment variables set in this option - will be passed to the builder if they are listed in [`impureEnvVars`](@docroot@/language/advanced-attributes.md##adv-attr-impureEnvVars). + will be passed to the builder if they are listed in [`impureEnvVars`](@docroot@/language/advanced-attributes.md#adv-attr-impureEnvVars). This option is useful for, e.g., setting `https_proxy` for fixed-output derivations and in a multi-user Nix installation, or @@ -1219,14 +1227,13 @@ public: Setting warnLargePathThreshold{ this, - // n.b. this is deliberately int64 max rather than uint64 max because - // this goes through the Nix language JSON parser and thus needs to be - // representable in Nix language integers. - std::numeric_limits::max(), + 0, "warn-large-path-threshold", R"( Warn when copying a path larger than this number of bytes to the Nix store (as determined by its NAR serialisation). + Default is 0, which disables the warning. + Set it to 1 to warn on all paths. )" }; }; diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index b15ef4e4c..fc7ac2dea 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -169,28 +169,29 @@ protected: { try { checkEnabled(); + + auto request(makeRequest(path)); + + auto callbackPtr = std::make_shared(std::move(callback)); + + getFileTransfer()->enqueueFileTransfer(request, + {[callbackPtr, this](std::future result) { + try { + (*callbackPtr)(std::move(result.get().data)); + } catch (FileTransferError & e) { + if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) + return (*callbackPtr)({}); + maybeDisable(); + callbackPtr->rethrow(); + } catch (...) { + callbackPtr->rethrow(); + } + }}); + } catch (...) 
{ callback.rethrow(); return; } - - auto request(makeRequest(path)); - - auto callbackPtr = std::make_shared(std::move(callback)); - - getFileTransfer()->enqueueFileTransfer(request, - {[callbackPtr, this](std::future result) { - try { - (*callbackPtr)(std::move(result.get().data)); - } catch (FileTransferError & e) { - if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) - return (*callbackPtr)({}); - maybeDisable(); - callbackPtr->rethrow(); - } catch (...) { - callbackPtr->rethrow(); - } - }}); } /** diff --git a/src/libstore/local-overlay-store.cc b/src/libstore/local-overlay-store.cc index ec2c5f4e9..b86beba2c 100644 --- a/src/libstore/local-overlay-store.cc +++ b/src/libstore/local-overlay-store.cc @@ -31,7 +31,7 @@ LocalOverlayStore::LocalOverlayStore(std::string_view scheme, PathView path, con if (checkMount.get()) { std::smatch match; std::string mountInfo; - auto mounts = readFile("/proc/self/mounts"); + auto mounts = readFile(std::filesystem::path{"/proc/self/mounts"}); auto regex = std::regex(R"((^|\n)overlay )" + realStoreDir.get() + R"( .*(\n|$))"); // Mount points can be stacked, so there might be multiple matching entries. diff --git a/src/libstore/local-overlay-store.md b/src/libstore/local-overlay-store.md index 1e1a3d26c..9434ebfb9 100644 --- a/src/libstore/local-overlay-store.md +++ b/src/libstore/local-overlay-store.md @@ -4,7 +4,7 @@ R"( This store type is a variation of the [local store] designed to leverage Linux's [Overlay Filesystem](https://docs.kernel.org/filesystems/overlayfs.html) (OverlayFS for short). Just as OverlayFS combines a lower and upper filesystem by treating the upper one as a patch against the lower, the local overlay store combines a lower store with an upper almost-[local store]. -("almost" because while the upper fileystems for OverlayFS is valid on its own, the upper almost-store is not a valid local store on its own because some references will dangle.) +("almost" because while the upper filesystems for OverlayFS is valid on its own, the upper almost-store is not a valid local store on its own because some references will dangle.) To use this store, you will first need to configure an OverlayFS mountpoint [appropriately](#example-filesystem-layout) as Nix will not do this for you (though it will verify the mountpoint is configured correctly). ### Conceptual parts of a local overlay store @@ -77,13 +77,13 @@ The parts of a local overlay store are as follows: The lower store directory and upper layer directory are combined via OverlayFS to create this directory. Nix doesn't do this itself, because it typically wouldn't have the permissions to do so, so it is the responsibility of the user to set this up first. - Nix can, however, optionally check that that the OverlayFS mount settings appear as expected, matching Nix's own settings. + Nix can, however, optionally check that the OverlayFS mount settings appear as expected, matching Nix's own settings. - **Upper SQLite database**: > Not directly specified. > The location of the database instead depends on the [`state`](#store-experimental-local-overlay-store-state) setting. - > It is is always `${state}/db`. + > It is always `${state}/db`. This contains the metadata of all of the upper layer [store objects][store object] (everything beyond their file system objects), and also duplicate copies of some lower layer store object's metadta. 
The duplication is so the metadata for the [closure](@docroot@/glossary.md#gloss-closure) of upper layer [store objects][store object] can be found entirely within the upper layer. diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 819cee345..eafdac0cd 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -42,7 +42,6 @@ # include # include # include -# include #endif #ifdef __CYGWIN__ @@ -51,6 +50,8 @@ #include +#include + #include "strings.hh" @@ -94,7 +95,7 @@ struct LocalStore::State::Stmts { SQLiteStmt AddRealisationReference; }; -int getSchema(Path schemaPath) +static int getSchema(Path schemaPath) { int curSchema = 0; if (pathExists(schemaPath)) { @@ -131,61 +132,8 @@ void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd) curCASchema = nixCASchemaVersion; } - if (curCASchema < 2) { - SQLiteTxn txn(db); - // Ugly little sql dance to add a new `id` column and make it the primary key - db.exec(R"( - create table Realisations2 ( - id integer primary key autoincrement not null, - drvPath text not null, - outputName text not null, -- symbolic output id, usually "out" - outputPath integer not null, - signatures text, -- space-separated list - foreign key (outputPath) references ValidPaths(id) on delete cascade - ); - insert into Realisations2 (drvPath, outputName, outputPath, signatures) - select drvPath, outputName, outputPath, signatures from Realisations; - drop table Realisations; - alter table Realisations2 rename to Realisations; - )"); - db.exec(R"( - create index if not exists IndexRealisations on Realisations(drvPath, outputName); - - create table if not exists RealisationsRefs ( - referrer integer not null, - realisationReference integer, - foreign key (referrer) references Realisations(id) on delete cascade, - foreign key (realisationReference) references Realisations(id) on delete restrict - ); - )"); - txn.commit(); - } - - if (curCASchema < 3) { - SQLiteTxn txn(db); - // Apply new indices added in this schema update. - db.exec(R"( - -- used by QueryRealisationReferences - create index if not exists IndexRealisationsRefs on RealisationsRefs(referrer); - -- used by cascade deletion when ValidPaths is deleted - create index if not exists IndexRealisationsRefsOnOutputPath on Realisations(outputPath); - )"); - txn.commit(); - } - if (curCASchema < 4) { - SQLiteTxn txn(db); - db.exec(R"( - create trigger if not exists DeleteSelfRefsViaRealisations before delete on ValidPaths - begin - delete from RealisationsRefs where realisationReference in ( - select id from Realisations where outputPath = old.id - ); - end; - -- used by deletion trigger - create index if not exists IndexRealisationsRefsRealisationReference on RealisationsRefs(realisationReference); - )"); - txn.commit(); - } + if (curCASchema < 4) + throw Error("experimental CA schema version %d is no longer supported", curCASchema); writeFile(schemaPath, fmt("%d", nixCASchemaVersion), 0666, true); lockFile(lockFd.get(), ltRead, true); @@ -366,8 +314,6 @@ LocalStore::LocalStore( have performed the upgrade already. */ curSchema = getSchema(); - if (curSchema < 7) { upgradeStore7(); } - openDB(*state, false); if (curSchema < 8) { @@ -525,7 +471,7 @@ LocalStore::~LocalStore() unlink(fnTempRoots.c_str()); } } catch (...) 
{ - ignoreException(); + ignoreExceptionInDestructor(); } } @@ -1099,103 +1045,114 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, if (checkSigs && pathInfoIsUntrusted(info)) throw Error("cannot add path '%s' because it lacks a signature by a trusted key", printStorePath(info.path)); - /* In case we are not interested in reading the NAR: discard it. */ - bool narRead = false; - Finally cleanup = [&]() { - if (!narRead) { - NullFileSystemObjectSink sink; - try { - parseDump(sink, source); - } catch (...) { - ignoreException(); + { + /* In case we are not interested in reading the NAR: discard it. */ + bool narRead = false; + Finally cleanup = [&]() { + if (!narRead) { + NullFileSystemObjectSink sink; + try { + parseDump(sink, source); + } catch (...) { + // TODO: should Interrupted be handled here? + ignoreExceptionInDestructor(); + } } - } - }; + }; - addTempRoot(info.path); - - if (repair || !isValidPath(info.path)) { - - PathLocks outputLock; - - auto realPath = Store::toRealPath(info.path); - - /* Lock the output path. But don't lock if we're being called - from a build hook (whose parent process already acquired a - lock on this path). */ - if (!locksHeld.count(printStorePath(info.path))) - outputLock.lockPaths({realPath}); + addTempRoot(info.path); if (repair || !isValidPath(info.path)) { - deletePath(realPath); + PathLocks outputLock; - /* While restoring the path from the NAR, compute the hash - of the NAR. */ - HashSink hashSink(HashAlgorithm::SHA256); + auto realPath = Store::toRealPath(info.path); - TeeSource wrapperSource { source, hashSink }; + /* Lock the output path. But don't lock if we're being called + from a build hook (whose parent process already acquired a + lock on this path). */ + if (!locksHeld.count(printStorePath(info.path))) + outputLock.lockPaths({realPath}); - narRead = true; - restorePath(realPath, wrapperSource); + if (repair || !isValidPath(info.path)) { - auto hashResult = hashSink.finish(); + deletePath(realPath); - if (hashResult.first != info.narHash) - throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s", - printStorePath(info.path), info.narHash.to_string(HashFormat::Nix32, true), hashResult.first.to_string(HashFormat::Nix32, true)); + /* While restoring the path from the NAR, compute the hash + of the NAR. 
*/ + HashSink hashSink(HashAlgorithm::SHA256); - if (hashResult.second != info.narSize) - throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s", - printStorePath(info.path), info.narSize, hashResult.second); + TeeSource wrapperSource { source, hashSink }; - if (info.ca) { - auto & specified = *info.ca; - auto actualHash = ({ - auto accessor = getFSAccessor(false); - CanonPath path { printStorePath(info.path) }; - Hash h { HashAlgorithm::SHA256 }; // throwaway def to appease C++ - auto fim = specified.method.getFileIngestionMethod(); - switch (fim) { - case FileIngestionMethod::Flat: - case FileIngestionMethod::NixArchive: - { - HashModuloSink caSink { - specified.hash.algo, - std::string { info.path.hashPart() }, + narRead = true; + restorePath(realPath, wrapperSource, settings.fsyncStorePaths); + + auto hashResult = hashSink.finish(); + + if (hashResult.first != info.narHash) + throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s", + printStorePath(info.path), info.narHash.to_string(HashFormat::Nix32, true), hashResult.first.to_string(HashFormat::Nix32, true)); + + if (hashResult.second != info.narSize) + throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s", + printStorePath(info.path), info.narSize, hashResult.second); + + if (info.ca) { + auto & specified = *info.ca; + auto actualHash = ({ + auto accessor = getFSAccessor(false); + CanonPath path { printStorePath(info.path) }; + Hash h { HashAlgorithm::SHA256 }; // throwaway def to appease C++ + auto fim = specified.method.getFileIngestionMethod(); + switch (fim) { + case FileIngestionMethod::Flat: + case FileIngestionMethod::NixArchive: + { + HashModuloSink caSink { + specified.hash.algo, + std::string { info.path.hashPart() }, + }; + dumpPath({accessor, path}, caSink, (FileSerialisationMethod) fim); + h = caSink.finish().first; + break; + } + case FileIngestionMethod::Git: + h = git::dumpHash(specified.hash.algo, {accessor, path}).hash; + break; + } + ContentAddress { + .method = specified.method, + .hash = std::move(h), }; - dumpPath({accessor, path}, caSink, (FileSerialisationMethod) fim); - h = caSink.finish().first; - break; + }); + if (specified.hash != actualHash.hash) { + throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s", + printStorePath(info.path), + specified.hash.to_string(HashFormat::Nix32, true), + actualHash.hash.to_string(HashFormat::Nix32, true)); } - case FileIngestionMethod::Git: - h = git::dumpHash(specified.hash.algo, {accessor, path}).hash; - break; - } - ContentAddress { - .method = specified.method, - .hash = std::move(h), - }; - }); - if (specified.hash != actualHash.hash) { - throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s", - printStorePath(info.path), - specified.hash.to_string(HashFormat::Nix32, true), - actualHash.hash.to_string(HashFormat::Nix32, true)); } + + autoGC(); + + canonicalisePathMetaData(realPath); + + optimisePath(realPath, repair); // FIXME: combine with hashPath() + + if (settings.fsyncStorePaths) { + recursiveSync(realPath); + syncParent(realPath); + } + + registerValidPath(info); } - autoGC(); - - canonicalisePathMetaData(realPath); - - optimisePath(realPath, repair); // FIXME: combine with hashPath() - - registerValidPath(info); + outputLock.setDeletion(true); } - - outputLock.setDeletion(true); } + + // In case `cleanup` ignored an `Interrupted` exception + checkInterrupt(); } @@ -1271,7 +1228,7 @@ StorePath LocalStore::addToStoreFromDump( delTempDir = 
std::make_unique(tempDir); tempPath = tempDir / "x"; - restorePath(tempPath.string(), bothSource, dumpMethod); + restorePath(tempPath.string(), bothSource, dumpMethod, settings.fsyncStorePaths); dumpBuffer.reset(); dump = {}; @@ -1318,7 +1275,7 @@ StorePath LocalStore::addToStoreFromDump( switch (fim) { case FileIngestionMethod::Flat: case FileIngestionMethod::NixArchive: - restorePath(realPath, dumpSource, (FileSerialisationMethod) fim); + restorePath(realPath, dumpSource, (FileSerialisationMethod) fim, settings.fsyncStorePaths); break; case FileIngestionMethod::Git: // doesn't correspond to serialization method, so @@ -1343,6 +1300,11 @@ StorePath LocalStore::addToStoreFromDump( optimisePath(realPath, repair); + if (settings.fsyncStorePaths) { + recursiveSync(realPath); + syncParent(realPath); + } + ValidPathInfo info { *this, name, @@ -1596,62 +1558,6 @@ std::optional LocalStore::isTrustedClient() } -#if defined(FS_IOC_SETFLAGS) && defined(FS_IOC_GETFLAGS) && defined(FS_IMMUTABLE_FL) - -static void makeMutable(const Path & path) -{ - checkInterrupt(); - - auto st = lstat(path); - - if (!S_ISDIR(st.st_mode) && !S_ISREG(st.st_mode)) return; - - if (S_ISDIR(st.st_mode)) { - for (auto & i : readDirectory(path)) - makeMutable(path + "/" + i.name); - } - - /* The O_NOFOLLOW is important to prevent us from changing the - mutable bit on the target of a symlink (which would be a - security hole). */ - AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_NOFOLLOW -#ifndef _WIN32 - | O_CLOEXEC -#endif - ); - if (fd == INVALID_DESCRIPTOR) { - if (errno == ELOOP) return; // it's a symlink - throw SysError("opening file '%1%'", path); - } - - unsigned int flags = 0, old; - - /* Silently ignore errors getting/setting the immutable flag so - that we work correctly on filesystems that don't support it. */ - if (ioctl(fd, FS_IOC_GETFLAGS, &flags)) return; - old = flags; - flags &= ~FS_IMMUTABLE_FL; - if (old == flags) return; - if (ioctl(fd, FS_IOC_SETFLAGS, &flags)) return; -} - -/* Upgrade from schema 6 (Nix 0.15) to schema 7 (Nix >= 1.3). */ -void LocalStore::upgradeStore7() -{ - if (!isRootUser()) return; - printInfo("removing immutable bits from the Nix store (this may take a while)..."); - makeMutable(realStoreDir); -} - -#else - -void LocalStore::upgradeStore7() -{ -} - -#endif - - void LocalStore::vacuumDB() { auto state(_state.lock()); diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index a03cfc03b..21848cc4d 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -373,8 +373,6 @@ private: void updatePathInfo(State & state, const ValidPathInfo & info); - void upgradeStore6(); - void upgradeStore7(); PathSet queryValidPathsOld(); ValidPathInfo queryPathInfoOld(const Path & path); diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 88be6a366..43d8993ba 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -59,7 +59,7 @@ NIX_ROOT = endif # Prefix all but `NIX_STORE_DIR`, since we aren't doing a local store -# yet so a "logical" store dir that is the same as unix is prefered. +# yet so a "logical" store dir that is the same as unix is preferred. # # Also, it keeps the unit tests working. diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 256cf9188..eb729b697 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -33,7 +33,7 @@ Machine::Machine( systemTypes(systemTypes), sshKey(sshKey), maxJobs(maxJobs), - speedFactor(speedFactor == 0.0f ? 
1.0f : std::move(speedFactor)), + speedFactor(speedFactor == 0.0f ? 1.0f : speedFactor), supportedFeatures(supportedFeatures), mandatoryFeatures(mandatoryFeatures), sshPublicHostKey(sshPublicHostKey) @@ -159,8 +159,9 @@ static Machine parseBuilderLine(const std::set & defaultSystems, co const auto & str = tokens[fieldIndex]; try { base64Decode(str); - } catch (const Error & e) { - throw FormatError("bad machine specification: a column #%lu in a row: '%s' is not valid base64 string: %s", fieldIndex, line, e.what()); + } catch (FormatError & e) { + e.addTrace({}, "while parsing machine specification at a column #%lu in a row: '%s'", fieldIndex, line); + throw; } return str; }; diff --git a/src/libstore/meson.build b/src/libstore/meson.build index 8e30845e1..101879e90 100644 --- a/src/libstore/meson.build +++ b/src/libstore/meson.build @@ -7,6 +7,7 @@ project('nix-store', 'cpp', 'debug=true', 'optimization=2', 'errorlogs=true', # Please print logs for tests that fail + 'localstatedir=/nix/var', ], meson_version : '>= 1.1', license : 'LGPL-2.1-or-later', @@ -33,6 +34,8 @@ subdir('build-utils-meson/subprojects') run_command('ln', '-s', meson.project_build_root() / '__nothing_link_target', meson.project_build_root() / '__nothing_symlink', + # native doesn't allow dangling symlinks, which the tests require + env : { 'MSYS' : 'winsymlinks:lnk' }, check : true, ) can_link_symlink = run_command('ln', @@ -68,6 +71,17 @@ has_acl_support = cxx.has_header('sys/xattr.h') \ and cxx.has_function('lremovexattr') configdata.set('HAVE_ACL_SUPPORT', has_acl_support.to_int()) +if host_machine.system() == 'darwin' + sandbox = cxx.find_library('sandbox') + deps_other += [sandbox] +endif + +if host_machine.system() == 'windows' + wsock32 = cxx.find_library('wsock32') + deps_other += [wsock32] +endif + +subdir('build-utils-meson/libatomic') subdir('build-utils-meson/threads') boost = dependency( @@ -324,7 +338,7 @@ fs = import('fs') prefix = get_option('prefix') # For each of these paths, assume that it is relative to the prefix unless -# it is already an absolute path (which is the default for store-dir, state-dir, and log-dir). +# it is already an absolute path (which is the default for store-dir, localstatedir, and log-dir). path_opts = [ # Meson built-ins. 'datadir', @@ -334,13 +348,13 @@ path_opts = [ 'libexecdir', # Homecooked Nix directories. 'store-dir', - 'state-dir', + 'localstatedir', 'log-dir', ] # For your grepping pleasure, this loop sets the following variables that aren't mentioned # literally above: # store_dir -# state_dir +# localstatedir # log_dir # profile_dir foreach optname : path_opts @@ -364,12 +378,12 @@ lsof = find_program('lsof', required : false) # Aside from prefix itself, each of these was made into an absolute path # by joining it with prefix, unless it was already an absolute path -# (which is the default for store-dir, state-dir, and log-dir). +# (which is the default for store-dir, localstatedir, and log-dir). 
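The machines.cc hunk above stops wrapping a bad base64 field in a brand-new error and instead annotates the original exception with `addTrace` and rethrows it, so the root cause is preserved alongside the parsing context. A generic sketch of that catch-annotate-rethrow idiom (`TracedError` here is illustrative, not Nix's `Error` class):

```cpp
// Generic sketch of the catch-annotate-rethrow idiom (TracedError is
// illustrative and not Nix's Error/addTrace API).
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

struct TracedError : std::runtime_error
{
    std::vector<std::string> trace;
    using std::runtime_error::runtime_error;
    void addTrace(std::string frame) { trace.push_back(std::move(frame)); }
};

static void decodeField(const std::string & field)
{
    // Stand-in for base64Decode(): reject anything outside the alphabet.
    if (field.find('!') != std::string::npos)
        throw TracedError("not a valid base64 string: '" + field + "'");
}

int main()
{
    try {
        try {
            decodeField("bad!key");
        } catch (TracedError & e) {
            // Annotate and rethrow the *same* exception object (caught by
            // reference), keeping the original message instead of replacing it.
            e.addTrace("while parsing machine specification field #3");
            throw;
        }
    } catch (TracedError & e) {
        std::cerr << e.what() << "\n";
        for (auto & frame : e.trace) std::cerr << "  " << frame << "\n";
    }
}
```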
cpp_str_defines = { 'NIX_PREFIX': prefix, 'NIX_STORE_DIR': store_dir, 'NIX_DATA_DIR': datadir, - 'NIX_STATE_DIR': state_dir / 'nix', + 'NIX_STATE_DIR': localstatedir / 'nix', 'NIX_LOG_DIR': log_dir, 'NIX_CONF_DIR': sysconfdir / 'nix', 'NIX_MAN_DIR': mandir, @@ -404,6 +418,7 @@ foreach name, value : cpp_str_defines endforeach subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') this_library = library( 'nixstore', @@ -421,4 +436,15 @@ install_headers(headers, subdir : 'nix', preserve_path : true) libraries_private = [] +extra_pkg_config_variables = { + 'storedir' : get_option('store-dir'), +} + +# Working around https://github.com/mesonbuild/meson/issues/13584 +if host_machine.system() != 'darwin' + extra_pkg_config_variables += { + 'localstatedir' : get_option('localstatedir'), + } +endif + subdir('build-utils-meson/export') diff --git a/src/libstore/meson.options b/src/libstore/meson.options index 723a8e020..ebad24dc4 100644 --- a/src/libstore/meson.options +++ b/src/libstore/meson.options @@ -16,10 +16,6 @@ option('store-dir', type : 'string', value : '/nix/store', description : 'path of the Nix store', ) -option('state-dir', type : 'string', value : '/nix/var', - description : 'path to store state in for Nix', -) - option('log-dir', type : 'string', value : '/nix/var/log/nix', description : 'path to store logs in for Nix', ) diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index 83e63794e..80e8d3414 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -87,7 +87,7 @@ public: Sync _state; - NarInfoDiskCacheImpl(Path dbPath = getCacheDir() + "/nix/binary-cache-v6.sqlite") + NarInfoDiskCacheImpl(Path dbPath = getCacheDir() + "/binary-cache-v6.sqlite") { auto state(_state.lock()); diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index 2442a7b09..8b2557060 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -2,6 +2,7 @@ #include "nar-info.hh" #include "store-api.hh" #include "strings.hh" +#include "json-utils.hh" namespace nix { diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index 9d903f218..aeff24c64 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -35,7 +35,7 @@ struct MakeReadOnly /* This will make the path read-only. */ if (path != "") canonicaliseTimestampAndPermissions(path); } catch (...) 
{ - ignoreException(); + ignoreExceptionInDestructor(); } } }; diff --git a/src/libstore/outputs-spec.cc b/src/libstore/outputs-spec.cc index 86788a87e..f5ecbd74b 100644 --- a/src/libstore/outputs-spec.cc +++ b/src/libstore/outputs-spec.cc @@ -30,16 +30,15 @@ std::optional OutputsSpec::parseOpt(std::string_view s) { static std::regex regex(std::string { outputSpecRegexStr }); - std::smatch match; - std::string s2 { s }; // until some improves std::regex - if (!std::regex_match(s2, match, regex)) + std::cmatch match; + if (!std::regex_match(s.cbegin(), s.cend(), match, regex)) return std::nullopt; if (match[1].matched) return { OutputsSpec::All {} }; if (match[2].matched) - return OutputsSpec::Names { tokenizeString(match[2].str(), ",") }; + return OutputsSpec::Names { tokenizeString({match[2].first, match[2].second}, ",") }; assert(false); } diff --git a/src/libstore/package.nix b/src/libstore/package.nix index 4582ba0d2..f04e3b95f 100644 --- a/src/libstore/package.nix +++ b/src/libstore/package.nix @@ -1,12 +1,9 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools +, mkMesonLibrary -, meson -, ninja -, pkg-config , unixtools +, darwin , nix-util , boost @@ -29,7 +26,7 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-store"; inherit version; @@ -51,13 +48,8 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "sql") ./.) ]; - outputs = [ "out" "dev" ]; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ] ++ lib.optional embeddedSandboxShell unixtools.hexdump; + nativeBuildInputs = + lib.optional embeddedSandboxShell unixtools.hexdump; buildInputs = [ boost @@ -65,6 +57,7 @@ mkMesonDerivation (finalAttrs: { sqlite ] ++ lib.optional stdenv.hostPlatform.isLinux libseccomp # There have been issues building these dependencies + ++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin)) aws-sdk-cpp ; @@ -98,10 +91,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libstore/path-info.hh b/src/libstore/path-info.hh index 71f1476a6..9a4c466a8 100644 --- a/src/libstore/path-info.hh +++ b/src/libstore/path-info.hh @@ -176,17 +176,18 @@ struct ValidPathInfo : UnkeyedValidPathInfo { */ Strings shortRefs() const; - ValidPathInfo(const ValidPathInfo & other) = default; - ValidPathInfo(StorePath && path, UnkeyedValidPathInfo info) : UnkeyedValidPathInfo(info), path(std::move(path)) { }; ValidPathInfo(const StorePath & path, UnkeyedValidPathInfo info) : UnkeyedValidPathInfo(info), path(path) { }; ValidPathInfo(const Store & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash); - - virtual ~ValidPathInfo() { } }; +static_assert(std::is_move_assignable_v); +static_assert(std::is_copy_assignable_v); +static_assert(std::is_copy_constructible_v); +static_assert(std::is_move_constructible_v); + using ValidPathInfos = std::map; } diff --git a/src/libstore/path-with-outputs.cc b/src/libstore/path-with-outputs.cc index 161d023d1..e526b1ff6 100644 --- a/src/libstore/path-with-outputs.cc +++ b/src/libstore/path-with-outputs.cc @@ -37,6 +37,7 @@ DerivedPath StorePathWithOutputs::toDerivedPath() const std::vector toDerivedPaths(const std::vector ss) { 
std::vector reqs; + reqs.reserve(ss.size()); for (auto & s : ss) reqs.push_back(s.toDerivedPath()); return reqs; } diff --git a/src/libstore/pathlocks.cc b/src/libstore/pathlocks.cc index 37793db5b..c855e797f 100644 --- a/src/libstore/pathlocks.cc +++ b/src/libstore/pathlocks.cc @@ -27,7 +27,7 @@ PathLocks::~PathLocks() try { unlock(); } catch (...) { - ignoreException(); + ignoreExceptionInDestructor(); } } diff --git a/src/libstore/remote-fs-accessor.cc b/src/libstore/remote-fs-accessor.cc index 20f1d826c..7e360b5fe 100644 --- a/src/libstore/remote-fs-accessor.cc +++ b/src/libstore/remote-fs-accessor.cc @@ -30,7 +30,7 @@ ref RemoteFSAccessor::addToCache(std::string_view hashPart, std: /* FIXME: do this asynchronously. */ writeFile(makeCacheFile(hashPart, "nar"), nar); } catch (...) { - ignoreException(); + ignoreExceptionExceptInterrupt(); } } @@ -42,7 +42,7 @@ ref RemoteFSAccessor::addToCache(std::string_view hashPart, std: nlohmann::json j = listNar(narAccessor, CanonPath::root, true); writeFile(makeCacheFile(hashPart, "ls"), j.dump()); } catch (...) { - ignoreException(); + ignoreExceptionExceptInterrupt(); } } diff --git a/src/libstore/remote-store-connection.hh b/src/libstore/remote-store-connection.hh index 405120ee9..f8549d0b2 100644 --- a/src/libstore/remote-store-connection.hh +++ b/src/libstore/remote-store-connection.hh @@ -40,7 +40,7 @@ struct RemoteStore::ConnectionHandle : handle(std::move(handle)) { } - ConnectionHandle(ConnectionHandle && h) + ConnectionHandle(ConnectionHandle && h) noexcept : handle(std::move(h.handle)) { } @@ -49,7 +49,7 @@ struct RemoteStore::ConnectionHandle RemoteStore::Connection & operator * () { return *handle; } RemoteStore::Connection * operator -> () { return &*handle; } - void processStderr(Sink * sink = 0, Source * source = 0, bool flush = true); + void processStderr(Sink * sink = 0, Source * source = 0, bool flush = true, bool block = true); void withFramedSink(std::function fun); }; diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 555936c18..69bbc64fc 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -153,9 +153,9 @@ RemoteStore::ConnectionHandle::~ConnectionHandle() } } -void RemoteStore::ConnectionHandle::processStderr(Sink * sink, Source * source, bool flush) +void RemoteStore::ConnectionHandle::processStderr(Sink * sink, Source * source, bool flush, bool block) { - handle->processStderr(&daemonException, sink, source, flush); + handle->processStderr(&daemonException, sink, source, flush, block); } @@ -926,43 +926,17 @@ void RemoteStore::ConnectionHandle::withFramedSink(std::functionto.flush(); - std::exception_ptr ex; - - /* Handle log messages / exceptions from the remote on a separate - thread. */ - std::thread stderrThread([&]() { - try { - ReceiveInterrupts receiveInterrupts; - processStderr(nullptr, nullptr, false); - } catch (...) { - ex = std::current_exception(); - } - }); - - Finally joinStderrThread([&]() - { - if (stderrThread.joinable()) { - stderrThread.join(); - if (ex) { - try { - std::rethrow_exception(ex); - } catch (...) { - ignoreException(); - } - } - } - }); - - { - FramedSink sink((*this)->to, ex); + FramedSink sink((*this)->to, [&]() { + /* Periodically process stderr messages and exceptions + from the daemon. 
*/ + processStderr(nullptr, nullptr, false, false); + }); fun(sink); sink.flush(); } - stderrThread.join(); - if (ex) - std::rethrow_exception(ex); + processStderr(nullptr, nullptr, false); } } diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 21175b1eb..bcbf0b55e 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -9,6 +9,7 @@ #include "globals.hh" #include "compression.hh" #include "filetransfer.hh" +#include "signals.hh" #include #include @@ -117,6 +118,7 @@ class RetryStrategy : public Aws::Client::DefaultRetryStrategy { bool ShouldRetry(const Aws::Client::AWSError& error, long attemptedRetries) const override { + checkInterrupt(); auto retry = Aws::Client::DefaultRetryStrategy::ShouldRetry(error, attemptedRetries); if (retry) printError("AWS error '%s' (%s), will retry in %d ms", diff --git a/src/libstore/s3-binary-cache-store.md b/src/libstore/s3-binary-cache-store.md index 675470261..daa41defd 100644 --- a/src/libstore/s3-binary-cache-store.md +++ b/src/libstore/s3-binary-cache-store.md @@ -3,7 +3,7 @@ R"( **Store URL format**: `s3://`*bucket-name* This store allows reading and writing a binary cache stored in an AWS S3 (or S3-compatible service) bucket. -This store shares many idioms with the [HTTP Binary Cache Store](#http-binary-cache-store). +This store shares many idioms with the [HTTP Binary Cache Store](@docroot@/store/types/http-binary-cache-store.md). For AWS S3, the binary cache URL for a bucket named `example-nix-cache` will be exactly . For S3 compatible binary caches, consult that cache's documentation. diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index 3175c1978..f02e472fd 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -86,7 +86,7 @@ SQLite::~SQLite() if (db && sqlite3_close(db) != SQLITE_OK) SQLiteError::throw_(db, "closing database"); } catch (...) { - ignoreException(); + ignoreExceptionInDestructor(); } } @@ -125,7 +125,7 @@ SQLiteStmt::~SQLiteStmt() if (stmt && sqlite3_finalize(stmt) != SQLITE_OK) SQLiteError::throw_(db, "finalizing statement '%s'", sql); } catch (...) { - ignoreException(); + ignoreExceptionInDestructor(); } } @@ -240,7 +240,7 @@ SQLiteTxn::~SQLiteTxn() if (active && sqlite3_exec(db, "rollback;", 0, 0, 0) != SQLITE_OK) SQLiteError::throw_(db, "aborting transaction"); } catch (...) { - ignoreException(); + ignoreExceptionInDestructor(); } } diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh index 003e4d101..037380b71 100644 --- a/src/libstore/sqlite.hh +++ b/src/libstore/sqlite.hh @@ -42,7 +42,8 @@ struct SQLite SQLite(const Path & path, SQLiteOpenMode mode = SQLiteOpenMode::Normal); SQLite(const SQLite & from) = delete; SQLite& operator = (const SQLite & from) = delete; - SQLite& operator = (SQLite && from) { db = from.db; from.db = 0; return *this; } + // NOTE: This is noexcept since we are only copying and assigning raw pointers. 
+ SQLite& operator = (SQLite && from) noexcept { db = from.db; from.db = 0; return *this; } ~SQLite(); operator sqlite3 * () { return db; } diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index e5d623adf..dec733fd5 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -3,9 +3,20 @@ #include "current-process.hh" #include "environment-variables.hh" #include "util.hh" +#include "exec.hh" namespace nix { +static std::string parsePublicHostKey(std::string_view host, std::string_view sshPublicHostKey) +{ + try { + return base64Decode(sshPublicHostKey); + } catch (Error & e) { + e.addTrace({}, "while decoding ssh public host key for host '%s'", host); + throw; + } +} + SSHMaster::SSHMaster( std::string_view host, std::string_view keyFile, @@ -14,7 +25,7 @@ SSHMaster::SSHMaster( : host(host) , fakeSSH(host == "localhost") , keyFile(keyFile) - , sshPublicHostKey(sshPublicHostKey) + , sshPublicHostKey(parsePublicHostKey(host, sshPublicHostKey)) , useMaster(useMaster && !fakeSSH) , compress(compress) , logFD(logFD) @@ -38,12 +49,16 @@ void SSHMaster::addCommonSSHOpts(Strings & args) std::filesystem::path fileName = state->tmpDir->path() / "host-key"; auto p = host.rfind("@"); std::string thost = p != std::string::npos ? std::string(host, p + 1) : host; - writeFile(fileName.string(), thost + " " + base64Decode(sshPublicHostKey) + "\n"); + writeFile(fileName.string(), thost + " " + sshPublicHostKey + "\n"); args.insert(args.end(), {"-oUserKnownHostsFile=" + fileName.string()}); } if (compress) args.push_back("-C"); + // We use this to make ssh signal back to us that the connection is established. + // It really does run locally; see createSSHEnv which sets up SHELL to make + // it launch more reliably. The local command runs synchronously, so presumably + // the remote session won't be garbled if the local command is slow. args.push_back("-oPermitLocalCommand=yes"); args.push_back("-oLocalCommand=echo started"); } @@ -56,6 +71,27 @@ bool SSHMaster::isMasterRunning() { return res.first == 0; } +Strings createSSHEnv() +{ + // Copy the environment and set SHELL=/bin/sh + std::map env = getEnv(); + + // SSH will invoke the "user" shell for -oLocalCommand, but that means + // $SHELL. To keep things simple and avoid potential issues with other + // shells, we set it to /bin/sh. + // Technically, we don't need that, and we could reinvoke ourselves to print + // "started". Self-reinvocation is tricky with library consumers, but mostly + // solved; refer to the development history of nixExePath in libstore/globals.cc. 
+ env.insert_or_assign("SHELL", "/bin/sh"); + + Strings r; + for (auto & [k, v] : env) { + r.push_back(k + "=" + v); + } + + return r; +} + std::unique_ptr SSHMaster::startCommand( Strings && command, Strings && extraSshArgs) { @@ -104,8 +140,8 @@ std::unique_ptr SSHMaster::startCommand( } args.splice(args.end(), std::move(command)); - - execvp(args.begin()->c_str(), stringsToCharPtrs(args).data()); + auto env = createSSHEnv(); + nix::execvpe(args.begin()->c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(env).data()); // could not exec ssh/bash throw SysError("unable to execute '%s'", args.front()); @@ -172,7 +208,8 @@ Path SSHMaster::startMaster() if (verbosity >= lvlChatty) args.push_back("-v"); addCommonSSHOpts(args); - execvp(args.begin()->c_str(), stringsToCharPtrs(args).data()); + auto env = createSSHEnv(); + nix::execvpe(args.begin()->c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(env).data()); throw SysError("unable to execute '%s'", args.front()); }, options); diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh index 19b30e883..4097134d0 100644 --- a/src/libstore/ssh.hh +++ b/src/libstore/ssh.hh @@ -14,6 +14,9 @@ private: const std::string host; bool fakeSSH; const std::string keyFile; + /** + * Raw bytes, not Base64 encoding. + */ const std::string sshPublicHostKey; const bool useMaster; const bool compress; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index b3e5ad014..10577fa2a 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -171,7 +171,7 @@ std::pair StoreDirConfig::computeStorePath( PathFilter & filter) const { auto [h, size] = hashPath(path, method.getFileIngestionMethod(), hashAlgo, filter); - if (size && *size >= settings.warnLargePathThreshold) + if (settings.warnLargePathThreshold && size && *size >= settings.warnLargePathThreshold) warn("hashed large path '%s' (%s)", path, renderSize(*size)); return { makeFixedOutputPathFromCA( @@ -210,14 +210,16 @@ StorePath Store::addToStore( fsm = FileSerialisationMethod::NixArchive; break; } - auto source = sinkToSource([&](Sink & sink) { - dumpPath(path, sink, fsm, filter); + std::optional storePath; + auto sink = sourceToSink([&](Source & source) { + LengthSource lengthSource(source); + storePath = addToStoreFromDump(lengthSource, name, fsm, method, hashAlgo, references, repair); + if (settings.warnLargePathThreshold && lengthSource.total >= settings.warnLargePathThreshold) + warn("copied large path '%s' to the store (%s)", path, renderSize(lengthSource.total)); }); - LengthSource lengthSource(*source); - auto storePath = addToStoreFromDump(lengthSource, name, fsm, method, hashAlgo, references, repair); - if (lengthSource.total >= settings.warnLargePathThreshold) - warn("copied large path '%s' to the store (%s)", path, renderSize(lengthSource.total)); - return storePath; + dumpPath(path, *sink, fsm, filter); + sink->finish(); + return storePath.value(); } void Store::addMultipleToStore( @@ -820,14 +822,25 @@ StorePathSet Store::queryValidPaths(const StorePathSet & paths, SubstituteFlag m auto doQuery = [&](const StorePath & path) { checkInterrupt(); queryPathInfo(path, {[path, &state_, &wakeup](std::future> fut) { - auto state(state_.lock()); + bool exists = false; + std::exception_ptr newExc{}; + try { auto info = fut.get(); - state->valid.insert(path); + exists = true; } catch (InvalidPath &) { } catch (...) 
{ - state->exc = std::current_exception(); + newExc = std::current_exception(); } + + auto state(state_.lock()); + + if (exists) + state->valid.insert(path); + + if (newExc) + state->exc = newExc; + assert(state->left); if (!--state->left) wakeup.notify_one(); @@ -1042,7 +1055,7 @@ std::map copyPaths( // not be within our control to change that, and we might still want // to at least copy the output paths. if (e.missingFeature == Xp::CaDerivations) - ignoreException(); + ignoreExceptionExceptInterrupt(); else throw; } @@ -1302,7 +1315,7 @@ ref openStore(StoreReference && storeURI) /* If /nix doesn't exist, there is no daemon socket, and we're not root, then automatically set up a chroot store in ~/.local/share/nix/root. */ - auto chrootStore = getDataDir() + "/nix/root"; + auto chrootStore = getDataDir() + "/root"; if (!pathExists(chrootStore)) { try { createDirs(chrootStore); diff --git a/src/libstore/unix/build/hook-instance.cc b/src/libstore/unix/build/hook-instance.cc index 4f8492fe9..79eb25a91 100644 --- a/src/libstore/unix/build/hook-instance.cc +++ b/src/libstore/unix/build/hook-instance.cc @@ -91,7 +91,7 @@ HookInstance::~HookInstance() toHook.writeSide = -1; if (pid != -1) pid.kill(); } catch (...) { - ignoreException(); + ignoreExceptionInDestructor(); } } diff --git a/src/libstore/unix/build/local-derivation-goal.cc b/src/libstore/unix/build/local-derivation-goal.cc index d3482df17..dcfaadeef 100644 --- a/src/libstore/unix/build/local-derivation-goal.cc +++ b/src/libstore/unix/build/local-derivation-goal.cc @@ -58,6 +58,10 @@ #if __APPLE__ #include #include +#include + +/* This definition is undocumented but depended upon by all major browsers. */ +extern "C" int sandbox_init_with_parameters(const char *profile, uint64_t flags, const char *const parameters[], char **errorbuf); #endif #include @@ -65,6 +69,7 @@ #include #include "strings.hh" +#include "signals.hh" namespace nix { @@ -109,9 +114,9 @@ LocalDerivationGoal::~LocalDerivationGoal() { /* Careful: we should never ever throw an exception from a destructor. */ - try { deleteTmpDir(false); } catch (...) { ignoreException(); } - try { killChild(); } catch (...) { ignoreException(); } - try { stopDaemon(); } catch (...) { ignoreException(); } + try { deleteTmpDir(false); } catch (...) { ignoreExceptionInDestructor(); } + try { killChild(); } catch (...) { ignoreExceptionInDestructor(); } + try { stopDaemon(); } catch (...) { ignoreExceptionInDestructor(); } } @@ -433,6 +438,41 @@ static void doBind(const Path & source, const Path & target, bool optional = fal }; #endif +/** + * Rethrow the current exception as a subclass of `Error`. + */ +static void rethrowExceptionAsError() +{ + try { + throw; + } catch (Error &) { + throw; + } catch (std::exception & e) { + throw Error(e.what()); + } catch (...) { + throw Error("unknown exception"); + } +} + +/** + * Send the current exception to the parent in the format expected by + * `LocalDerivationGoal::processSandboxSetupMessages()`. 
+ */ +static void handleChildException(bool sendException) +{ + try { + rethrowExceptionAsError(); + } catch (Error & e) { + if (sendException) { + writeFull(STDERR_FILENO, "\1\n"); + FdSink sink(STDERR_FILENO); + sink << e; + sink.flush(); + } else + std::cerr << e.msg(); + } +} + void LocalDerivationGoal::startBuilder() { if ((buildUser && buildUser->getUIDCount() != 1) @@ -444,25 +484,22 @@ void LocalDerivationGoal::startBuilder() #if __linux__ experimentalFeatureSettings.require(Xp::Cgroups); + /* If we're running from the daemon, then this will return the + root cgroup of the service. Otherwise, it will return the + current cgroup. */ + auto rootCgroup = getRootCgroup(); auto cgroupFS = getCgroupFS(); if (!cgroupFS) throw Error("cannot determine the cgroups file system"); - - auto ourCgroups = getCgroups("/proc/self/cgroup"); - auto ourCgroup = ourCgroups[""]; - if (ourCgroup == "") - throw Error("cannot determine cgroup name from /proc/self/cgroup"); - - auto ourCgroupPath = canonPath(*cgroupFS + "/" + ourCgroup); - - if (!pathExists(ourCgroupPath)) - throw Error("expected cgroup directory '%s'", ourCgroupPath); + auto rootCgroupPath = canonPath(*cgroupFS + "/" + rootCgroup); + if (!pathExists(rootCgroupPath)) + throw Error("expected cgroup directory '%s'", rootCgroupPath); static std::atomic counter{0}; cgroup = buildUser - ? fmt("%s/nix-build-uid-%d", ourCgroupPath, buildUser->getUID()) - : fmt("%s/nix-build-pid-%d-%d", ourCgroupPath, getpid(), counter++); + ? fmt("%s/nix-build-uid-%d", rootCgroupPath, buildUser->getUID()) + : fmt("%s/nix-build-pid-%d-%d", rootCgroupPath, getpid(), counter++); debug("using cgroup '%s'", *cgroup); @@ -952,32 +989,40 @@ void LocalDerivationGoal::startBuilder() root. */ openSlave(); - /* Drop additional groups here because we can't do it - after we've created the new user namespace. */ - if (setgroups(0, 0) == -1) { - if (errno != EPERM) - throw SysError("setgroups failed"); - if (settings.requireDropSupplementaryGroups) - throw Error("setgroups failed. Set the require-drop-supplementary-groups option to false to skip this step."); + try { + /* Drop additional groups here because we can't do it + after we've created the new user namespace. */ + if (setgroups(0, 0) == -1) { + if (errno != EPERM) + throw SysError("setgroups failed"); + if (settings.requireDropSupplementaryGroups) + throw Error("setgroups failed. Set the require-drop-supplementary-groups option to false to skip this step."); + } + + ProcessOptions options; + options.cloneFlags = CLONE_NEWPID | CLONE_NEWNS | CLONE_NEWIPC | CLONE_NEWUTS | CLONE_PARENT | SIGCHLD; + if (privateNetwork) + options.cloneFlags |= CLONE_NEWNET; + if (usingUserNamespace) + options.cloneFlags |= CLONE_NEWUSER; + + pid_t child = startProcess([&]() { runChild(); }, options); + + writeFull(sendPid.writeSide.get(), fmt("%d\n", child)); + _exit(0); + } catch (...) { + handleChildException(true); + _exit(1); } - - ProcessOptions options; - options.cloneFlags = CLONE_NEWPID | CLONE_NEWNS | CLONE_NEWIPC | CLONE_NEWUTS | CLONE_PARENT | SIGCHLD; - if (privateNetwork) - options.cloneFlags |= CLONE_NEWNET; - if (usingUserNamespace) - options.cloneFlags |= CLONE_NEWUSER; - - pid_t child = startProcess([&]() { runChild(); }, options); - - writeFull(sendPid.writeSide.get(), fmt("%d\n", child)); - _exit(0); }); sendPid.writeSide.close(); - if (helper.wait() != 0) + if (helper.wait() != 0) { + processSandboxSetupMessages(); + // Only reached if the child process didn't send an exception. 
throw Error("unable to start build process"); + } userNamespaceSync.readSide = -1; @@ -1053,7 +1098,12 @@ void LocalDerivationGoal::startBuilder() pid.setSeparatePG(true); worker.childStarted(shared_from_this(), {builderOut.get()}, true, true); - /* Check if setting up the build environment failed. */ + processSandboxSetupMessages(); +} + + +void LocalDerivationGoal::processSandboxSetupMessages() +{ std::vector msgs; while (true) { std::string msg = [&]() { @@ -1081,7 +1131,8 @@ void LocalDerivationGoal::startBuilder() } -void LocalDerivationGoal::initTmpDir() { +void LocalDerivationGoal::initTmpDir() +{ /* In a sandbox, for determinism, always use the same temporary directory. */ #if __linux__ @@ -1533,8 +1584,10 @@ void LocalDerivationGoal::startDaemon() FdSink(remote.get()), NotTrusted, daemon::Recursive); debug("terminated daemon connection"); + } catch (const Interrupted &) { + debug("interrupted daemon connection"); } catch (SystemError &) { - ignoreException(); + ignoreExceptionExceptInterrupt(); } }); @@ -1746,13 +1799,20 @@ void LocalDerivationGoal::runChild() bool setUser = true; - /* Make the contents of netrc available to builtin:fetchurl - (which may run under a different uid and/or in a sandbox). */ + /* Make the contents of netrc and the CA certificate bundle + available to builtin:fetchurl (which may run under a + different uid and/or in a sandbox). */ std::string netrcData; - try { - if (drv->isBuiltin() && drv->builder == "builtin:fetchurl") - netrcData = readFile(settings.netrcFile); - } catch (SystemError &) { } + std::string caFileData; + if (drv->isBuiltin() && drv->builder == "builtin:fetchurl") { + try { + netrcData = readFile(settings.netrcFile); + } catch (SystemError &) { } + + try { + caFileData = readFile(settings.caFile); + } catch (SystemError &) { } + } #if __linux__ if (useChroot) { @@ -1955,7 +2015,7 @@ void LocalDerivationGoal::runChild() if (chdir(chrootRootDir.c_str()) == -1) throw SysError("cannot change directory to '%1%'", chrootRootDir); - if (mkdir("real-root", 0) == -1) + if (mkdir("real-root", 0500) == -1) throw SysError("cannot create real-root directory"); if (pivot_root(".", "real-root") == -1) @@ -1986,7 +2046,7 @@ void LocalDerivationGoal::runChild() throw SysError("changing into '%1%'", tmpDir); /* Close all other file descriptors. */ - unix::closeMostFDs({STDIN_FILENO, STDOUT_FILENO, STDERR_FILENO}); + unix::closeExtraFDs(); #if __linux__ linux::setPersonality(drv->platform); @@ -2027,154 +2087,130 @@ void LocalDerivationGoal::runChild() throw SysError("setuid failed"); } - /* Fill in the arguments. */ - Strings args; - - std::string builder = "invalid"; - - if (drv->isBuiltin()) { - ; - } #if __APPLE__ - else { - /* This has to appear before import statements. */ - std::string sandboxProfile = "(version 1)\n"; + /* This has to appear before import statements. */ + std::string sandboxProfile = "(version 1)\n"; - if (useChroot) { + if (useChroot) { - /* Lots and lots and lots of file functions freak out if they can't stat their full ancestry */ - PathSet ancestry; + /* Lots and lots and lots of file functions freak out if they can't stat their full ancestry */ + PathSet ancestry; - /* We build the ancestry before adding all inputPaths to the store because we know they'll - all have the same parents (the store), and there might be lots of inputs. This isn't - particularly efficient... 
I doubt it'll be a bottleneck in practice */ - for (auto & i : pathsInChroot) { - Path cur = i.first; - while (cur.compare("/") != 0) { - cur = dirOf(cur); - ancestry.insert(cur); - } - } - - /* And we want the store in there regardless of how empty pathsInChroot. We include the innermost - path component this time, since it's typically /nix/store and we care about that. */ - Path cur = worker.store.storeDir; + /* We build the ancestry before adding all inputPaths to the store because we know they'll + all have the same parents (the store), and there might be lots of inputs. This isn't + particularly efficient... I doubt it'll be a bottleneck in practice */ + for (auto & i : pathsInChroot) { + Path cur = i.first; while (cur.compare("/") != 0) { - ancestry.insert(cur); cur = dirOf(cur); + ancestry.insert(cur); } + } - /* Add all our input paths to the chroot */ - for (auto & i : inputPaths) { - auto p = worker.store.printStorePath(i); - pathsInChroot[p] = p; - } + /* And we want the store in there regardless of how empty pathsInChroot. We include the innermost + path component this time, since it's typically /nix/store and we care about that. */ + Path cur = worker.store.storeDir; + while (cur.compare("/") != 0) { + ancestry.insert(cur); + cur = dirOf(cur); + } - /* Violations will go to the syslog if you set this. Unfortunately the destination does not appear to be configurable */ - if (settings.darwinLogSandboxViolations) { - sandboxProfile += "(deny default)\n"; - } else { - sandboxProfile += "(deny default (with no-log))\n"; - } + /* Add all our input paths to the chroot */ + for (auto & i : inputPaths) { + auto p = worker.store.printStorePath(i); + pathsInChroot[p] = p; + } - sandboxProfile += - #include "sandbox-defaults.sb" - ; - - if (!derivationType->isSandboxed()) - sandboxProfile += - #include "sandbox-network.sb" - ; - - /* Add the output paths we'll use at build-time to the chroot */ - sandboxProfile += "(allow file-read* file-write* process-exec\n"; - for (auto & [_, path] : scratchOutputs) - sandboxProfile += fmt("\t(subpath \"%s\")\n", worker.store.printStorePath(path)); - - sandboxProfile += ")\n"; - - /* Our inputs (transitive dependencies and any impurities computed above) - - without file-write* allowed, access() incorrectly returns EPERM - */ - sandboxProfile += "(allow file-read* file-write* process-exec\n"; - for (auto & i : pathsInChroot) { - if (i.first != i.second.source) - throw Error( - "can't map '%1%' to '%2%': mismatched impure paths not supported on Darwin", - i.first, i.second.source); - - std::string path = i.first; - auto optSt = maybeLstat(path.c_str()); - if (!optSt) { - if (i.second.optional) - continue; - throw SysError("getting attributes of required path '%s", path); - } - if (S_ISDIR(optSt->st_mode)) - sandboxProfile += fmt("\t(subpath \"%s\")\n", path); - else - sandboxProfile += fmt("\t(literal \"%s\")\n", path); - } - sandboxProfile += ")\n"; - - /* Allow file-read* on full directory hierarchy to self. 
Allows realpath() */ - sandboxProfile += "(allow file-read*\n"; - for (auto & i : ancestry) { - sandboxProfile += fmt("\t(literal \"%s\")\n", i); - } - sandboxProfile += ")\n"; - - sandboxProfile += additionalSandboxProfile; - } else - sandboxProfile += - #include "sandbox-minimal.sb" - ; - - debug("Generated sandbox profile:"); - debug(sandboxProfile); - - Path sandboxFile = tmpDir + "/.sandbox.sb"; - - writeFile(sandboxFile, sandboxProfile); - - bool allowLocalNetworking = parsedDrv->getBoolAttr("__darwinAllowLocalNetworking"); - - /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms - to find temporary directories, so we want to open up a broader place for them to put their files, if needed. */ - Path globalTmpDir = canonPath(defaultTempDir(), true); - - /* They don't like trailing slashes on subpath directives */ - while (!globalTmpDir.empty() && globalTmpDir.back() == '/') - globalTmpDir.pop_back(); - - if (getEnv("_NIX_TEST_NO_SANDBOX") != "1") { - builder = "/usr/bin/sandbox-exec"; - args.push_back("sandbox-exec"); - args.push_back("-f"); - args.push_back(sandboxFile); - args.push_back("-D"); - args.push_back("_GLOBAL_TMP_DIR=" + globalTmpDir); - if (allowLocalNetworking) { - args.push_back("-D"); - args.push_back(std::string("_ALLOW_LOCAL_NETWORKING=1")); - } - args.push_back(drv->builder); + /* Violations will go to the syslog if you set this. Unfortunately the destination does not appear to be configurable */ + if (settings.darwinLogSandboxViolations) { + sandboxProfile += "(deny default)\n"; } else { - builder = drv->builder; - args.push_back(std::string(baseNameOf(drv->builder))); + sandboxProfile += "(deny default (with no-log))\n"; + } + + sandboxProfile += + #include "sandbox-defaults.sb" + ; + + if (!derivationType->isSandboxed()) + sandboxProfile += + #include "sandbox-network.sb" + ; + + /* Add the output paths we'll use at build-time to the chroot */ + sandboxProfile += "(allow file-read* file-write* process-exec\n"; + for (auto & [_, path] : scratchOutputs) + sandboxProfile += fmt("\t(subpath \"%s\")\n", worker.store.printStorePath(path)); + + sandboxProfile += ")\n"; + + /* Our inputs (transitive dependencies and any impurities computed above) + + without file-write* allowed, access() incorrectly returns EPERM + */ + sandboxProfile += "(allow file-read* file-write* process-exec\n"; + for (auto & i : pathsInChroot) { + if (i.first != i.second.source) + throw Error( + "can't map '%1%' to '%2%': mismatched impure paths not supported on Darwin", + i.first, i.second.source); + + std::string path = i.first; + auto optSt = maybeLstat(path.c_str()); + if (!optSt) { + if (i.second.optional) + continue; + throw SysError("getting attributes of required path '%s", path); + } + if (S_ISDIR(optSt->st_mode)) + sandboxProfile += fmt("\t(subpath \"%s\")\n", path); + else + sandboxProfile += fmt("\t(literal \"%s\")\n", path); + } + sandboxProfile += ")\n"; + + /* Allow file-read* on full directory hierarchy to self. 
Allows realpath() */ + sandboxProfile += "(allow file-read*\n"; + for (auto & i : ancestry) { + sandboxProfile += fmt("\t(literal \"%s\")\n", i); + } + sandboxProfile += ")\n"; + + sandboxProfile += additionalSandboxProfile; + } else + sandboxProfile += + #include "sandbox-minimal.sb" + ; + + debug("Generated sandbox profile:"); + debug(sandboxProfile); + + bool allowLocalNetworking = parsedDrv->getBoolAttr("__darwinAllowLocalNetworking"); + + /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms + to find temporary directories, so we want to open up a broader place for them to put their files, if needed. */ + Path globalTmpDir = canonPath(defaultTempDir(), true); + + /* They don't like trailing slashes on subpath directives */ + while (!globalTmpDir.empty() && globalTmpDir.back() == '/') + globalTmpDir.pop_back(); + + if (getEnv("_NIX_TEST_NO_SANDBOX") != "1") { + Strings sandboxArgs; + sandboxArgs.push_back("_GLOBAL_TMP_DIR"); + sandboxArgs.push_back(globalTmpDir); + if (allowLocalNetworking) { + sandboxArgs.push_back("_ALLOW_LOCAL_NETWORKING"); + sandboxArgs.push_back("1"); + } + char * sandbox_errbuf = nullptr; + if (sandbox_init_with_parameters(sandboxProfile.c_str(), 0, stringsToCharPtrs(sandboxArgs).data(), &sandbox_errbuf)) { + writeFull(STDERR_FILENO, fmt("failed to configure sandbox: %s\n", sandbox_errbuf ? sandbox_errbuf : "(null)")); + _exit(1); } } -#else - else { - builder = drv->builder; - args.push_back(std::string(baseNameOf(drv->builder))); - } #endif - for (auto & i : drv->args) - args.push_back(rewriteStrings(i, inputRewrites)); - /* Indicate that we managed to set up the build environment. */ writeFull(STDERR_FILENO, std::string("\2\n")); @@ -2191,7 +2227,7 @@ void LocalDerivationGoal::runChild() worker.store.printStorePath(scratchOutputs.at(e.first))); if (drv->builder == "builtin:fetchurl") - builtinFetchurl(*drv, outputs, netrcData); + builtinFetchurl(*drv, outputs, netrcData, caFileData); else if (drv->builder == "builtin:buildenv") builtinBuildenv(*drv, outputs); else if (drv->builder == "builtin:unpack-channel") @@ -2205,6 +2241,14 @@ void LocalDerivationGoal::runChild() } } + // Now builder is not builtin + + Strings args; + args.push_back(std::string(baseNameOf(drv->builder))); + + for (auto & i : drv->args) + args.push_back(rewriteStrings(i, inputRewrites)); + #if __APPLE__ posix_spawnattr_t attrp; @@ -2226,21 +2270,15 @@ void LocalDerivationGoal::runChild() posix_spawnattr_setbinpref_np(&attrp, 1, &cpu, NULL); } - posix_spawn(NULL, builder.c_str(), NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data()); + posix_spawn(NULL, drv->builder.c_str(), NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data()); #else - execve(builder.c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data()); + execve(drv->builder.c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data()); #endif throw SysError("executing '%1%'", drv->builder); - } catch (Error & e) { - if (sendException) { - writeFull(STDERR_FILENO, "\1\n"); - FdSink sink(STDERR_FILENO); - sink << e; - sink.flush(); - } else - std::cerr << e.msg(); + } catch (...) { + handleChildException(sendException); _exit(1); } } @@ -3000,6 +3038,7 @@ void LocalDerivationGoal::deleteTmpDir(bool force) might have privileged stuff (like a copy of netrc). 
*/ if (settings.keepFailed && !force && !drv->isBuiltin()) { printError("note: keeping build directory '%s'", tmpDir); + chmod(topTmpDir.c_str(), 0755); chmod(tmpDir.c_str(), 0755); } else @@ -3020,7 +3059,7 @@ bool LocalDerivationGoal::isReadDesc(int fd) StorePath LocalDerivationGoal::makeFallbackPath(OutputNameView outputName) { // This is a bogus path type, constructed this way to ensure that it doesn't collide with any other store path - // See doc/manual/src/protocols/store-path.md for details + // See doc/manual/source/protocols/store-path.md for details // TODO: We may want to separate the responsibilities of constructing the path fingerprint and of actually doing the hashing auto pathType = "rewrite:" + std::string(drvPath.to_string()) + ":name:" + std::string(outputName); return worker.store.makeStorePath( @@ -3033,7 +3072,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(OutputNameView outputName) StorePath LocalDerivationGoal::makeFallbackPath(const StorePath & path) { // This is a bogus path type, constructed this way to ensure that it doesn't collide with any other store path - // See doc/manual/src/protocols/store-path.md for details + // See doc/manual/source/protocols/store-path.md for details auto pathType = "rewrite:" + std::string(drvPath.to_string()) + ":" + std::string(path.to_string()); return worker.store.makeStorePath( pathType, diff --git a/src/libstore/unix/build/local-derivation-goal.hh b/src/libstore/unix/build/local-derivation-goal.hh index bf25cf2a6..1ea247661 100644 --- a/src/libstore/unix/build/local-derivation-goal.hh +++ b/src/libstore/unix/build/local-derivation-goal.hh @@ -210,6 +210,11 @@ struct LocalDerivationGoal : public DerivationGoal */ void initEnv(); + /** + * Process messages sent by the sandbox initialization. + */ + void processSandboxSetupMessages(); + /** * Setup tmp dir location. */ @@ -220,8 +225,15 @@ struct LocalDerivationGoal : public DerivationGoal */ void writeStructuredAttrs(); + /** + * Start an in-process nix daemon thread for recursive-nix. + */ void startDaemon(); + /** + * Stop the in-process nix daemon thread. + * @see startDaemon + */ void stopDaemon(); /** diff --git a/src/libstore/unix/build/sandbox-defaults.sb b/src/libstore/unix/build/sandbox-defaults.sb index 6da01b735..15cd6daf5 100644 --- a/src/libstore/unix/build/sandbox-defaults.sb +++ b/src/libstore/unix/build/sandbox-defaults.sb @@ -49,6 +49,7 @@ R""( (if (param "_ALLOW_LOCAL_NETWORKING") (begin (allow network* (remote ip "localhost:*")) + (allow network-inbound (local ip "*:*")) ; required to bind and listen ; Allow access to /etc/resolv.conf (which is a symlink to ; /private/var/run/resolv.conf). diff --git a/src/libstore/worker-protocol-connection.cc b/src/libstore/worker-protocol-connection.cc index a47dbb689..6585df4be 100644 --- a/src/libstore/worker-protocol-connection.cc +++ b/src/libstore/worker-protocol-connection.cc @@ -12,7 +12,7 @@ WorkerProto::BasicClientConnection::~BasicClientConnection() try { to.flush(); } catch (...) 
{ - ignoreException(); + ignoreExceptionInDestructor(); } } @@ -32,7 +32,8 @@ static Logger::Fields readFields(Source & from) return fields; } -std::exception_ptr WorkerProto::BasicClientConnection::processStderrReturn(Sink * sink, Source * source, bool flush) +std::exception_ptr +WorkerProto::BasicClientConnection::processStderrReturn(Sink * sink, Source * source, bool flush, bool block) { if (flush) to.flush(); @@ -41,6 +42,9 @@ std::exception_ptr WorkerProto::BasicClientConnection::processStderrReturn(Sink while (true) { + if (!block && !from.hasData()) + break; + auto msg = readNum(from); if (msg == STDERR_WRITE) { @@ -95,8 +99,10 @@ std::exception_ptr WorkerProto::BasicClientConnection::processStderrReturn(Sink logger->result(act, type, fields); } - else if (msg == STDERR_LAST) + else if (msg == STDERR_LAST) { + assert(block); break; + } else throw Error("got unknown message type %x from Nix daemon", msg); @@ -130,9 +136,10 @@ std::exception_ptr WorkerProto::BasicClientConnection::processStderrReturn(Sink } } -void WorkerProto::BasicClientConnection::processStderr(bool * daemonException, Sink * sink, Source * source, bool flush) +void WorkerProto::BasicClientConnection::processStderr( + bool * daemonException, Sink * sink, Source * source, bool flush, bool block) { - auto ex = processStderrReturn(sink, source, flush); + auto ex = processStderrReturn(sink, source, flush, block); if (ex) { *daemonException = true; std::rethrow_exception(ex); diff --git a/src/libstore/worker-protocol-connection.hh b/src/libstore/worker-protocol-connection.hh index 9c96195b5..9665067dd 100644 --- a/src/libstore/worker-protocol-connection.hh +++ b/src/libstore/worker-protocol-connection.hh @@ -70,9 +70,10 @@ struct WorkerProto::BasicClientConnection : WorkerProto::BasicConnection virtual void closeWrite() = 0; - std::exception_ptr processStderrReturn(Sink * sink = 0, Source * source = 0, bool flush = true); + std::exception_ptr processStderrReturn(Sink * sink = 0, Source * source = 0, bool flush = true, bool block = true); - void processStderr(bool * daemonException, Sink * sink = 0, Source * source = 0, bool flush = true); + void + processStderr(bool * daemonException, Sink * sink = 0, Source * source = 0, bool flush = true, bool block = true); /** * Establishes connection, negotiating version. 
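The new non-blocking mode of processStderrReturn above hinges on the `from.hasData()` check: when `block` is false, the client only drains daemon messages that have already arrived instead of waiting for `STDERR_LAST`. As a rough, self-contained sketch of that readiness check (not the actual FdSource/BasicClientConnection implementation; the helper name and the use of stdin are assumptions for illustration), a zero-timeout poll() tells you whether a read would block:

#include <poll.h>
#include <unistd.h>
#include <cstdio>

// Zero-timeout poll: returns true only if data can be read from `fd`
// right now, so a subsequent read() will not block.
static bool hasPendingData(int fd)
{
    struct pollfd pfd{};
    pfd.fd = fd;
    pfd.events = POLLIN;
    return poll(&pfd, 1, 0) == 1 && (pfd.revents & POLLIN);
}

int main()
{
    // Drain whatever is already buffered on stdin, then stop instead of blocking.
    char buf[256];
    while (hasPendingData(STDIN_FILENO)) {
        ssize_t n = read(STDIN_FILENO, buf, sizeof buf);
        if (n <= 0)
            break;
        fwrite(buf, 1, (size_t) n, stdout);
    }
    return 0;
}

This lines up with the `assert(block)` in the hunk above: the final `STDERR_LAST` message is only ever consumed by a blocking call, while a non-blocking call simply returns once the already-buffered messages are exhausted.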
diff --git a/src/libutil-c/meson.build b/src/libutil-c/meson.build index b5ed19631..3d5a0b9c2 100644 --- a/src/libutil-c/meson.build +++ b/src/libutil-c/meson.build @@ -63,6 +63,7 @@ headers = [config_h] + files( headers += files('nix_api_util_internal.h') subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') this_library = library( 'nixutilc', diff --git a/src/libutil-c/nix_api_util.cc b/src/libutil-c/nix_api_util.cc index 4f65a4c12..992ea0a2a 100644 --- a/src/libutil-c/nix_api_util.cc +++ b/src/libutil-c/nix_api_util.cc @@ -57,6 +57,12 @@ nix_err nix_set_err_msg(nix_c_context * context, nix_err err, const char * msg) return err; } +void nix_clear_err(nix_c_context * context) +{ + if (context) + context->last_err_code = NIX_OK; +} + const char * nix_version_get() { return PACKAGE_VERSION; @@ -106,7 +112,7 @@ const char * nix_err_msg(nix_c_context * context, const nix_c_context * read_con { if (context) context->last_err_code = NIX_OK; - if (read_context->last_err) { + if (read_context->last_err && read_context->last_err_code != NIX_OK) { if (n) *n = read_context->last_err->size(); return read_context->last_err->c_str(); diff --git a/src/libutil-c/nix_api_util.h b/src/libutil-c/nix_api_util.h index ad6f32859..6790a6964 100644 --- a/src/libutil-c/nix_api_util.h +++ b/src/libutil-c/nix_api_util.h @@ -225,7 +225,9 @@ const char * nix_version_get(); * @param[out] n optional: a pointer to an unsigned int that is set to the * length of the error. * @return nullptr if no error message was ever set, - * a borrowed pointer to the error message otherwise. + * a borrowed pointer to the error message otherwise, which is valid + * until the next call to a Nix function, or until the context is + * destroyed. */ const char * nix_err_msg(nix_c_context * context, const nix_c_context * ctx, unsigned int * n); @@ -286,13 +288,34 @@ nix_err nix_err_code(const nix_c_context * read_context); * * All other use is internal to the API. * - * @param context context to write the error message to, or NULL + * @param context context to write the error message to, required unless C++ exceptions are supported * @param err The error code to set and return - * @param msg The error message to set. + * @param msg The error message to set. This string is copied. * @returns the error code set */ nix_err nix_set_err_msg(nix_c_context * context, nix_err err, const char * msg); +/** + * @brief Clear the error message from a nix context. + * + * This is performed implicitly by all functions that accept a context, so + * this won't be necessary in most cases. + * However, if you want to clear the error message without calling another + * function, you can use this. + * + * Example use case: a higher order function that helps with error handling, + * to make it more robust in the following scenario: + * + * 1. A previous call failed, and the error was caught and handled. + * 2. The context is reused with our error handling helper function. + * 3. The callback passed to the helper function doesn't actually make a call to + * a Nix function. + * 4. The handled error is raised again, from an unrelated call. + * + * This failure can be avoided by clearing the error message after handling it. 
+ */ +void nix_clear_err(nix_c_context * context); + /** * @} */ diff --git a/src/libutil-c/nix_api_util_internal.h b/src/libutil-c/nix_api_util_internal.h index aa829feaf..7fa4252ac 100644 --- a/src/libutil-c/nix_api_util_internal.h +++ b/src/libutil-c/nix_api_util_internal.h @@ -10,6 +10,7 @@ struct nix_c_context { nix_err last_err_code = NIX_OK; + /** The last error message. Always check last_err_code. This may not have been cleared, so that clearing is fast. */ std::optional last_err = {}; std::optional info = {}; std::string name = ""; diff --git a/src/libutil-c/package.nix b/src/libutil-c/package.nix index ccfafd4d3..35533f981 100644 --- a/src/libutil-c/package.nix +++ b/src/libutil-c/package.nix @@ -1,11 +1,6 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonLibrary , nix-util @@ -18,7 +13,7 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-util-c"; inherit version; @@ -35,14 +30,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "h") ./.) ]; - outputs = [ "out" "dev" ]; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - propagatedBuildInputs = [ nix-util ]; @@ -62,10 +49,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libutil-test-support/.version b/src/libutil-test-support/.version new file mode 120000 index 000000000..b7badcd0c --- /dev/null +++ b/src/libutil-test-support/.version @@ -0,0 +1 @@ +../../.version \ No newline at end of file diff --git a/src/libutil-test-support/build-utils-meson b/src/libutil-test-support/build-utils-meson new file mode 120000 index 000000000..5fff21bab --- /dev/null +++ b/src/libutil-test-support/build-utils-meson @@ -0,0 +1 @@ +../../build-utils-meson \ No newline at end of file diff --git a/src/libutil-test-support/local.mk b/src/libutil-test-support/local.mk new file mode 100644 index 000000000..5f7835c9f --- /dev/null +++ b/src/libutil-test-support/local.mk @@ -0,0 +1,19 @@ +libraries += libutil-test-support + +libutil-test-support_NAME = libnixutil-test-support + +libutil-test-support_DIR := $(d) + +ifeq ($(INSTALL_UNIT_TESTS), yes) + libutil-test-support_INSTALL_DIR := $(checklibdir) +else + libutil-test-support_INSTALL_DIR := +endif + +libutil-test-support_SOURCES := $(wildcard $(d)/tests/*.cc) + +libutil-test-support_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES) + +libutil-test-support_LIBS = libutil + +libutil-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck diff --git a/tests/unit/libutil-support/meson.build b/src/libutil-test-support/meson.build similarity index 95% rename from tests/unit/libutil-support/meson.build rename to src/libutil-test-support/meson.build index 6be4972c6..c5e1ba80b 100644 --- a/tests/unit/libutil-support/meson.build +++ b/src/libutil-test-support/meson.build @@ -46,12 +46,14 @@ include_dirs = [include_directories('.')] headers = files( 'tests/characterization.hh', + 'tests/gtest-with-params.hh', 'tests/hash.hh', 'tests/nix_api_util.hh', 'tests/string_callback.hh', ) subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') this_library = library( 'nix-util-test-support', diff --git a/tests/unit/libutil-support/package.nix b/src/libutil-test-support/package.nix similarity index 68% rename from 
tests/unit/libutil-support/package.nix rename to src/libutil-test-support/package.nix index 1665804cb..2525e1602 100644 --- a/tests/unit/libutil-support/package.nix +++ b/src/libutil-test-support/package.nix @@ -1,11 +1,6 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonLibrary , nix-util @@ -20,15 +15,15 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-util-test-support"; inherit version; workDir = ./.; fileset = fileset.unions [ - ../../../build-utils-meson + ../../build-utils-meson ./build-utils-meson - ../../../.version + ../../.version ./.version ./meson.build # ./meson.options @@ -36,14 +31,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) ]; - outputs = [ "out" "dev" ]; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - propagatedBuildInputs = [ nix-util rapidcheck @@ -54,7 +41,7 @@ mkMesonDerivation (finalAttrs: { # Do the meson utils, without modification. '' chmod u+w ./.version - echo ${version} > ../../../.version + echo ${version} > ../../.version ''; mesonFlags = [ @@ -64,10 +51,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/tests/unit/libutil-support/tests/characterization.hh b/src/libutil-test-support/tests/characterization.hh similarity index 87% rename from tests/unit/libutil-support/tests/characterization.hh rename to src/libutil-test-support/tests/characterization.hh index 19ba824ac..5e790e75b 100644 --- a/tests/unit/libutil-support/tests/characterization.hh +++ b/src/libutil-test-support/tests/characterization.hh @@ -13,7 +13,7 @@ namespace nix { * The path to the unit test data directory. See the contributing guide * in the manual for further details. */ -static inline Path getUnitTestData() { +static inline std::filesystem::path getUnitTestData() { return getEnv("_NIX_TEST_UNIT_DATA").value(); } @@ -36,7 +36,7 @@ protected: * While the "golden master" for this characterization test is * located. It should not be shared with any other test. 
*/ - virtual Path goldenMaster(PathView testStem) const = 0; + virtual std::filesystem::path goldenMaster(PathView testStem) const = 0; public: /** @@ -77,7 +77,7 @@ public: if (testAccept()) { - createDirs(dirOf(file)); + std::filesystem::create_directories(file.parent_path()); writeFile2(file, got); GTEST_SKIP() << "Updating golden master " @@ -97,10 +97,10 @@ public: { writeTest( testStem, test, - [](const Path & f) -> std::string { + [](const std::filesystem::path & f) -> std::string { return readFile(f); }, - [](const Path & f, const std::string & c) { + [](const std::filesystem::path & f, const std::string & c) { return writeFile(f, c); }); } diff --git a/tests/unit/libutil-support/tests/gtest-with-params.hh b/src/libutil-test-support/tests/gtest-with-params.hh similarity index 100% rename from tests/unit/libutil-support/tests/gtest-with-params.hh rename to src/libutil-test-support/tests/gtest-with-params.hh diff --git a/tests/unit/libutil-support/tests/hash.cc b/src/libutil-test-support/tests/hash.cc similarity index 100% rename from tests/unit/libutil-support/tests/hash.cc rename to src/libutil-test-support/tests/hash.cc diff --git a/tests/unit/libutil-support/tests/hash.hh b/src/libutil-test-support/tests/hash.hh similarity index 100% rename from tests/unit/libutil-support/tests/hash.hh rename to src/libutil-test-support/tests/hash.hh diff --git a/tests/unit/libutil-support/tests/nix_api_util.hh b/src/libutil-test-support/tests/nix_api_util.hh similarity index 100% rename from tests/unit/libutil-support/tests/nix_api_util.hh rename to src/libutil-test-support/tests/nix_api_util.hh diff --git a/tests/unit/libutil-support/tests/string_callback.cc b/src/libutil-test-support/tests/string_callback.cc similarity index 100% rename from tests/unit/libutil-support/tests/string_callback.cc rename to src/libutil-test-support/tests/string_callback.cc diff --git a/tests/unit/libutil-support/tests/string_callback.hh b/src/libutil-test-support/tests/string_callback.hh similarity index 100% rename from tests/unit/libutil-support/tests/string_callback.hh rename to src/libutil-test-support/tests/string_callback.hh diff --git a/tests/unit/libutil-support/tests/tracing-file-system-object-sink.cc b/src/libutil-test-support/tests/tracing-file-system-object-sink.cc similarity index 100% rename from tests/unit/libutil-support/tests/tracing-file-system-object-sink.cc rename to src/libutil-test-support/tests/tracing-file-system-object-sink.cc diff --git a/tests/unit/libutil-support/tests/tracing-file-system-object-sink.hh b/src/libutil-test-support/tests/tracing-file-system-object-sink.hh similarity index 100% rename from tests/unit/libutil-support/tests/tracing-file-system-object-sink.hh rename to src/libutil-test-support/tests/tracing-file-system-object-sink.hh diff --git a/src/libutil-tests/.version b/src/libutil-tests/.version new file mode 120000 index 000000000..b7badcd0c --- /dev/null +++ b/src/libutil-tests/.version @@ -0,0 +1 @@ +../../.version \ No newline at end of file diff --git a/tests/unit/libutil/args.cc b/src/libutil-tests/args.cc similarity index 100% rename from tests/unit/libutil/args.cc rename to src/libutil-tests/args.cc diff --git a/src/libutil-tests/build-utils-meson b/src/libutil-tests/build-utils-meson new file mode 120000 index 000000000..5fff21bab --- /dev/null +++ b/src/libutil-tests/build-utils-meson @@ -0,0 +1 @@ +../../build-utils-meson \ No newline at end of file diff --git a/tests/unit/libutil/canon-path.cc b/src/libutil-tests/canon-path.cc similarity index 100% rename 
from tests/unit/libutil/canon-path.cc rename to src/libutil-tests/canon-path.cc diff --git a/tests/unit/libutil/checked-arithmetic.cc b/src/libutil-tests/checked-arithmetic.cc similarity index 100% rename from tests/unit/libutil/checked-arithmetic.cc rename to src/libutil-tests/checked-arithmetic.cc diff --git a/tests/unit/libutil/chunked-vector.cc b/src/libutil-tests/chunked-vector.cc similarity index 100% rename from tests/unit/libutil/chunked-vector.cc rename to src/libutil-tests/chunked-vector.cc diff --git a/tests/unit/libutil/closure.cc b/src/libutil-tests/closure.cc similarity index 100% rename from tests/unit/libutil/closure.cc rename to src/libutil-tests/closure.cc diff --git a/tests/unit/libutil/compression.cc b/src/libutil-tests/compression.cc similarity index 100% rename from tests/unit/libutil/compression.cc rename to src/libutil-tests/compression.cc diff --git a/tests/unit/libutil/config.cc b/src/libutil-tests/config.cc similarity index 100% rename from tests/unit/libutil/config.cc rename to src/libutil-tests/config.cc diff --git a/tests/unit/libutil/data/git/check-data.sh b/src/libutil-tests/data/git/check-data.sh similarity index 100% rename from tests/unit/libutil/data/git/check-data.sh rename to src/libutil-tests/data/git/check-data.sh diff --git a/tests/unit/libutil/data/git/hello-world-blob.bin b/src/libutil-tests/data/git/hello-world-blob.bin similarity index 100% rename from tests/unit/libutil/data/git/hello-world-blob.bin rename to src/libutil-tests/data/git/hello-world-blob.bin diff --git a/tests/unit/libutil/data/git/hello-world.bin b/src/libutil-tests/data/git/hello-world.bin similarity index 100% rename from tests/unit/libutil/data/git/hello-world.bin rename to src/libutil-tests/data/git/hello-world.bin diff --git a/tests/unit/libutil/data/git/tree.bin b/src/libutil-tests/data/git/tree.bin similarity index 100% rename from tests/unit/libutil/data/git/tree.bin rename to src/libutil-tests/data/git/tree.bin diff --git a/tests/unit/libutil/data/git/tree.txt b/src/libutil-tests/data/git/tree.txt similarity index 100% rename from tests/unit/libutil/data/git/tree.txt rename to src/libutil-tests/data/git/tree.txt diff --git a/tests/unit/libutil/executable-path.cc b/src/libutil-tests/executable-path.cc similarity index 100% rename from tests/unit/libutil/executable-path.cc rename to src/libutil-tests/executable-path.cc diff --git a/tests/unit/libutil/file-content-address.cc b/src/libutil-tests/file-content-address.cc similarity index 100% rename from tests/unit/libutil/file-content-address.cc rename to src/libutil-tests/file-content-address.cc diff --git a/tests/unit/libutil/file-system.cc b/src/libutil-tests/file-system.cc similarity index 75% rename from tests/unit/libutil/file-system.cc rename to src/libutil-tests/file-system.cc index cfddaae1c..7ef804f34 100644 --- a/tests/unit/libutil/file-system.cc +++ b/src/libutil-tests/file-system.cc @@ -12,8 +12,8 @@ #include #ifdef _WIN32 -# define FS_SEP "\\" -# define FS_ROOT "C:" FS_SEP // Need a mounted one, C drive is likely +# define FS_SEP L"\\" +# define FS_ROOT L"C:" FS_SEP // Need a mounted one, C drive is likely #else # define FS_SEP "/" # define FS_ROOT FS_SEP @@ -23,6 +23,12 @@ # define PATH_MAX 4096 #endif +#ifdef _WIN32 +# define GET_CWD _wgetcwd +#else +# define GET_CWD getcwd +#endif + namespace nix { /* ----------- tests for file-system.hh -------------------------------------*/ @@ -33,34 +39,34 @@ namespace nix { TEST(absPath, doesntChangeRoot) { - auto p = absPath(FS_ROOT); + auto p = 
absPath(std::filesystem::path{FS_ROOT}); ASSERT_EQ(p, FS_ROOT); } TEST(absPath, turnsEmptyPathIntoCWD) { - char cwd[PATH_MAX + 1]; - auto p = absPath(""); + OsChar cwd[PATH_MAX + 1]; + auto p = absPath(std::filesystem::path{""}); - ASSERT_EQ(p, getcwd((char *) &cwd, PATH_MAX)); + ASSERT_EQ(p, GET_CWD((OsChar *) &cwd, PATH_MAX)); } TEST(absPath, usesOptionalBasePathWhenGiven) { - char _cwd[PATH_MAX + 1]; - char * cwd = getcwd((char *) &_cwd, PATH_MAX); + OsChar _cwd[PATH_MAX + 1]; + OsChar * cwd = GET_CWD((OsChar *) &_cwd, PATH_MAX); - auto p = absPath("", cwd); + auto p = absPath(std::filesystem::path{""}.string(), std::filesystem::path{cwd}.string()); - ASSERT_EQ(p, cwd); + ASSERT_EQ(p, std::filesystem::path{cwd}.string()); } TEST(absPath, isIdempotent) { - char _cwd[PATH_MAX + 1]; - char * cwd = getcwd((char *) &_cwd, PATH_MAX); - auto p1 = absPath(cwd); + OsChar _cwd[PATH_MAX + 1]; + OsChar * cwd = GET_CWD((OsChar *) &_cwd, PATH_MAX); + auto p1 = absPath(std::filesystem::path{cwd}); auto p2 = absPath(p1); ASSERT_EQ(p1, p2); @@ -68,8 +74,8 @@ TEST(absPath, isIdempotent) TEST(absPath, pathIsCanonicalised) { - auto path = FS_ROOT "some/path/with/trailing/dot/."; - auto p1 = absPath(path); + auto path = FS_ROOT OS_STR("some/path/with/trailing/dot/."); + auto p1 = absPath(std::filesystem::path{path}); auto p2 = absPath(p1); ASSERT_EQ(p1, FS_ROOT "some" FS_SEP "path" FS_SEP "with" FS_SEP "trailing" FS_SEP "dot"); @@ -82,26 +88,26 @@ TEST(absPath, pathIsCanonicalised) TEST(canonPath, removesTrailingSlashes) { - auto path = FS_ROOT "this/is/a/path//"; - auto p = canonPath(path); + std::filesystem::path path = FS_ROOT "this/is/a/path//"; + auto p = canonPath(path.string()); - ASSERT_EQ(p, FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path"); + ASSERT_EQ(p, std::filesystem::path{FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path"}.string()); } TEST(canonPath, removesDots) { - auto path = FS_ROOT "this/./is/a/path/./"; - auto p = canonPath(path); + std::filesystem::path path = FS_ROOT "this/./is/a/path/./"; + auto p = canonPath(path.string()); - ASSERT_EQ(p, FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path"); + ASSERT_EQ(p, std::filesystem::path{FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path"}.string()); } TEST(canonPath, removesDots2) { - auto path = FS_ROOT "this/a/../is/a////path/foo/.."; - auto p = canonPath(path); + std::filesystem::path path = FS_ROOT "this/a/../is/a////path/foo/.."; + auto p = canonPath(path.string()); - ASSERT_EQ(p, FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path"); + ASSERT_EQ(p, std::filesystem::path{FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path"}.string()); } TEST(canonPath, requiresAbsolutePath) @@ -243,7 +249,7 @@ TEST(isDirOrInDir, DISABLED_shouldWork) TEST(pathExists, rootExists) { - ASSERT_TRUE(pathExists(FS_ROOT)); + ASSERT_TRUE(pathExists(std::filesystem::path{FS_ROOT}.string())); } TEST(pathExists, cwdExists) diff --git a/tests/unit/libutil/git.cc b/src/libutil-tests/git.cc similarity index 96% rename from tests/unit/libutil/git.cc rename to src/libutil-tests/git.cc index 3d01d9806..048956a58 100644 --- a/tests/unit/libutil/git.cc +++ b/src/libutil-tests/git.cc @@ -11,12 +11,12 @@ using namespace git; class GitTest : public CharacterizationTest { - Path unitTestData = getUnitTestData() + "/git"; + std::filesystem::path unitTestData = getUnitTestData() / "git"; public: - Path goldenMaster(std::string_view testStem) const override { - return unitTestData + "/" + testStem; + std::filesystem::path goldenMaster(std::string_view testStem) const override 
{ + return unitTestData / std::string(testStem); } /** @@ -88,7 +88,7 @@ TEST_F(GitTest, blob_write) { /** * This data is for "shallow" tree tests. However, we use "real" hashes * so that we can check our test data in a small shell script test test - * (`tests/unit/libutil/data/git/check-data.sh`). + * (`src/libutil-tests/data/git/check-data.sh`). */ const static Tree tree = { { diff --git a/tests/unit/libutil/hash.cc b/src/libutil-tests/hash.cc similarity index 100% rename from tests/unit/libutil/hash.cc rename to src/libutil-tests/hash.cc diff --git a/tests/unit/libutil/hilite.cc b/src/libutil-tests/hilite.cc similarity index 100% rename from tests/unit/libutil/hilite.cc rename to src/libutil-tests/hilite.cc diff --git a/tests/unit/libutil/json-utils.cc b/src/libutil-tests/json-utils.cc similarity index 100% rename from tests/unit/libutil/json-utils.cc rename to src/libutil-tests/json-utils.cc diff --git a/src/libutil-tests/local.mk b/src/libutil-tests/local.mk new file mode 100644 index 000000000..c747863a4 --- /dev/null +++ b/src/libutil-tests/local.mk @@ -0,0 +1,37 @@ +check: libutil-tests_RUN + +programs += libutil-tests + +libutil-tests_NAME = libnixutil-tests + +libutil-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libutil-tests.xml + +libutil-tests_DIR := $(d) + +ifeq ($(INSTALL_UNIT_TESTS), yes) + libutil-tests_INSTALL_DIR := $(checkbindir) +else + libutil-tests_INSTALL_DIR := +endif + +libutil-tests_SOURCES := $(wildcard $(d)/*.cc) + +libutil-tests_EXTRA_INCLUDES = \ + -I src/libutil-test-support \ + $(INCLUDE_libutil) \ + $(INCLUDE_libutilc) + +libutil-tests_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES) + +libutil-tests_LIBS = libutil-test-support libutil libutilc + +libutil-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) + +ifdef HOST_WINDOWS + # Increase the default reserved stack size to 65 MB so Nix doesn't run out of space + libutil-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024))) +endif + +check: $(d)/data/git/check-data.sh.test + +$(eval $(call run-test,$(d)/data/git/check-data.sh)) diff --git a/tests/unit/libutil/logging.cc b/src/libutil-tests/logging.cc similarity index 100% rename from tests/unit/libutil/logging.cc rename to src/libutil-tests/logging.cc diff --git a/tests/unit/libutil/lru-cache.cc b/src/libutil-tests/lru-cache.cc similarity index 100% rename from tests/unit/libutil/lru-cache.cc rename to src/libutil-tests/lru-cache.cc diff --git a/tests/unit/libutil/meson.build b/src/libutil-tests/meson.build similarity index 95% rename from tests/unit/libutil/meson.build rename to src/libutil-tests/meson.build index 83cec13ec..5c3b5e5a3 100644 --- a/tests/unit/libutil/meson.build +++ b/src/libutil-tests/meson.build @@ -28,6 +28,7 @@ subdir('build-utils-meson/subprojects') subdir('build-utils-meson/threads') subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') rapidcheck = dependency('rapidcheck') deps_private += rapidcheck @@ -48,12 +49,14 @@ subdir('build-utils-meson/diagnostics') sources = files( 'args.cc', 'canon-path.cc', + 'checked-arithmetic.cc', 'chunked-vector.cc', 'closure.cc', 'compression.cc', 'config.cc', 'executable-path.cc', 'file-content-address.cc', + 'file-system.cc', 'git.cc', 'hash.cc', 'hilite.cc', @@ -62,6 +65,7 @@ sources = files( 'lru-cache.cc', 'nix_api_util.cc', 'pool.cc', + 'position.cc', 'processes.cc', 'references.cc', 'spawn.cc', diff --git a/tests/unit/libutil/nix_api_util.cc b/src/libutil-tests/nix_api_util.cc similarity index 84% rename from 
tests/unit/libutil/nix_api_util.cc rename to src/libutil-tests/nix_api_util.cc index 2b7e38225..b36f71042 100644 --- a/tests/unit/libutil/nix_api_util.cc +++ b/src/libutil-tests/nix_api_util.cc @@ -7,6 +7,8 @@ #include +#include + namespace nixC { TEST_F(nix_api_util_context, nix_context_error) @@ -31,6 +33,9 @@ TEST_F(nix_api_util_context, nix_context_error) } ASSERT_EQ(ctx->last_err_code, NIX_ERR_UNKNOWN); ASSERT_EQ(*ctx->last_err, err_msg_ref); + + nix_clear_err(ctx); + ASSERT_EQ(ctx->last_err_code, NIX_OK); } TEST_F(nix_api_util_context, nix_set_err_msg) @@ -54,6 +59,14 @@ struct MySettings : nix::Config MySettings mySettings; static nix::GlobalConfig::Register rs(&mySettings); +static auto createOwnedNixContext() +{ + return std::unique_ptr(nix_c_context_create(), {}); +} + TEST_F(nix_api_util_context, nix_setting_get) { ASSERT_EQ(ctx->last_err_code, NIX_OK); @@ -94,7 +107,8 @@ TEST_F(nix_api_util_context, nix_err_msg) // advanced usage unsigned int sz; - err_msg = nix_err_msg(nix_c_context_create(), ctx, &sz); + auto new_ctx = createOwnedNixContext(); + err_msg = nix_err_msg(new_ctx.get(), ctx, &sz); ASSERT_EQ(sz, err_msg.size()); } @@ -110,7 +124,8 @@ TEST_F(nix_api_util_context, nix_err_info_msg) } catch (...) { nix_context_error(ctx); } - nix_err_info_msg(nix_c_context_create(), ctx, OBSERVE_STRING(err_info)); + auto new_ctx = createOwnedNixContext(); + nix_err_info_msg(new_ctx.get(), ctx, OBSERVE_STRING(err_info)); ASSERT_STREQ("testing error", err_info.c_str()); } @@ -127,7 +142,8 @@ TEST_F(nix_api_util_context, nix_err_name) } catch (...) { nix_context_error(ctx); } - nix_err_name(nix_c_context_create(), ctx, OBSERVE_STRING(err_name)); + auto new_ctx = createOwnedNixContext(); + nix_err_name(new_ctx.get(), ctx, OBSERVE_STRING(err_name)); ASSERT_EQ(std::string(err_name), "nix::Error"); } diff --git a/tests/unit/libutil/package.nix b/src/libutil-tests/package.nix similarity index 80% rename from tests/unit/libutil/package.nix rename to src/libutil-tests/package.nix index 2fce5bfa8..b099037ee 100644 --- a/tests/unit/libutil/package.nix +++ b/src/libutil-tests/package.nix @@ -1,12 +1,7 @@ { lib , buildPackages , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonExecutable , nix-util , nix-util-c @@ -25,15 +20,15 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonExecutable (finalAttrs: { pname = "nix-util-tests"; inherit version; workDir = ./.; fileset = fileset.unions [ - ../../../build-utils-meson + ../../build-utils-meson ./build-utils-meson - ../../../.version + ../../.version ./.version ./meson.build # ./meson.options @@ -41,12 +36,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) ]; - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - buildInputs = [ nix-util nix-util-c @@ -60,7 +49,7 @@ mkMesonDerivation (finalAttrs: { # Do the meson utils, without modification. 
'' chmod u+w ./.version - echo ${version} > ../../../.version + echo ${version} > ../../.version ''; mesonFlags = [ @@ -70,10 +59,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - passthru = { tests = { run = runCommand "${finalAttrs.pname}-run" { diff --git a/tests/unit/libutil/pool.cc b/src/libutil-tests/pool.cc similarity index 100% rename from tests/unit/libutil/pool.cc rename to src/libutil-tests/pool.cc diff --git a/tests/unit/libutil/position.cc b/src/libutil-tests/position.cc similarity index 100% rename from tests/unit/libutil/position.cc rename to src/libutil-tests/position.cc diff --git a/tests/unit/libutil/processes.cc b/src/libutil-tests/processes.cc similarity index 100% rename from tests/unit/libutil/processes.cc rename to src/libutil-tests/processes.cc diff --git a/tests/unit/libutil/references.cc b/src/libutil-tests/references.cc similarity index 100% rename from tests/unit/libutil/references.cc rename to src/libutil-tests/references.cc diff --git a/tests/unit/libutil/spawn.cc b/src/libutil-tests/spawn.cc similarity index 100% rename from tests/unit/libutil/spawn.cc rename to src/libutil-tests/spawn.cc diff --git a/tests/unit/libutil/strings.cc b/src/libutil-tests/strings.cc similarity index 100% rename from tests/unit/libutil/strings.cc rename to src/libutil-tests/strings.cc diff --git a/tests/unit/libutil/suggestions.cc b/src/libutil-tests/suggestions.cc similarity index 100% rename from tests/unit/libutil/suggestions.cc rename to src/libutil-tests/suggestions.cc diff --git a/tests/unit/libutil/terminal.cc b/src/libutil-tests/terminal.cc similarity index 100% rename from tests/unit/libutil/terminal.cc rename to src/libutil-tests/terminal.cc diff --git a/tests/unit/libutil/url.cc b/src/libutil-tests/url.cc similarity index 100% rename from tests/unit/libutil/url.cc rename to src/libutil-tests/url.cc diff --git a/tests/unit/libutil/util.cc b/src/libutil-tests/util.cc similarity index 100% rename from tests/unit/libutil/util.cc rename to src/libutil-tests/util.cc diff --git a/tests/unit/libutil/xml-writer.cc b/src/libutil-tests/xml-writer.cc similarity index 100% rename from tests/unit/libutil/xml-writer.cc rename to src/libutil-tests/xml-writer.cc diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index e2ebcda0c..20d8a1e09 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -23,7 +23,7 @@ struct ArchiveSettings : Config false, #endif "use-case-hack", - "Whether to enable a Darwin-specific hack for dealing with file name collisions."}; + "Whether to enable a macOS-specific hack for dealing with file name case collisions."}; }; static ArchiveSettings archiveSettings; @@ -82,7 +82,7 @@ void SourceAccessor::dumpPath( name.erase(pos); } if (!unhacked.emplace(name, i.first).second) - throw Error("file name collision in between '%s' and '%s'", + throw Error("file name collision between '%s' and '%s'", (path / unhacked[name]), (path / i.first)); } else @@ -128,9 +128,10 @@ void dumpString(std::string_view s, Sink & sink) } -static SerialisationError badArchive(const std::string & s) +template +static SerialisationError badArchive(std::string_view s, const Args & ... 
args) { - return SerialisationError("bad archive: " + s); + return SerialisationError("bad archive: " + s, args...); } @@ -167,115 +168,97 @@ struct CaseInsensitiveCompare static void parse(FileSystemObjectSink & sink, Source & source, const CanonPath & path) { - std::string s; - - s = readString(source); - if (s != "(") throw badArchive("expected open tag"); - - std::map names; - auto getString = [&]() { checkInterrupt(); return readString(source); }; - // For first iteration - s = getString(); + auto expectTag = [&](std::string_view expected) { + auto tag = getString(); + if (tag != expected) + throw badArchive("expected tag '%s', got '%s'", expected, tag); + }; - while (1) { + expectTag("("); - if (s == ")") { - break; - } + expectTag("type"); - else if (s == "type") { - std::string t = getString(); + auto type = getString(); - if (t == "regular") { - sink.createRegularFile(path, [&](auto & crf) { - while (1) { - s = getString(); + if (type == "regular") { + sink.createRegularFile(path, [&](auto & crf) { + auto tag = getString(); - if (s == "contents") { - parseContents(crf, source); - } - - else if (s == "executable") { - auto s2 = getString(); - if (s2 != "") throw badArchive("executable marker has non-empty value"); - crf.isExecutable(); - } - - else break; - } - }); + if (tag == "executable") { + auto s2 = getString(); + if (s2 != "") throw badArchive("executable marker has non-empty value"); + crf.isExecutable(); + tag = getString(); } - else if (t == "directory") { - sink.createDirectory(path); + if (tag == "contents") + parseContents(crf, source); - while (1) { - s = getString(); - - if (s == "entry") { - std::string name, prevName; - - s = getString(); - if (s != "(") throw badArchive("expected open tag"); - - while (1) { - s = getString(); - - if (s == ")") { - break; - } else if (s == "name") { - name = getString(); - if (name.empty() || name == "." || name == ".." || name.find('/') != std::string::npos || name.find((char) 0) != std::string::npos) - throw Error("NAR contains invalid file name '%1%'", name); - if (name <= prevName) - throw Error("NAR directory is not sorted"); - prevName = name; - if (archiveSettings.useCaseHack) { - auto i = names.find(name); - if (i != names.end()) { - debug("case collision between '%1%' and '%2%'", i->first, name); - name += caseHackSuffix; - name += std::to_string(++i->second); - } else - names[name] = 0; - } - } else if (s == "node") { - if (name.empty()) throw badArchive("entry name missing"); - parse(sink, source, path / name); - } else - throw badArchive("unknown field " + s); - } - } - - else break; - } - } - - else if (t == "symlink") { - s = getString(); - - if (s != "target") - throw badArchive("expected 'target' got " + s); - - std::string target = getString(); - sink.createSymlink(path, target); - - // for the next iteration - s = getString(); - } - - else throw badArchive("unknown file type " + t); - - } - - else - throw badArchive("unknown field " + s); + expectTag(")"); + }); } + + else if (type == "directory") { + sink.createDirectory(path); + + std::map names; + + std::string prevName; + + while (1) { + auto tag = getString(); + + if (tag == ")") break; + + if (tag != "entry") + throw badArchive("expected tag 'entry' or ')', got '%s'", tag); + + expectTag("("); + + expectTag("name"); + + auto name = getString(); + if (name.empty() || name == "." || name == ".." 
|| name.find('/') != std::string::npos || name.find((char) 0) != std::string::npos) + throw badArchive("NAR contains invalid file name '%1%'", name); + if (name <= prevName) + throw badArchive("NAR directory is not sorted"); + prevName = name; + if (archiveSettings.useCaseHack) { + auto i = names.find(name); + if (i != names.end()) { + debug("case collision between '%1%' and '%2%'", i->first, name); + name += caseHackSuffix; + name += std::to_string(++i->second); + auto j = names.find(name); + if (j != names.end()) + throw badArchive("NAR contains file name '%s' that collides with case-hacked file name '%s'", prevName, j->first); + } else + names[name] = 0; + } + + expectTag("node"); + + parse(sink, source, path / name); + + expectTag(")"); + } + } + + else if (type == "symlink") { + expectTag("target"); + + auto target = getString(); + sink.createSymlink(path, target); + + expectTag(")"); + } + + else throw badArchive("unknown file type '%s'", type); } @@ -294,9 +277,9 @@ void parseDump(FileSystemObjectSink & sink, Source & source) } -void restorePath(const std::filesystem::path & path, Source & source) +void restorePath(const std::filesystem::path & path, Source & source, bool startFsync) { - RestoreSink sink; + RestoreSink sink{startFsync}; sink.dstPath = path; parseDump(sink, source); } diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh index 2da8f0cb1..c38fa8a46 100644 --- a/src/libutil/archive.hh +++ b/src/libutil/archive.hh @@ -75,7 +75,7 @@ void dumpString(std::string_view s, Sink & sink); void parseDump(FileSystemObjectSink & sink, Source & source); -void restorePath(const std::filesystem::path & path, Source & source); +void restorePath(const std::filesystem::path & path, Source & source, bool startFsync = false); /** * Read a NAR from 'source' and write it to 'sink'. diff --git a/src/libutil/args.cc b/src/libutil/args.cc index d58f4b4ae..4e87389d6 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -293,7 +293,7 @@ void RootArgs::parseCmdline(const Strings & _cmdline, bool allowShebang) // We match one space after `nix` so that we preserve indentation. // No space is necessary for an empty line. An empty line has basically no effect. if (std::regex_match(line, match, std::regex("^#!\\s*nix(:? |$)(.*)$"))) - shebangContent += match[2].str() + "\n"; + shebangContent += std::string_view{match[2].first, match[2].second} + "\n"; } for (const auto & word : parseShebangContent(shebangContent)) { cmdline.push_back(word); diff --git a/src/libutil/args.hh b/src/libutil/args.hh index c0236ee3d..127a0809e 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -2,6 +2,7 @@ ///@file #include +#include #include #include #include @@ -113,6 +114,16 @@ protected: , arity(1) { } + Handler(std::filesystem::path * dest) + : fun([dest](std::vector ss) { *dest = ss[0]; }) + , arity(1) + { } + + Handler(std::optional * dest) + : fun([dest](std::vector ss) { *dest = ss[0]; }) + , arity(1) + { } + template Handler(T * dest, const T & val) : fun([dest, val](std::vector ss) { *dest = val; }) @@ -283,6 +294,18 @@ public: }); } + /** + * Expect a path argument. + */ + void expectArg(const std::string & label, std::filesystem::path * dest, bool optional = false) + { + expectArgs({ + .label = label, + .optional = optional, + .handler = {dest} + }); + } + /** * Expect 0 or more arguments. 
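The new `Handler` overloads and the `expectArg` variant above let a command bind arguments directly to `std::filesystem::path` values. A hedged sketch of how a hypothetical command might use them, assuming the `std::optional` overload is for `std::optional<std::filesystem::path>` (class name and option names are made up):

```cpp
#include <filesystem>
#include <optional>

#include "args.hh" // assumed include path for nix::Args

struct HypotheticalCmd : nix::Args
{
    std::filesystem::path input;
    std::optional<std::filesystem::path> output;

    HypotheticalCmd()
    {
        // Positional argument stored straight into a std::filesystem::path.
        expectArg("input", &input);

        // Optional flag stored into std::optional<std::filesystem::path>.
        addFlag({
            .longName = "output",
            .description = "where to write the result",
            .labels = {"path"},
            .handler = {&output},
        });
    }
};
```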
*/ diff --git a/src/libutil/callback.hh b/src/libutil/callback.hh index 3710d1239..26c386d80 100644 --- a/src/libutil/callback.hh +++ b/src/libutil/callback.hh @@ -21,7 +21,9 @@ public: Callback(std::function)> fun) : fun(fun) { } - Callback(Callback && callback) : fun(std::move(callback.fun)) + // NOTE: std::function is noexcept move-constructible since C++20. + Callback(Callback && callback) noexcept(std::is_nothrow_move_constructible_v) + : fun(std::move(callback.fun)) { auto prev = callback.done.test_and_set(); if (prev) done.test_and_set(); diff --git a/src/libutil/config-global.cc b/src/libutil/config-global.cc index 4c08898a4..3ed1dd1d3 100644 --- a/src/libutil/config-global.cc +++ b/src/libutil/config-global.cc @@ -1,5 +1,7 @@ #include "config-global.hh" +#include + namespace nix { bool GlobalConfig::set(const std::string & name, const std::string & value) diff --git a/src/libutil/current-process.cc b/src/libutil/current-process.cc index 0bc46d746..ac01f441e 100644 --- a/src/libutil/current-process.cc +++ b/src/libutil/current-process.cc @@ -32,11 +32,7 @@ unsigned int getMaxCPU() auto cgroupFS = getCgroupFS(); if (!cgroupFS) return 0; - auto cgroups = getCgroups("/proc/self/cgroup"); - auto cgroup = cgroups[""]; - if (cgroup == "") return 0; - - auto cpuFile = *cgroupFS + "/" + cgroup + "/cpu.max"; + auto cpuFile = *cgroupFS + "/" + getCurrentCgroup() + "/cpu.max"; auto cpuMax = readFile(cpuFile); auto cpuMaxParts = tokenizeString>(cpuMax, " \n"); @@ -49,7 +45,7 @@ unsigned int getMaxCPU() auto period = cpuMaxParts[1]; if (quota != "max") return std::ceil(std::stoi(quota) / std::stof(period)); - } catch (Error &) { ignoreException(lvlDebug); } + } catch (Error &) { ignoreExceptionInDestructor(lvlDebug); } #endif return 0; diff --git a/src/libutil/environment-variables.hh b/src/libutil/environment-variables.hh index 879e1f304..1a95f5c97 100644 --- a/src/libutil/environment-variables.hh +++ b/src/libutil/environment-variables.hh @@ -13,6 +13,8 @@ namespace nix { +static constexpr auto environmentVariablesCategory = "Options that change environment variables"; + /** * @return an environment variable. */ diff --git a/src/libutil/exec.hh b/src/libutil/exec.hh new file mode 100644 index 000000000..cbbe80c4e --- /dev/null +++ b/src/libutil/exec.hh @@ -0,0 +1,15 @@ +#pragma once + +#include "os-string.hh" + +namespace nix { + +/** + * `execvpe` is a GNU extension, so we need to implement it for other POSIX + * platforms. + * + * We use our own implementation unconditionally for consistency. + */ +int execvpe(const OsChar * file0, const OsChar * const argv[], const OsChar * const envp[]); + +} diff --git a/src/libutil/executable-path.cc b/src/libutil/executable-path.cc index 8005a19be..ebd522a41 100644 --- a/src/libutil/executable-path.cc +++ b/src/libutil/executable-path.cc @@ -6,7 +6,9 @@ namespace nix { -namespace fs = std::filesystem; +namespace fs { +using namespace std::filesystem; +} constexpr static const OsStringView path_var_separator{ &ExecutablePath::separator, @@ -24,7 +26,7 @@ ExecutablePath ExecutablePath::load() ExecutablePath ExecutablePath::parse(const OsString & path) { auto strings = path.empty() ? 
(std::list{}) - : basicSplitString, OsString::value_type>(path, path_var_separator); + : basicSplitString, OsChar>(path, path_var_separator); std::vector ret; ret.reserve(strings.size()); @@ -33,7 +35,7 @@ ExecutablePath ExecutablePath::parse(const OsString & path) std::make_move_iterator(strings.begin()), std::make_move_iterator(strings.end()), std::back_inserter(ret), - [](auto && str) { + [](OsString && str) { return fs::path{ str.empty() // "A zero-length prefix is a legacy feature that @@ -54,6 +56,7 @@ ExecutablePath ExecutablePath::parse(const OsString & path) OsString ExecutablePath::render() const { std::vector path2; + path2.reserve(directories.size()); for (auto & p : directories) path2.push_back(p.native()); return basicConcatStringsSep(path_var_separator, path2); @@ -86,7 +89,7 @@ fs::path ExecutablePath::findPath(const fs::path & exe, std::function directories; - constexpr static const OsString::value_type separator = + constexpr static const OsChar separator = #ifdef WIN32 L';' #else diff --git a/src/libutil/experimental-features.hh b/src/libutil/experimental-features.hh index e65e51280..412bf0886 100644 --- a/src/libutil/experimental-features.hh +++ b/src/libutil/experimental-features.hh @@ -2,9 +2,10 @@ ///@file #include "error.hh" -#include "json-utils.hh" #include "types.hh" +#include + namespace nix { /** @@ -98,10 +99,4 @@ public: void to_json(nlohmann::json &, const ExperimentalFeature &); void from_json(const nlohmann::json &, ExperimentalFeature &); -/** - * It is always rendered as a string - */ -template<> -struct json_avoids_null : std::true_type {}; - } diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc index 86378dd67..69301d9c8 100644 --- a/src/libutil/file-content-address.cc +++ b/src/libutil/file-content-address.cc @@ -88,14 +88,15 @@ void dumpPath( void restorePath( const Path & path, Source & source, - FileSerialisationMethod method) + FileSerialisationMethod method, + bool startFsync) { switch (method) { case FileSerialisationMethod::Flat: - writeFile(path, source); + writeFile(path, source, 0666, startFsync); break; case FileSerialisationMethod::NixArchive: - restorePath(path, source); + restorePath(path, source, startFsync); break; } } diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh index ec42d3d34..0c584ea8a 100644 --- a/src/libutil/file-content-address.hh +++ b/src/libutil/file-content-address.hh @@ -70,7 +70,8 @@ void dumpPath( void restorePath( const Path & path, Source & source, - FileSerialisationMethod method); + FileSerialisationMethod method, + bool startFsync = false); /** diff --git a/src/libutil/file-descriptor.cc b/src/libutil/file-descriptor.cc index 3bbfc50ee..542c33f3b 100644 --- a/src/libutil/file-descriptor.cc +++ b/src/libutil/file-descriptor.cc @@ -2,6 +2,7 @@ #include "signals.hh" #include "finally.hh" #include "serialise.hh" +#include "util.hh" #include #include @@ -44,8 +45,9 @@ AutoCloseFD::AutoCloseFD() : fd{INVALID_DESCRIPTOR} {} AutoCloseFD::AutoCloseFD(Descriptor fd) : fd{fd} {} - -AutoCloseFD::AutoCloseFD(AutoCloseFD && that) : fd{that.fd} +// NOTE: This can be noexcept since we are just copying a value and resetting +// the file descriptor in the rhs. +AutoCloseFD::AutoCloseFD(AutoCloseFD && that) noexcept : fd{that.fd} { that.fd = INVALID_DESCRIPTOR; } @@ -65,7 +67,7 @@ AutoCloseFD::~AutoCloseFD() try { close(); } catch (...) 
{ - ignoreException(); + ignoreExceptionInDestructor(); } } @@ -92,7 +94,7 @@ void AutoCloseFD::close() } } -void AutoCloseFD::fsync() +void AutoCloseFD::fsync() const { if (fd != INVALID_DESCRIPTOR) { int result; @@ -111,6 +113,18 @@ void AutoCloseFD::fsync() } + +void AutoCloseFD::startFsync() const +{ +#if __linux__ + if (fd != -1) { + /* Ignore failure, since fsync must be run later anyway. This is just a performance optimization. */ + ::sync_file_range(fd, 0, 0, SYNC_FILE_RANGE_WRITE); + } +#endif +} + + AutoCloseFD::operator bool() const { return fd != INVALID_DESCRIPTOR; diff --git a/src/libutil/file-descriptor.hh b/src/libutil/file-descriptor.hh index be61375f6..fde362999 100644 --- a/src/libutil/file-descriptor.hh +++ b/src/libutil/file-descriptor.hh @@ -77,8 +77,13 @@ void writeFull(Descriptor fd, std::string_view s, bool allowInterrupts = true); /** * Read a line from a file descriptor. + * + * @param fd The file descriptor to read from + * @param eofOk If true, return an unterminated line if EOF is reached. (e.g. the empty string) + * + * @return A line of text ending in `\n`, or a string without `\n` if `eofOk` is true and EOF is reached. */ -std::string readLine(Descriptor fd); +std::string readLine(Descriptor fd, bool eofOk = false); /** * Write a line to a file descriptor. @@ -101,8 +106,25 @@ void drainFD( #endif ); +/** + * Get [Standard Input](https://en.wikipedia.org/wiki/Standard_streams#Standard_input_(stdin)) + */ [[gnu::always_inline]] -inline Descriptor getStandardOut() { +inline Descriptor getStandardInput() +{ +#ifndef _WIN32 + return STDIN_FILENO; +#else + return GetStdHandle(STD_INPUT_HANDLE); +#endif +} + +/** + * Get [Standard Output](https://en.wikipedia.org/wiki/Standard_streams#Standard_output_(stdout)) + */ +[[gnu::always_inline]] +inline Descriptor getStandardOutput() +{ #ifndef _WIN32 return STDOUT_FILENO; #else @@ -110,6 +132,19 @@ inline Descriptor getStandardOut() { #endif } +/** + * Get [Standard Error](https://en.wikipedia.org/wiki/Standard_streams#Standard_error_(stderr)) + */ +[[gnu::always_inline]] +inline Descriptor getStandardError() +{ +#ifndef _WIN32 + return STDERR_FILENO; +#else + return GetStdHandle(STD_ERROR_HANDLE); +#endif +} + /** * Automatic cleanup of resources. */ @@ -120,7 +155,7 @@ public: AutoCloseFD(); AutoCloseFD(Descriptor fd); AutoCloseFD(const AutoCloseFD & fd) = delete; - AutoCloseFD(AutoCloseFD&& fd); + AutoCloseFD(AutoCloseFD&& fd) noexcept; ~AutoCloseFD(); AutoCloseFD& operator =(const AutoCloseFD & fd) = delete; AutoCloseFD& operator =(AutoCloseFD&& fd); @@ -128,7 +163,18 @@ public: explicit operator bool() const; Descriptor release(); void close(); - void fsync(); + + /** + * Perform a blocking fsync operation. + */ + void fsync() const; + + /** + * Asynchronously flush to disk without blocking, if available on + * the platform. This is just a performance optimization, and + * fsync must be run later even if this is called. + */ + void startFsync() const; }; class Pipe @@ -143,10 +189,10 @@ public: namespace unix { /** - * Close all file descriptors except those listed in the given set. + * Close all file descriptors except stdio fds (ie 0, 1, 2). * Good practice in child processes. */ -void closeMostFDs(const std::set & exceptions); +void closeExtraFDs(); /** * Set the close-on-exec flag for the given file descriptor. 
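The new `startFsync()` is only a hint: it asks the kernel to begin write-back early (via `sync_file_range` on Linux, a no-op elsewhere), while the blocking `fsync()` remains the actual durability barrier. A POSIX-only sketch of the intended pattern, with includes and open flags chosen for illustration:

```cpp
#include <fcntl.h>
#include <string>
#include <string_view>

#include "error.hh"           // assumed, for nix::SysError
#include "file-descriptor.hh" // assumed, for nix::AutoCloseFD and nix::writeFull

void writeDurably(const std::string & path, std::string_view data)
{
    nix::AutoCloseFD fd(open(path.c_str(), O_WRONLY | O_CREAT | O_TRUNC, 0666));
    if (!fd)
        throw nix::SysError("opening '%s'", path);

    nix::writeFull(fd.get(), data);

    fd.startFsync(); // hint: start flushing now, without blocking
    // ... unrelated work can overlap with the write-back ...
    fd.fsync();      // the real barrier before relying on the data being on disk
}
```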
diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index 536ae29ab..fd51d7d3c 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -5,12 +5,14 @@ #include "signals.hh" #include "finally.hh" #include "serialise.hh" +#include "util.hh" #include #include #include #include #include +#include #include #include @@ -25,10 +27,10 @@ #include "strings-inline.hh" -namespace fs = std::filesystem; - namespace nix { +namespace fs { using namespace std::filesystem; } + /** * Treat the string as possibly an absolute path, by inspecting the * start of it. Return whether it was probably intended to be @@ -72,6 +74,10 @@ Path absPath(PathView path, std::optional dir, bool resolveSymlinks) return canonPath(path, resolveSymlinks); } +std::filesystem::path absPath(const std::filesystem::path & path, bool resolveSymlinks) +{ + return absPath(path.string(), std::nullopt, resolveSymlinks); +} Path canonPath(PathView path, bool resolveSymlinks) { @@ -205,10 +211,10 @@ bool pathExists(const Path & path) return maybeLstat(path).has_value(); } -bool pathAccessible(const Path & path) +bool pathAccessible(const std::filesystem::path & path) { try { - return pathExists(path); + return pathExists(path.string()); } catch (SysError & e) { // swallow EPERM if (e.errNo == EPERM) return false; @@ -237,6 +243,11 @@ std::string readFile(const Path & path) return readFile(fd.get()); } +std::string readFile(const std::filesystem::path & path) +{ + return readFile(os_string_to_string(PathViewNG { path })); +} + void readFile(const Path & path, Sink & sink) { @@ -318,6 +329,50 @@ void syncParent(const Path & path) } +void recursiveSync(const Path & path) +{ + /* If it's a file, just fsync and return. */ + auto st = lstat(path); + if (S_ISREG(st.st_mode)) { + AutoCloseFD fd = toDescriptor(open(path.c_str(), O_RDONLY, 0)); + if (!fd) + throw SysError("opening file '%1%'", path); + fd.fsync(); + return; + } + + /* Otherwise, perform a depth-first traversal of the directory and + fsync all the files. */ + std::deque dirsToEnumerate; + dirsToEnumerate.push_back(path); + std::vector dirsToFsync; + while (!dirsToEnumerate.empty()) { + auto currentDir = dirsToEnumerate.back(); + dirsToEnumerate.pop_back(); + for (auto & entry : std::filesystem::directory_iterator(currentDir)) { + auto st = entry.symlink_status(); + if (fs::is_directory(st)) { + dirsToEnumerate.emplace_back(entry.path()); + } else if (fs::is_regular_file(st)) { + AutoCloseFD fd = toDescriptor(open(entry.path().string().c_str(), O_RDONLY, 0)); + if (!fd) + throw SysError("opening file '%1%'", entry.path()); + fd.fsync(); + } + } + dirsToFsync.emplace_back(std::move(currentDir)); + } + + /* Fsync all the directories. */ + for (auto dir = dirsToFsync.rbegin(); dir != dirsToFsync.rend(); ++dir) { + AutoCloseFD fd = toDescriptor(open(dir->string().c_str(), O_RDONLY, 0)); + if (!fd) + throw SysError("opening directory '%1%'", *dir); + fd.fsync(); + } +} + + static void _deletePath(Descriptor parentfd, const fs::path & path, uint64_t & bytesFreed) { #ifndef _WIN32 @@ -463,7 +518,7 @@ AutoDelete::~AutoDelete() } } } catch (...) 
{ - ignoreException(); + ignoreExceptionInDestructor(); } } @@ -550,19 +605,20 @@ void createSymlink(const Path & target, const Path & link) fs::create_symlink(target, link); } -void replaceSymlink(const Path & target, const Path & link) +void replaceSymlink(const fs::path & target, const fs::path & link) { for (unsigned int n = 0; true; n++) { - Path tmp = canonPath(fmt("%s/.%d_%s", dirOf(link), n, baseNameOf(link))); + auto tmp = link.parent_path() / fs::path{fmt(".%d_%s", n, link.filename().string())}; + tmp = tmp.lexically_normal(); try { - createSymlink(target, tmp); + fs::create_symlink(target, tmp); } catch (fs::filesystem_error & e) { if (e.code() == std::errc::file_exists) continue; throw; } - std::filesystem::rename(tmp, link); + fs::rename(tmp, link); break; } @@ -574,7 +630,28 @@ void setWriteTime( time_t modificationTime, std::optional optIsSymlink) { -#ifndef _WIN32 +#ifdef _WIN32 + // FIXME use `fs::last_write_time`. + // + // Would be nice to use std::filesystem unconditionally, but + // doesn't support access time just modification time. + // + // System clock vs File clock issues also make that annoying. + warn("Changing file times is not yet implemented on Windows, path is '%s'", path); +#elif HAVE_UTIMENSAT && HAVE_DECL_AT_SYMLINK_NOFOLLOW + struct timespec times[2] = { + { + .tv_sec = accessedTime, + .tv_nsec = 0, + }, + { + .tv_sec = modificationTime, + .tv_nsec = 0, + }, + }; + if (utimensat(AT_FDCWD, path.c_str(), times, AT_SYMLINK_NOFOLLOW) == -1) + throw SysError("changing modification time of '%s' (using `utimensat`)", path); +#else struct timeval times[2] = { { .tv_sec = accessedTime, @@ -585,42 +662,21 @@ void setWriteTime( .tv_usec = 0, }, }; -#endif - - auto nonSymlink = [&]{ - bool isSymlink = optIsSymlink - ? *optIsSymlink - : fs::is_symlink(path); - - if (!isSymlink) { -#ifdef _WIN32 - // FIXME use `fs::last_write_time`. - // - // Would be nice to use std::filesystem unconditionally, but - // doesn't support access time just modification time. - // - // System clock vs File clock issues also make that annoying. - warn("Changing file times is not yet implemented on Windows, path is '%s'", path); -#else - if (utimes(path.c_str(), times) == -1) { - - throw SysError("changing modification time of '%s' (not a symlink)", path); - } -#endif - } else { - throw Error("Cannot modification time of symlink '%s'", path); - } - }; - #if HAVE_LUTIMES - if (lutimes(path.c_str(), times) == -1) { - if (errno == ENOSYS) - nonSymlink(); - else - throw SysError("changing modification time of '%s'", path); - } + if (lutimes(path.c_str(), times) == -1) + throw SysError("changing modification time of '%s'", path); #else - nonSymlink(); + bool isSymlink = optIsSymlink + ? *optIsSymlink + : fs::is_symlink(path); + + if (!isSymlink) { + if (utimes(path.c_str(), times) == -1) + throw SysError("changing modification time of '%s' (not a symlink)", path); + } else { + throw Error("Cannot modification time of symlink '%s'", path); + } +#endif #endif } diff --git a/src/libutil/file-system.hh b/src/libutil/file-system.hh index 1ae5fa136..eb3e4ec66 100644 --- a/src/libutil/file-system.hh +++ b/src/libutil/file-system.hh @@ -46,16 +46,33 @@ struct Source; * @return An absolutized path, resolving paths relative to the * specified directory, or the current directory otherwise. The path * is also canonicalised. + * + * In the process of being deprecated for `std::filesystem::absolute`. 
*/ Path absPath(PathView path, std::optional dir = {}, bool resolveSymlinks = false); +inline Path absPath(const Path & path, + std::optional dir = {}, + bool resolveSymlinks = false) +{ + return absPath(PathView{path}, dir, resolveSymlinks); +} + +std::filesystem::path absPath(const std::filesystem::path & path, + bool resolveSymlinks = false); + /** * Canonicalise a path by removing all `.` or `..` components and * double or trailing slashes. Optionally resolves all symlink * components such that each component of the resulting path is *not* * a symbolic link. + * + * In the process of being deprecated for + * `std::filesystem::path::lexically_normal` (for the `resolveSymlinks = + * false` case), and `std::filesystem::weakly_canonical` (for the + * `resolveSymlinks = true` case). */ Path canonPath(PathView path, bool resolveSymlinks = false); @@ -64,12 +81,18 @@ Path canonPath(PathView path, bool resolveSymlinks = false); * everything before the final `/`. If the path is the root or an * immediate child thereof (e.g., `/foo`), this means `/` * is returned. + * + * In the process of being deprecated for + * `std::filesystem::path::parent_path`. */ Path dirOf(const PathView path); /** * @return the base name of the given canonical path, i.e., everything * following the final `/` (trailing slashes are removed). + * + * In the process of being deprecated for + * `std::filesystem::path::filename`. */ std::string_view baseNameOf(std::string_view path); @@ -98,20 +121,42 @@ std::optional maybeLstat(const Path & path); /** * @return true iff the given path exists. + * + * In the process of being deprecated for `fs::symlink_exists`. */ bool pathExists(const Path & path); +namespace fs { + +/** + * ``` + * symlink_exists(p) = std::filesystem::exists(std::filesystem::symlink_status(p)) + * ``` + * Missing convenience analogous to + * ``` + * std::filesystem::exists(p) = std::filesystem::exists(std::filesystem::status(p)) + * ``` + */ +inline bool symlink_exists(const std::filesystem::path & path) { + return std::filesystem::exists(std::filesystem::symlink_status(path)); +} + +} // namespace fs + /** * A version of pathExists that returns false on a permission error. * Useful for inferring default paths across directories that might not * be readable. * @return true iff the given path can be accessed and exists */ -bool pathAccessible(const Path & path); +bool pathAccessible(const std::filesystem::path & path); /** * Read the contents (target) of a symbolic link. The result is not * in any way canonicalised. + * + * In the process of being deprecated for + * `std::filesystem::read_symlink`. */ Path readLink(const Path & path); @@ -124,20 +169,34 @@ Descriptor openDirectory(const std::filesystem::path & path); * Read the contents of a file into a string. */ std::string readFile(const Path & path); +std::string readFile(const std::filesystem::path & path); void readFile(const Path & path, Sink & sink); /** * Write a string to a file. 
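The deprecation notes in this header each point to a `std::filesystem` replacement. A side-by-side sketch of those mappings (the path literal is made up, and the equivalences are approximate, as the notes themselves say "in the process of being deprecated"):

```cpp
#include <filesystem>

namespace fs = std::filesystem;

void mappingSketch()
{
    fs::path p = "foo/./bar/../baz";

    auto abs    = fs::absolute(p);          // ~ absPath(p)
    auto canon  = fs::weakly_canonical(p);  // ~ canonPath(p, /*resolveSymlinks=*/true)
    auto norm   = p.lexically_normal();     // ~ canonPath(p, /*resolveSymlinks=*/false)
    auto parent = p.parent_path();          // ~ dirOf(p)
    auto base   = p.filename();             // ~ baseNameOf(p)
    bool there  = fs::exists(fs::symlink_status(p)); // the proposed fs::symlink_exists(p)

    (void) abs; (void) canon; (void) norm; (void) parent; (void) base; (void) there;
}
```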
*/ void writeFile(const Path & path, std::string_view s, mode_t mode = 0666, bool sync = false); +static inline void writeFile(const std::filesystem::path & path, std::string_view s, mode_t mode = 0666, bool sync = false) +{ + return writeFile(path.string(), s, mode, sync); +} void writeFile(const Path & path, Source & source, mode_t mode = 0666, bool sync = false); +static inline void writeFile(const std::filesystem::path & path, Source & source, mode_t mode = 0666, bool sync = false) +{ + return writeFile(path.string(), source, mode, sync); +} /** - * Flush a file's parent directory to disk + * Flush a path's parent directory to disk. */ void syncParent(const Path & path); +/** + * Flush a file or entire directory tree to disk. + */ +void recursiveSync(const Path & path); + /** * Delete a path; i.e., in the case of a directory, it is deleted * recursively. It's not an error if the path does not exist. The @@ -149,6 +208,9 @@ void deletePath(const std::filesystem::path & path, uint64_t & bytesFreed); /** * Create a directory and all its parents, if necessary. + * + * In the process of being deprecated for + * `std::filesystem::create_directories`. */ void createDirs(const Path & path); inline void createDirs(PathView path) @@ -187,13 +249,21 @@ void setWriteTime(const std::filesystem::path & path, const struct stat & st); /** * Create a symlink. + * + * In the process of being deprecated for + * `std::filesystem::create_symlink`. */ void createSymlink(const Path & target, const Path & link); /** * Atomically create or replace a symlink. */ -void replaceSymlink(const Path & target, const Path & link); +void replaceSymlink(const std::filesystem::path & target, const std::filesystem::path & link); + +inline void replaceSymlink(const Path & target, const Path & link) +{ + return replaceSymlink(std::filesystem::path{target}, std::filesystem::path{link}); +} /** * Similar to 'renameFile', but fallback to a copy+remove if `src` and `dst` diff --git a/src/libutil/finally.hh b/src/libutil/finally.hh index bda4227e6..2b25010a1 100644 --- a/src/libutil/finally.hh +++ b/src/libutil/finally.hh @@ -20,7 +20,11 @@ public: // Copying Finallys is definitely not a good idea and will cause them to be // called twice. Finally(Finally &other) = delete; - Finally(Finally &&other) : fun(std::move(other.fun)) { + // NOTE: Move constructor can be nothrow if the callable type is itself nothrow + // move-constructible. 
+ Finally(Finally && other) noexcept(std::is_nothrow_move_constructible_v) + : fun(std::move(other.fun)) + { other.movedFrom = true; } ~Finally() noexcept(false) diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc index f15324d0a..72e5c731f 100644 --- a/src/libutil/fs-sink.cc +++ b/src/libutil/fs-sink.cc @@ -68,20 +68,6 @@ static RestoreSinkSettings restoreSinkSettings; static GlobalConfig::Register r1(&restoreSinkSettings); - -void RestoreSink::createDirectory(const CanonPath & path) -{ - std::filesystem::create_directory(dstPath / path.rel()); -}; - -struct RestoreRegularFile : CreateRegularFileSink { - AutoCloseFD fd; - - void operator () (std::string_view data) override; - void isExecutable() override; - void preallocateContents(uint64_t size) override; -}; - static std::filesystem::path append(const std::filesystem::path & src, const CanonPath & path) { auto dst = src; @@ -90,11 +76,38 @@ static std::filesystem::path append(const std::filesystem::path & src, const Can return dst; } +void RestoreSink::createDirectory(const CanonPath & path) +{ + auto p = append(dstPath, path); + if (!std::filesystem::create_directory(p)) + throw Error("path '%s' already exists", p.string()); +}; + +struct RestoreRegularFile : CreateRegularFileSink { + AutoCloseFD fd; + bool startFsync = false; + + ~RestoreRegularFile() + { + /* Initiate an fsync operation without waiting for the + result. The real fsync should be run before registering a + store path, but this is a performance optimization to allow + the disk write to start early. */ + if (fd && startFsync) + fd.startFsync(); + } + + void operator () (std::string_view data) override; + void isExecutable() override; + void preallocateContents(uint64_t size) override; +}; + void RestoreSink::createRegularFile(const CanonPath & path, std::function func) { auto p = append(dstPath, path); RestoreRegularFile crf; + crf.startFsync = startFsync; crf.fd = #ifdef _WIN32 CreateFileW(p.c_str(), GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL) diff --git a/src/libutil/fs-sink.hh b/src/libutil/fs-sink.hh index de165fb7d..5c5073731 100644 --- a/src/libutil/fs-sink.hh +++ b/src/libutil/fs-sink.hh @@ -78,6 +78,11 @@ struct NullFileSystemObjectSink : FileSystemObjectSink struct RestoreSink : FileSystemObjectSink { std::filesystem::path dstPath; + bool startFsync = false; + + explicit RestoreSink(bool startFsync) + : startFsync{startFsync} + { } void createDirectory(const CanonPath & path) override; diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index ab2a8695d..748176d33 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -245,7 +245,12 @@ Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI) } else if (isSRI || rest.size() == base64Len()) { - auto d = base64Decode(rest); + std::string d; + try { + d = base64Decode(rest); + } catch (Error & e) { + e.addTrace({}, "While decoding hash '%s'", rest); + } if (d.size() != hashSize) throw BadHash("invalid %s hash '%s'", isSRI ? 
"SRI" : "base-64", rest); assert(hashSize); diff --git a/src/libutil/json-utils.hh b/src/libutil/json-utils.hh index fe7a406cf..a61c9cada 100644 --- a/src/libutil/json-utils.hh +++ b/src/libutil/json-utils.hh @@ -3,12 +3,13 @@ #include #include -#include #include "types.hh" namespace nix { +enum struct ExperimentalFeature; + const nlohmann::json * get(const nlohmann::json & map, const std::string & key); nlohmann::json * get(nlohmann::json & map, const std::string & key); @@ -71,6 +72,12 @@ struct json_avoids_null> : std::true_type {}; template struct json_avoids_null> : std::true_type {}; +/** + * `ExperimentalFeature` is always rendered as a string. + */ +template<> +struct json_avoids_null : std::true_type {}; + } namespace nlohmann { diff --git a/src/libutil/linux/cgroup.cc b/src/libutil/linux/cgroup.cc index 140ff4566..ad3e8a017 100644 --- a/src/libutil/linux/cgroup.cc +++ b/src/libutil/linux/cgroup.cc @@ -144,4 +144,23 @@ CgroupStats destroyCgroup(const Path & cgroup) return destroyCgroup(cgroup, true); } +std::string getCurrentCgroup() +{ + auto cgroupFS = getCgroupFS(); + if (!cgroupFS) + throw Error("cannot determine the cgroups file system"); + + auto ourCgroups = getCgroups("/proc/self/cgroup"); + auto ourCgroup = ourCgroups[""]; + if (ourCgroup == "") + throw Error("cannot determine cgroup name from /proc/self/cgroup"); + return ourCgroup; +} + +std::string getRootCgroup() +{ + static std::string rootCgroup = getCurrentCgroup(); + return rootCgroup; +} + } diff --git a/src/libutil/linux/cgroup.hh b/src/libutil/linux/cgroup.hh index 783a0ab87..87d135ba6 100644 --- a/src/libutil/linux/cgroup.hh +++ b/src/libutil/linux/cgroup.hh @@ -25,4 +25,13 @@ struct CgroupStats */ CgroupStats destroyCgroup(const Path & cgroup); +std::string getCurrentCgroup(); + +/** + * Get the cgroup that should be used as the parent when creating new + * sub-cgroups. The first time this is called, the current cgroup will be + * returned, and then all subsequent calls will return the original cgroup. + */ +std::string getRootCgroup(); + } diff --git a/src/libutil/linux/namespaces.cc b/src/libutil/linux/namespaces.cc index d4766cbba..c5e21dffc 100644 --- a/src/libutil/linux/namespaces.cc +++ b/src/libutil/linux/namespaces.cc @@ -118,7 +118,7 @@ void saveMountNamespace() void restoreMountNamespace() { try { - auto savedCwd = absPath("."); + auto savedCwd = std::filesystem::current_path(); if (fdSavedMountNamespace && setns(fdSavedMountNamespace.get(), CLONE_NEWNS) == -1) throw SysError("restoring parent mount namespace"); diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 29427f2f6..80c107ef5 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -38,7 +38,7 @@ void Logger::warn(const std::string & msg) void Logger::writeToStdout(std::string_view s) { - Descriptor standard_out = getStandardOut(); + Descriptor standard_out = getStandardOutput(); writeFull(standard_out, s); writeFull(standard_out, "\n"); } @@ -85,10 +85,10 @@ public: void logEI(const ErrorInfo & ei) override { - std::stringstream oss; + std::ostringstream oss; showErrorInfo(oss, ei, loggerSettings.showTrace.get()); - log(ei.level, oss.str()); + log(ei.level, toView(oss)); } void startActivity(ActivityId act, Verbosity lvl, ActivityType type, @@ -118,11 +118,7 @@ void writeToStderr(std::string_view s) { try { writeFull( -#ifdef _WIN32 - GetStdHandle(STD_ERROR_HANDLE), -#else - STDERR_FILENO, -#endif + getStandardError(), s, false); } catch (SystemError & e) { /* Ignore failing writes to stderr. 
We need to ignore write @@ -346,7 +342,7 @@ Activity::~Activity() try { logger.stopActivity(id); } catch (...) { - ignoreException(); + ignoreExceptionInDestructor(); } } diff --git a/src/libutil/meson.build b/src/libutil/meson.build index 200eeb4e9..a6dc86394 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -28,6 +28,7 @@ subdir('build-utils-meson/subprojects') # HAVE_LUTIMES 1`. The `#define` is unconditional, 0 for not found and 1 # for found. One therefore uses it with `#if` not `#ifdef`. check_funcs = [ + 'close_range', # Optionally used for changing the mtime of symlinks. 'lutimes', # Optionally used for creating pipes on Unix @@ -41,6 +42,8 @@ check_funcs = [ # Optionally used to try to close more file descriptors (e.g. before # forking) on Unix. 'sysconf', + # Optionally used for changing the mtime of files and symlinks. + 'utimensat', ] foreach funcspec : check_funcs define_name = 'HAVE_' + funcspec.underscorify().to_upper() @@ -48,6 +51,9 @@ foreach funcspec : check_funcs configdata.set(define_name, define_value) endforeach +configdata.set('HAVE_DECL_AT_SYMLINK_NOFOLLOW', cxx.has_header_symbol('fcntl.h', 'AT_SYMLINK_NOFOLLOW').to_int()) + +subdir('build-utils-meson/libatomic') subdir('build-utils-meson/threads') if host_machine.system() == 'windows' @@ -185,6 +191,7 @@ headers = [config_h] + files( 'english.hh', 'environment-variables.hh', 'error.hh', + 'exec.hh', 'executable-path.hh', 'exit.hh', 'experimental-features.hh', @@ -251,6 +258,7 @@ else endif subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') this_library = library( 'nixutil', diff --git a/src/libutil/os-string.hh b/src/libutil/os-string.hh index 0d75173e5..3e24763fb 100644 --- a/src/libutil/os-string.hh +++ b/src/libutil/os-string.hh @@ -11,21 +11,30 @@ namespace nix { * Named because it is similar to the Rust type, except it is in the * native encoding not WTF-8. * - * Same as `std::filesystem::path::string_type`, but manually defined to + * Same as `std::filesystem::path::value_type`, but manually defined to * avoid including a much more complex header. */ -using OsString = std::basic_string< +using OsChar = #if defined(_WIN32) && !defined(__CYGWIN__) wchar_t #else char #endif - >; + ; + +/** + * Named because it is similar to the Rust type, except it is in the + * native encoding not WTF-8. + * + * Same as `std::filesystem::path::string_type`, but manually defined + * for the same reason as `OsChar`. + */ +using OsString = std::basic_string; /** * `std::string_view` counterpart for `OsString`. */ -using OsStringView = std::basic_string_view; +using OsStringView = std::basic_string_view; std::string os_string_to_string(OsStringView path); diff --git a/src/libutil/package.nix b/src/libutil/package.nix index 4ce1a75b0..17a156740 100644 --- a/src/libutil/package.nix +++ b/src/libutil/package.nix @@ -1,11 +1,6 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonLibrary , boost , brotli @@ -24,7 +19,7 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonLibrary (finalAttrs: { pname = "nix-util"; inherit version; @@ -43,14 +38,6 @@ mkMesonDerivation (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) 
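As the meson.build comment above notes, these checks always define their `HAVE_*` macro (to 0 or 1), so consumers test them with `#if`, not `#ifdef`. The shape mirrors the cascade now used by `setWriteTime()` earlier in this diff (comments only, as a sketch):

```cpp
#if HAVE_UTIMENSAT && HAVE_DECL_AT_SYMLINK_NOFOLLOW
// preferred: utimensat(AT_FDCWD, path, times, AT_SYMLINK_NOFOLLOW) handles symlinks
#elif HAVE_LUTIMES
// fallback: lutimes(path, times) also handles symlinks
#else
// last resort: utimes(path, times), which cannot set times on symlinks
#endif
```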
]; - outputs = [ "out" "dev" ]; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - buildInputs = [ brotli libsodium @@ -88,10 +75,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/libutil/pool.hh b/src/libutil/pool.hh index 6247b6125..b2ceb7143 100644 --- a/src/libutil/pool.hh +++ b/src/libutil/pool.hh @@ -109,7 +109,15 @@ public: Handle(Pool & pool, std::shared_ptr r) : pool(pool), r(r) { } public: - Handle(Handle && h) : pool(h.pool), r(h.r) { h.r.reset(); } + // NOTE: Copying std::shared_ptr and calling a .reset() on it is always noexcept. + Handle(Handle && h) noexcept + : pool(h.pool) + , r(h.r) + { + static_assert(noexcept(h.r.reset())); + static_assert(noexcept(std::shared_ptr(h.r))); + h.r.reset(); + } Handle(const Handle & l) = delete; diff --git a/src/libutil/position.cc b/src/libutil/position.cc index 5a2529262..946f167b6 100644 --- a/src/libutil/position.cc +++ b/src/libutil/position.cc @@ -9,7 +9,7 @@ Pos::Pos(const Pos * other) } line = other->line; column = other->column; - origin = std::move(other->origin); + origin = other->origin; } Pos::operator std::shared_ptr() const diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index 2b1a485d5..50b436893 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -7,8 +7,8 @@ namespace nix { -PosixSourceAccessor::PosixSourceAccessor(std::filesystem::path && root) - : root(std::move(root)) +PosixSourceAccessor::PosixSourceAccessor(std::filesystem::path && argRoot) + : root(std::move(argRoot)) { assert(root.empty() || root.is_absolute()); displayPrefix = root.string(); @@ -20,7 +20,7 @@ PosixSourceAccessor::PosixSourceAccessor() SourcePath PosixSourceAccessor::createAtRoot(const std::filesystem::path & path) { - std::filesystem::path path2 = absPath(path.string()); + std::filesystem::path path2 = absPath(path); return { make_ref(path2.root_path()), CanonPath { path2.relative_path().string() }, @@ -132,23 +132,24 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & { assertNoSymlinks(path); DirEntries res; - for (auto & entry : std::filesystem::directory_iterator{makeAbsPath(path)}) { - checkInterrupt(); - auto type = [&]() -> std::optional { - std::filesystem::file_type nativeType; - try { - nativeType = entry.symlink_status().type(); - } catch (std::filesystem::filesystem_error & e) { - // We cannot always stat the child. (Ideally there is no - // stat because the native directory entry has the type - // already, but this isn't always the case.) - if (e.code() == std::errc::permission_denied || e.code() == std::errc::operation_not_permitted) - return std::nullopt; - else throw; - } + try { + for (auto & entry : std::filesystem::directory_iterator{makeAbsPath(path)}) { + checkInterrupt(); + auto type = [&]() -> std::optional { + std::filesystem::file_type nativeType; + try { + nativeType = entry.symlink_status().type(); + } catch (std::filesystem::filesystem_error & e) { + // We cannot always stat the child. (Ideally there is no + // stat because the native directory entry has the type + // already, but this isn't always the case.) 
+ if (e.code() == std::errc::permission_denied || e.code() == std::errc::operation_not_permitted) + return std::nullopt; + else throw; + } - // cannot exhaustively enumerate because implementation-specific - // additional file types are allowed. + // cannot exhaustively enumerate because implementation-specific + // additional file types are allowed. #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wswitch-enum" switch (nativeType) { @@ -158,8 +159,11 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & default: return tMisc; } #pragma GCC diagnostic pop - }(); - res.emplace(entry.path().filename().string(), type); + }(); + res.emplace(entry.path().filename().string(), type); + } + } catch (std::filesystem::filesystem_error & e) { + throw SysError("reading directory %1%", showPath(path)); } return res; } diff --git a/src/libutil/ref.hh b/src/libutil/ref.hh index 3d0e64ab4..92688bf1e 100644 --- a/src/libutil/ref.hh +++ b/src/libutil/ref.hh @@ -18,11 +18,6 @@ private: std::shared_ptr p; public: - - ref(const ref & r) - : p(r.p) - { } - explicit ref(const std::shared_ptr & p) : p(p) { @@ -75,8 +70,6 @@ public: return ref((std::shared_ptr) p); } - ref & operator=(ref const & rhs) = default; - bool operator == (const ref & other) const { return p == other.p; diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 4899134d7..168d2ed32 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -1,5 +1,6 @@ #include "serialise.hh" #include "signals.hh" +#include "util.hh" #include #include @@ -9,7 +10,10 @@ #ifdef _WIN32 # include +# include # include "windows-error.hh" +#else +# include #endif @@ -49,7 +53,7 @@ void BufferedSink::flush() FdSink::~FdSink() { - try { flush(); } catch (...) { ignoreException(); } + try { flush(); } catch (...) { ignoreExceptionInDestructor(); } } @@ -158,6 +162,30 @@ bool FdSource::good() } +bool FdSource::hasData() +{ + if (BufferedSource::hasData()) return true; + + while (true) { + fd_set fds; + FD_ZERO(&fds); + int fd_ = fromDescriptorReadOnly(fd); + FD_SET(fd_, &fds); + + struct timeval timeout; + timeout.tv_sec = 0; + timeout.tv_usec = 0; + + auto n = select(fd_ + 1, &fds, nullptr, nullptr, &timeout); + if (n < 0) { + if (errno == EINTR) continue; + throw SysError("polling file descriptor"); + } + return FD_ISSET(fd, &fds); + } +} + + size_t StringSource::read(char * data, size_t len) { if (pos == s.size()) throw EndOfFile("end of string reached"); @@ -171,55 +199,6 @@ size_t StringSource::read(char * data, size_t len) #error Coroutines are broken in this version of Boost! #endif -/* A concrete datatype allow virtual dispatch of stack allocation methods. */ -struct VirtualStackAllocator { - StackAllocator *allocator = StackAllocator::defaultAllocator; - - boost::context::stack_context allocate() { - return allocator->allocate(); - } - - void deallocate(boost::context::stack_context sctx) { - allocator->deallocate(sctx); - } -}; - - -/* This class reifies the default boost coroutine stack allocation strategy with - a virtual interface. 
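With the custom stack allocator and the GC entry/exit hooks removed, `sourceToSink` and `sinkToSource` below use plain `boost::coroutines2` coroutines. A self-contained sketch of that producer/consumer shape, using toy string chunks rather than the Nix `Source`/`Sink` types:

```cpp
#include <boost/coroutine2/coroutine.hpp>
#include <iostream>
#include <string>

int main()
{
    using coro_t = boost::coroutines2::coroutine<std::string>;

    // Producer: runs lazily, yielding chunks to the caller on demand.
    coro_t::pull_type producer([](coro_t::push_type & yield) {
        yield("hello ");
        yield("world");
    });

    // Consumer: pulls chunks until the coroutine body returns.
    for (auto & chunk : producer)
        std::cout << chunk;
    std::cout << '\n';
}
```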
*/ -class DefaultStackAllocator : public StackAllocator { - boost::coroutines2::default_stack stack; - - boost::context::stack_context allocate() override { - return stack.allocate(); - } - - void deallocate(boost::context::stack_context sctx) override { - stack.deallocate(sctx); - } -}; - -static DefaultStackAllocator defaultAllocatorSingleton; - -StackAllocator *StackAllocator::defaultAllocator = &defaultAllocatorSingleton; - - -std::shared_ptr (*create_coro_gc_hook)() = []() -> std::shared_ptr { - return {}; -}; - -/* This class is used for entry and exit hooks on coroutines */ -class CoroutineContext { - /* Disable GC when entering the coroutine without the boehm patch, - * since it doesn't find the main thread stack in this case. - * std::shared_ptr performs type-erasure, so it will call the right - * deleter. */ - const std::shared_ptr coro_gc_hook = create_coro_gc_hook(); -public: - CoroutineContext() {}; - ~CoroutineContext() {}; -}; - std::unique_ptr sourceToSink(std::function fun) { struct SourceToSink : FinishSink @@ -241,14 +220,12 @@ std::unique_ptr sourceToSink(std::function fun) cur = in; if (!coro) { - CoroutineContext ctx; - coro = coro_t::push_type(VirtualStackAllocator{}, [&](coro_t::pull_type & yield) { - LambdaSource source([&](char *out, size_t out_len) { + coro = coro_t::push_type([&](coro_t::pull_type & yield) { + LambdaSource source([&](char * out, size_t out_len) { if (cur.empty()) { yield(); - if (yield.get()) { - return (size_t)0; - } + if (yield.get()) + throw EndOfFile("coroutine has finished"); } size_t n = std::min(cur.size(), out_len); @@ -263,20 +240,14 @@ std::unique_ptr sourceToSink(std::function fun) if (!*coro) { unreachable(); } if (!cur.empty()) { - CoroutineContext ctx; (*coro)(false); } } void finish() override { - if (!coro) return; - if (!*coro) unreachable(); - { - CoroutineContext ctx; + if (coro && *coro) (*coro)(true); - } - if (*coro) unreachable(); } }; @@ -307,8 +278,7 @@ std::unique_ptr sinkToSource( size_t read(char * data, size_t len) override { if (!coro) { - CoroutineContext ctx; - coro = coro_t::pull_type(VirtualStackAllocator{}, [&](coro_t::push_type & yield) { + coro = coro_t::pull_type([&](coro_t::push_type & yield) { LambdaSink sink([&](std::string_view data) { if (!data.empty()) yield(std::string(data)); }); @@ -320,7 +290,6 @@ std::unique_ptr sinkToSource( if (pos == cur.size()) { if (!cur.empty()) { - CoroutineContext ctx; (*coro)(); } cur = coro->get(); diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index c7290dcef..14721d069 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -2,6 +2,7 @@ ///@file #include +#include #include "types.hh" #include "util.hh" @@ -104,6 +105,9 @@ struct BufferedSource : Source size_t read(char * data, size_t len) override; + /** + * Return true if the buffer is not empty. + */ bool hasData(); protected: @@ -162,6 +166,13 @@ struct FdSource : BufferedSource FdSource & operator=(FdSource && s) = default; bool good() override; + + /** + * Return true if the buffer is not empty after a non-blocking + * read. + */ + bool hasData(); + protected: size_t readUnbuffered(char * data, size_t len) override; private: @@ -192,7 +203,14 @@ struct StringSource : Source { std::string_view s; size_t pos; + + // NOTE: Prevent unintentional dangling views when an implicit conversion + // from std::string -> std::string_view occurs when the string is passed + // by rvalue. 
+ StringSource(std::string &&) = delete; StringSource(std::string_view s) : s(s), pos(0) { } + StringSource(const std::string& str): StringSource(std::string_view(str)) {} + size_t read(char * data, size_t len) override; }; @@ -493,7 +511,7 @@ struct FramedSource : Source } } } catch (...) { - ignoreException(); + ignoreExceptionInDestructor(); } } @@ -522,15 +540,16 @@ struct FramedSource : Source /** * Write as chunks in the format expected by FramedSource. * - * The exception_ptr reference can be used to terminate the stream when you - * detect that an error has occurred on the remote end. + * The `checkError` function can be used to terminate the stream when you + * detect that an error has occurred. It does so by throwing an exception. */ struct FramedSink : nix::BufferedSink { BufferedSink & to; - std::exception_ptr & ex; + std::function checkError; - FramedSink(BufferedSink & to, std::exception_ptr & ex) : to(to), ex(ex) + FramedSink(BufferedSink & to, std::function && checkError) + : to(to), checkError(checkError) { } ~FramedSink() @@ -539,45 +558,18 @@ struct FramedSink : nix::BufferedSink to << 0; to.flush(); } catch (...) { - ignoreException(); + ignoreExceptionInDestructor(); } } void writeUnbuffered(std::string_view data) override { - /* Don't send more data if the remote has - encountered an error. */ - if (ex) { - auto ex2 = ex; - ex = nullptr; - std::rethrow_exception(ex2); - } + /* Don't send more data if an error has occured. */ + checkError(); + to << data.size(); to(data); }; }; -/** - * Stack allocation strategy for sinkToSource. - * Mutable to avoid a boehm gc dependency in libutil. - * - * boost::context doesn't provide a virtual class, so we define our own. - */ -struct StackAllocator { - virtual boost::context::stack_context allocate() = 0; - virtual void deallocate(boost::context::stack_context sctx) = 0; - - /** - * The stack allocator to use in sinkToSource and potentially elsewhere. - * It is reassigned by the initGC() method in libexpr. - */ - static StackAllocator *defaultAllocator; -}; - -/* Disabling GC when entering a coroutine (without the boehm patch). - mutable to avoid boehm gc dependency in libutil. 
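The deleted `StringSource(std::string &&)` overload above exists to reject temporaries: a `std::string` rvalue dies at the end of the full expression, leaving the stored `std::string_view` dangling. The same guard, illustrated outside Nix with made-up names:

```cpp
#include <string>
#include <string_view>

struct ViewHolder
{
    std::string_view view;

    ViewHolder(std::string_view v) : view(v) { }

    // Reject rvalue std::string: the view would dangle immediately.
    ViewHolder(std::string &&) = delete;
};

int main()
{
    std::string owned = "hello";
    ViewHolder ok{owned};                    // fine: 'owned' outlives the view
    // ViewHolder bad{std::string("oops")}; // rejected at compile time
    return ok.view.size() == 5 ? 0 : 1;
}
```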
- */ -extern std::shared_ptr (*create_coro_gc_hook)(); - - } diff --git a/src/libutil/signature/local-keys.cc b/src/libutil/signature/local-keys.cc index 858b036f5..70bcb5f33 100644 --- a/src/libutil/signature/local-keys.cc +++ b/src/libutil/signature/local-keys.cc @@ -14,17 +14,25 @@ BorrowedCryptoValue BorrowedCryptoValue::parse(std::string_view s) return {s.substr(0, colon), s.substr(colon + 1)}; } -Key::Key(std::string_view s) +Key::Key(std::string_view s, bool sensitiveValue) { auto ss = BorrowedCryptoValue::parse(s); name = ss.name; key = ss.payload; - if (name == "" || key == "") - throw Error("secret key is corrupt"); + try { + if (name == "" || key == "") + throw FormatError("key is corrupt"); - key = base64Decode(key); + key = base64Decode(key); + } catch (Error & e) { + std::string extra; + if (!sensitiveValue) + extra = fmt(" with raw value '%s'", key); + e.addTrace({}, "while decoding key named '%s'%s", name, extra); + throw; + } } std::string Key::to_string() const @@ -33,7 +41,7 @@ std::string Key::to_string() const } SecretKey::SecretKey(std::string_view s) - : Key(s) + : Key{s, true} { if (key.size() != crypto_sign_SECRETKEYBYTES) throw Error("secret key is not valid"); @@ -66,7 +74,7 @@ SecretKey SecretKey::generate(std::string_view name) } PublicKey::PublicKey(std::string_view s) - : Key(s) + : Key{s, false} { if (key.size() != crypto_sign_PUBLICKEYBYTES) throw Error("public key is not valid"); @@ -83,7 +91,12 @@ bool PublicKey::verifyDetached(std::string_view data, std::string_view sig) cons bool PublicKey::verifyDetachedAnon(std::string_view data, std::string_view sig) const { - auto sig2 = base64Decode(sig); + std::string sig2; + try { + sig2 = base64Decode(sig); + } catch (Error & e) { + e.addTrace({}, "while decoding signature '%s'", sig); + } if (sig2.size() != crypto_sign_BYTES) throw Error("signature is not valid"); diff --git a/src/libutil/signature/local-keys.hh b/src/libutil/signature/local-keys.hh index 4aafc1239..9977f0dac 100644 --- a/src/libutil/signature/local-keys.hh +++ b/src/libutil/signature/local-keys.hh @@ -31,15 +31,19 @@ struct Key std::string name; std::string key; - /** - * Construct Key from a string in the format - * ‘:’. - */ - Key(std::string_view s); - std::string to_string() const; protected: + + /** + * Construct Key from a string in the format + * ‘:’. + * + * @param sensitiveValue Avoid displaying the raw Base64 in error + * messages to avoid leaking private keys. 
+ */ + Key(std::string_view s, bool sensitiveValue); + Key(std::string_view name, std::string && key) : name(name), key(std::move(key)) { } }; diff --git a/src/libutil/strings.cc b/src/libutil/strings.cc index 60297228e..c221a43c6 100644 --- a/src/libutil/strings.cc +++ b/src/libutil/strings.cc @@ -1,11 +1,27 @@ #include #include +#include #include "strings-inline.hh" #include "os-string.hh" namespace nix { +struct view_stringbuf : public std::stringbuf +{ + inline std::string_view toView() + { + auto begin = pbase(); + return {begin, begin + pubseekoff(0, std::ios_base::cur, std::ios_base::out)}; + } +}; + +std::string_view toView(const std::ostringstream & os) +{ + auto buf = static_cast(os.rdbuf()); + return buf->toView(); +} + template std::list tokenizeString(std::string_view s, std::string_view separators); template std::set tokenizeString(std::string_view s, std::string_view separators); template std::vector tokenizeString(std::string_view s, std::string_view separators); @@ -14,8 +30,8 @@ template std::list splitString(std::string_view s, std::string_view template std::set splitString(std::string_view s, std::string_view separators); template std::vector splitString(std::string_view s, std::string_view separators); -template std::list basicSplitString( - std::basic_string_view s, std::basic_string_view separators); +template std::list +basicSplitString(std::basic_string_view s, std::basic_string_view separators); template std::string concatStringsSep(std::string_view, const std::list &); template std::string concatStringsSep(std::string_view, const std::set &); diff --git a/src/libutil/strings.hh b/src/libutil/strings.hh index 88b48d770..533126be1 100644 --- a/src/libutil/strings.hh +++ b/src/libutil/strings.hh @@ -8,6 +8,11 @@ namespace nix { +/* + * workaround for unavailable view() method (C++20) of std::ostringstream under MacOS with clang-16 + */ +std::string_view toView(const std::ostringstream & os); + /** * String tokenizer. 
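`toView()` above exposes the characters buffered in a `std::ostringstream` without the copy that `.str()` makes, standing in for the C++20 `view()` member that the cited macOS/clang-16 toolchain lacks. A hedged usage sketch; the returned view is only valid while the stream is alive and unmodified:

```cpp
#include <sstream>
#include <string_view>

#include "strings.hh" // assumed include path for nix::toView

void logSketch()
{
    std::ostringstream oss;
    oss << "copied " << 42 << " paths";

    std::string_view v = nix::toView(oss); // borrows the stream's buffer, no copy
    // ... use 'v' before 'oss' is modified or destroyed ...
    (void) v;
}
```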
* diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc index 2e3236295..a8a22d283 100644 --- a/src/libutil/tarfile.cc +++ b/src/libutil/tarfile.cc @@ -8,6 +8,10 @@ namespace nix { +namespace fs { +using namespace std::filesystem; +} + namespace { int callback_open(struct archive *, void * self) @@ -102,14 +106,14 @@ TarArchive::TarArchive(Source & source, bool raw, std::optional com "Failed to open archive (%s)"); } -TarArchive::TarArchive(const Path & path) +TarArchive::TarArchive(const fs::path & path) : archive{archive_read_new()} , buffer(defaultBufferSize) { archive_read_support_filter_all(archive); enableSupportedFormats(archive); archive_read_set_option(archive, NULL, "mac-ext", NULL); - check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s"); + check(archive_read_open_filename(archive, path.string().c_str(), 16384), "failed to open archive: %s"); } void TarArchive::close() @@ -123,7 +127,7 @@ TarArchive::~TarArchive() archive_read_free(this->archive); } -static void extract_archive(TarArchive & archive, const Path & destDir) +static void extract_archive(TarArchive & archive, const fs::path & destDir) { int flags = ARCHIVE_EXTRACT_TIME | ARCHIVE_EXTRACT_SECURE_SYMLINKS | ARCHIVE_EXTRACT_SECURE_NODOTDOT; @@ -140,7 +144,7 @@ static void extract_archive(TarArchive & archive, const Path & destDir) else archive.check(r); - archive_entry_copy_pathname(entry, (destDir + "/" + name).c_str()); + archive_entry_copy_pathname(entry, (destDir / name).string().c_str()); // sources can and do contain dirs with no rx bits if (archive_entry_filetype(entry) == AE_IFDIR && (archive_entry_mode(entry) & 0500) != 0500) @@ -149,7 +153,7 @@ static void extract_archive(TarArchive & archive, const Path & destDir) // Patch hardlink path const char * original_hardlink = archive_entry_hardlink(entry); if (original_hardlink) { - archive_entry_copy_hardlink(entry, (destDir + "/" + original_hardlink).c_str()); + archive_entry_copy_hardlink(entry, (destDir / original_hardlink).string().c_str()); } archive.check(archive_read_extract(archive.archive, entry, flags)); @@ -158,19 +162,19 @@ static void extract_archive(TarArchive & archive, const Path & destDir) archive.close(); } -void unpackTarfile(Source & source, const Path & destDir) +void unpackTarfile(Source & source, const fs::path & destDir) { auto archive = TarArchive(source); - createDirs(destDir); + fs::create_directories(destDir); extract_archive(archive, destDir); } -void unpackTarfile(const Path & tarFile, const Path & destDir) +void unpackTarfile(const fs::path & tarFile, const fs::path & destDir) { auto archive = TarArchive(tarFile); - createDirs(destDir); + fs::create_directories(destDir); extract_archive(archive, destDir); } diff --git a/src/libutil/tarfile.hh b/src/libutil/tarfile.hh index 0517177db..5e29c6bba 100644 --- a/src/libutil/tarfile.hh +++ b/src/libutil/tarfile.hh @@ -15,7 +15,7 @@ struct TarArchive void check(int err, const std::string & reason = "failed to extract archive (%s)"); - explicit TarArchive(const Path & path); + explicit TarArchive(const std::filesystem::path & path); /// @brief Create a generic archive from source. /// @param source - Input byte stream. 
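With the tarfile interfaces above switched to `std::filesystem::path`, callers pass paths directly and the destination directory is created with `std::filesystem::create_directories`. An illustrative usage sketch (file names are made up):

```cpp
#include <filesystem>

#include "tarfile.hh" // assumed include path

void unpackSketch()
{
    std::filesystem::path tarFile = "sources.tar.xz";
    std::filesystem::path destDir = "unpacked";

    // Creates destDir (including parents) if needed, then extracts into it.
    nix::unpackTarfile(tarFile, destDir);
}
```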
@@ -37,9 +37,9 @@ struct TarArchive int getArchiveFilterCodeByName(const std::string & method); -void unpackTarfile(Source & source, const Path & destDir); +void unpackTarfile(Source & source, const std::filesystem::path & destDir); -void unpackTarfile(const Path & tarFile, const Path & destDir); +void unpackTarfile(const std::filesystem::path & tarFile, const std::filesystem::path & destDir); time_t unpackTarfileToSink(TarArchive & archive, ExtendedFileSystemObjectSink & parseSink); diff --git a/src/libutil/thread-pool.cc b/src/libutil/thread-pool.cc index 0f6349642..0725c1926 100644 --- a/src/libutil/thread-pool.cc +++ b/src/libutil/thread-pool.cc @@ -110,11 +110,15 @@ void ThreadPool::doWork(bool mainThread) propagate it. */ try { std::rethrow_exception(exc); + } catch (const Interrupted &) { + // The interrupted state may be picked up by multiple + // workers, which is expected, so we should ignore + // it silently and let the first one bubble up, + // rethrown via the original state->exception. + } catch (const ThreadPoolShutDown &) { + // Similarly expected. } catch (std::exception & e) { - if (!dynamic_cast(&e) && - !dynamic_cast(&e)) - ignoreException(); - } catch (...) { + ignoreExceptionExceptInterrupt(); } } } diff --git a/src/libutil/unix/file-descriptor.cc b/src/libutil/unix/file-descriptor.cc index a3af1623f..ac7c086af 100644 --- a/src/libutil/unix/file-descriptor.cc +++ b/src/libutil/unix/file-descriptor.cc @@ -47,7 +47,7 @@ void writeFull(int fd, std::string_view s, bool allowInterrupts) } -std::string readLine(int fd) +std::string readLine(int fd, bool eofOk) { std::string s; while (1) { @@ -58,8 +58,12 @@ std::string readLine(int fd) if (rd == -1) { if (errno != EINTR) throw SysError("reading a line"); - } else if (rd == 0) - throw EndOfFile("unexpected EOF reading a line"); + } else if (rd == 0) { + if (eofOk) + return s; + else + throw EndOfFile("unexpected EOF reading a line"); + } else { if (ch == '\n') return s; s += ch; @@ -120,14 +124,38 @@ void Pipe::create() ////////////////////////////////////////////////////////////////////// -void unix::closeMostFDs(const std::set & exceptions) +#if __linux__ || __FreeBSD__ +static int unix_close_range(unsigned int first, unsigned int last, int flags) { +#if !HAVE_CLOSE_RANGE + return syscall(SYS_close_range, first, last, (unsigned int)flags); +#else + return close_range(first, last, flags); +#endif +} +#endif + +void unix::closeExtraFDs() +{ + constexpr int MAX_KEPT_FD = 2; + static_assert(std::max({STDIN_FILENO, STDOUT_FILENO, STDERR_FILENO}) == MAX_KEPT_FD); + +#if __linux__ || __FreeBSD__ + // first try to close_range everything we don't care about. if this + // returns an error with these parameters we're running on a kernel + // that does not implement close_range (i.e. pre 5.9) and fall back + // to the old method. we should remove that though, in some future. 
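For readers unfamiliar with the fallback chain described in the comment above, the following standalone sketch shows the same strategy in isolation: keep only stdin, stdout and stderr, ask the kernel to close everything else in a single `close_range(2)` call, and fall back to the traditional descriptor loop when the syscall is unavailable (Linux before 5.9). The helper name is made up for the example; the real logic lives in `closeExtraFDs()`.

```cpp
#include <sys/syscall.h>
#include <unistd.h>

static void closeFdsAboveStderrSketch()
{
    constexpr unsigned int firstToClose = 3; // keep fds 0, 1 and 2
#ifdef SYS_close_range
    // One syscall closes the whole range; fails (e.g. ENOSYS) on old kernels.
    if (syscall(SYS_close_range, firstToClose, ~0U, 0) == 0)
        return;
#endif
    // Fallback: walk every possible descriptor up to the soft limit.
    long maxFD = sysconf(_SC_OPEN_MAX);
    for (long fd = firstToClose; fd < maxFD; ++fd)
        close(fd); // ignore errors; the fd may simply not be open
}
```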
+ if (unix_close_range(MAX_KEPT_FD + 1, ~0U, 0) == 0) { + return; + } +#endif + #if __linux__ try { for (auto & s : std::filesystem::directory_iterator{"/proc/self/fd"}) { checkInterrupt(); auto fd = std::stoi(s.path().filename()); - if (!exceptions.count(fd)) { + if (fd > MAX_KEPT_FD) { debug("closing leaked FD %d", fd); close(fd); } @@ -142,9 +170,8 @@ void unix::closeMostFDs(const std::set & exceptions) #if HAVE_SYSCONF maxFD = sysconf(_SC_OPEN_MAX); #endif - for (int fd = 0; fd < maxFD; ++fd) - if (!exceptions.count(fd)) - close(fd); /* ignore result */ + for (int fd = MAX_KEPT_FD + 1; fd < maxFD; ++fd) + close(fd); /* ignore result */ } diff --git a/src/libutil/unix/processes.cc b/src/libutil/unix/processes.cc index c5ce74acc..43d9179d9 100644 --- a/src/libutil/unix/processes.cc +++ b/src/libutil/unix/processes.cc @@ -1,5 +1,6 @@ #include "current-process.hh" #include "environment-variables.hh" +#include "executable-path.hh" #include "signals.hh" #include "processes.hh" #include "finally.hh" @@ -419,4 +420,12 @@ bool statusOk(int status) return WIFEXITED(status) && WEXITSTATUS(status) == 0; } +int execvpe(const char * file0, const char * const argv[], const char * const envp[]) +{ + auto file = ExecutablePath::load().findPath(file0); + // `const_cast` is safe. See the note in + // https://pubs.opengroup.org/onlinepubs/9799919799/functions/exec.html + return execve(file.c_str(), const_cast(argv), const_cast(envp)); +} + } diff --git a/src/libutil/unix/signals-impl.hh b/src/libutil/unix/signals-impl.hh index 7ac8c914d..2193922be 100644 --- a/src/libutil/unix/signals-impl.hh +++ b/src/libutil/unix/signals-impl.hh @@ -84,6 +84,12 @@ static inline bool getInterrupted() return unix::_isInterrupted; } +/** + * Throw `Interrupted` exception if the process has been interrupted. + * + * Call this in long-running loops and between slow operations to terminate + * them as needed. + */ void inline checkInterrupt() { using namespace unix; diff --git a/src/libutil/unix/signals.cc b/src/libutil/unix/signals.cc index 7e30687d8..d0608dace 100644 --- a/src/libutil/unix/signals.cc +++ b/src/libutil/unix/signals.cc @@ -91,7 +91,7 @@ void unix::triggerInterrupt() try { callback(); } catch (...) { - ignoreException(); + ignoreExceptionInDestructor(); } } } diff --git a/src/libutil/unix/users.cc b/src/libutil/unix/users.cc index 58063a953..107a6e04f 100644 --- a/src/libutil/unix/users.cc +++ b/src/libutil/unix/users.cc @@ -9,6 +9,8 @@ namespace nix { +namespace fs { using namespace std::filesystem; } + std::string getUserName() { auto pw = getpwuid(geteuid()); diff --git a/src/libutil/url.cc b/src/libutil/url.cc index bcbe9ea4e..9ed49dcbe 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -79,10 +79,14 @@ std::map decodeQuery(const std::string & query) for (auto s : tokenizeString(query, "&")) { auto e = s.find('='); - if (e != std::string::npos) - result.emplace( - s.substr(0, e), - percentDecode(std::string_view(s).substr(e + 1))); + if (e == std::string::npos) { + warn("dubious URI query '%s' is missing equal sign '%s', ignoring", s, "="); + continue; + } + + result.emplace( + s.substr(0, e), + percentDecode(std::string_view(s).substr(e + 1))); } return result; diff --git a/src/libutil/users.cc b/src/libutil/users.cc index d546e364f..b4bc67cbc 100644 --- a/src/libutil/users.cc +++ b/src/libutil/users.cc @@ -7,15 +7,33 @@ namespace nix { Path getCacheDir() { - auto cacheDir = getEnv("XDG_CACHE_HOME"); - return cacheDir ? 
*cacheDir : getHome() + "/.cache"; + auto dir = getEnv("NIX_CACHE_HOME"); + if (dir) { + return *dir; + } else { + auto xdgDir = getEnv("XDG_CACHE_HOME"); + if (xdgDir) { + return *xdgDir + "/nix"; + } else { + return getHome() + "/.cache/nix"; + } + } } Path getConfigDir() { - auto configDir = getEnv("XDG_CONFIG_HOME"); - return configDir ? *configDir : getHome() + "/.config"; + auto dir = getEnv("NIX_CONFIG_HOME"); + if (dir) { + return *dir; + } else { + auto xdgDir = getEnv("XDG_CONFIG_HOME"); + if (xdgDir) { + return *xdgDir + "/nix"; + } else { + return getHome() + "/.config/nix"; + } + } } std::vector getConfigDirs() @@ -23,6 +41,9 @@ std::vector getConfigDirs() Path configHome = getConfigDir(); auto configDirs = getEnv("XDG_CONFIG_DIRS").value_or("/etc/xdg"); std::vector result = tokenizeString>(configDirs, ":"); + for (auto& p : result) { + p += "/nix"; + } result.insert(result.begin(), configHome); return result; } @@ -30,19 +51,37 @@ std::vector getConfigDirs() Path getDataDir() { - auto dataDir = getEnv("XDG_DATA_HOME"); - return dataDir ? *dataDir : getHome() + "/.local/share"; + auto dir = getEnv("NIX_DATA_HOME"); + if (dir) { + return *dir; + } else { + auto xdgDir = getEnv("XDG_DATA_HOME"); + if (xdgDir) { + return *xdgDir + "/nix"; + } else { + return getHome() + "/.local/share/nix"; + } + } } Path getStateDir() { - auto stateDir = getEnv("XDG_STATE_HOME"); - return stateDir ? *stateDir : getHome() + "/.local/state"; + auto dir = getEnv("NIX_STATE_HOME"); + if (dir) { + return *dir; + } else { + auto xdgDir = getEnv("XDG_STATE_HOME"); + if (xdgDir) { + return *xdgDir + "/nix"; + } else { + return getHome() + "/.local/state/nix"; + } + } } Path createNixStateDir() { - Path dir = getStateDir() + "/nix"; + Path dir = getStateDir(); createDirs(dir); return dir; } diff --git a/src/libutil/users.hh b/src/libutil/users.hh index 153cc73fd..d22c3311d 100644 --- a/src/libutil/users.hh +++ b/src/libutil/users.hh @@ -24,12 +24,12 @@ Path getHomeOf(uid_t userId); Path getHome(); /** - * @return $XDG_CACHE_HOME or $HOME/.cache. + * @return $NIX_CACHE_HOME or $XDG_CACHE_HOME/nix or $HOME/.cache/nix. */ Path getCacheDir(); /** - * @return $XDG_CONFIG_HOME or $HOME/.config. + * @return $NIX_CONFIG_HOME or $XDG_CONFIG_HOME/nix or $HOME/.config/nix. */ Path getConfigDir(); @@ -39,12 +39,12 @@ Path getConfigDir(); std::vector getConfigDirs(); /** - * @return $XDG_DATA_HOME or $HOME/.local/share. + * @return $NIX_DATA_HOME or $XDG_DATA_HOME/nix or $HOME/.local/share/nix. */ Path getDataDir(); /** - * @return $XDG_STATE_HOME or $HOME/.local/state. + * @return $NIX_STATE_HOME or $XDG_STATE_HOME/nix or $HOME/.local/state/nix. */ Path getStateDir(); diff --git a/src/libutil/util.cc b/src/libutil/util.cc index db3ed1ddf..ed5c7e4f1 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -1,6 +1,7 @@ #include "util.hh" #include "fmt.hh" #include "file-path.hh" +#include "signals.hh" #include #include @@ -182,7 +183,7 @@ std::string shellEscape(const std::string_view s) } -void ignoreException(Verbosity lvl) +void ignoreExceptionInDestructor(Verbosity lvl) { /* Make sure no exceptions leave this function. printError() also throws when remote is closed. */ @@ -195,6 +196,17 @@ void ignoreException(Verbosity lvl) } catch (...) 
{ } } +void ignoreExceptionExceptInterrupt(Verbosity lvl) +{ + try { + throw; + } catch (const Interrupted & e) { + throw; + } catch (std::exception & e) { + printMsg(lvl, "error (ignored): %1%", e.what()); + } +} + constexpr char base64Chars[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; @@ -244,7 +256,7 @@ std::string base64Decode(std::string_view s) char digit = base64DecodeChars[(unsigned char) c]; if (digit == npos) - throw Error("invalid character in Base64 string: '%c'", c); + throw FormatError("invalid character in Base64 string: '%c'", c); bits += 6; d = d << 6 | digit; diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 25128a900..0fb6ff837 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -156,9 +156,26 @@ std::string toLower(std::string s); std::string shellEscape(const std::string_view s); -/* Exception handling in destructors: print an error message, then - ignore the exception. */ -void ignoreException(Verbosity lvl = lvlError); +/** + * Exception handling in destructors: print an error message, then + * ignore the exception. + * + * If you're not in a destructor, you usually want to use `ignoreExceptionExceptInterrupt()`. + * + * This function might also be used in callbacks whose caller may not handle exceptions, + * but ideally we propagate the exception using an exception_ptr in such cases. + * See e.g. `PackBuilderContext` + */ +void ignoreExceptionInDestructor(Verbosity lvl = lvlError); + +/** + * Not destructor-safe. + * Print an error message, then ignore the exception. + * If the exception is an `Interrupted` exception, rethrow it. + * + * This may be used in a few places where Interrupt can't happen, but that's ok. + */ +void ignoreExceptionExceptInterrupt(Verbosity lvl = lvlError); @@ -172,9 +189,13 @@ constexpr char treeNull[] = " "; /** - * Base64 encoding/decoding. + * Encode arbitrary bytes as Base64. */ std::string base64Encode(std::string_view s); + +/** + * Decode arbitrary bytes to Base64. 
+ */ std::string base64Decode(std::string_view s); diff --git a/src/libutil/windows/file-descriptor.cc b/src/libutil/windows/file-descriptor.cc index 16773e3ea..7b8a712e8 100644 --- a/src/libutil/windows/file-descriptor.cc +++ b/src/libutil/windows/file-descriptor.cc @@ -61,7 +61,7 @@ void writeFull(HANDLE handle, std::string_view s, bool allowInterrupts) } -std::string readLine(HANDLE handle) +std::string readLine(HANDLE handle, bool eofOk) { std::string s; while (1) { @@ -71,8 +71,12 @@ std::string readLine(HANDLE handle) DWORD rd; if (!ReadFile(handle, &ch, 1, &rd, NULL)) { throw WinError("reading a line"); - } else if (rd == 0) - throw EndOfFile("unexpected EOF reading a line"); + } else if (rd == 0) { + if (eofOk) + return s; + else + throw EndOfFile("unexpected EOF reading a line"); + } else { if (ch == '\n') return s; s += ch; diff --git a/src/libutil/windows/processes.cc b/src/libutil/windows/processes.cc index 9cd714f84..7f34c5632 100644 --- a/src/libutil/windows/processes.cc +++ b/src/libutil/windows/processes.cc @@ -1,6 +1,7 @@ #include "current-process.hh" #include "environment-variables.hh" #include "error.hh" +#include "executable-path.hh" #include "file-descriptor.hh" #include "file-path.hh" #include "signals.hh" @@ -377,4 +378,11 @@ bool statusOk(int status) { return status == 0; } + +int execvpe(const wchar_t * file0, const wchar_t * const argv[], const wchar_t * const envp[]) +{ + auto file = ExecutablePath::load().findPath(file0); + return _wexecve(file.c_str(), argv, envp); +} + } diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 0ce987d8a..c394836da 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -36,7 +36,7 @@ extern char * * environ __attribute__((weak)); /* Recreate the effect of the perl shellwords function, breaking up a * string into arguments like a shell word, including escapes */ -static std::vector shellwords(const std::string & s) +static std::vector shellwords(std::string_view s) { std::regex whitespace("^\\s+"); auto begin = s.cbegin(); @@ -51,7 +51,7 @@ static std::vector shellwords(const std::string & s) auto it = begin; for (; it != s.cend(); ++it) { if (st == sBegin) { - std::smatch match; + std::cmatch match; if (regex_search(it, s.cend(), match, whitespace)) { cur.append(begin, it); res.push_back(cur); @@ -163,7 +163,7 @@ static void main_nix_build(int argc, char * * argv) script = argv[1]; try { auto lines = tokenizeString(readFile(script), "\n"); - if (std::regex_search(lines.front(), std::regex("^#!"))) { + if (!lines.empty() && std::regex_search(lines.front(), std::regex("^#!"))) { lines.pop_front(); inShebang = true; for (int i = 2; i < argc; ++i) @@ -173,7 +173,7 @@ static void main_nix_build(int argc, char * * argv) line = chomp(line); std::smatch match; if (std::regex_match(line, match, std::regex("^#!\\s*nix-shell\\s+(.*)$"))) - for (const auto & word : shellwords(match[1].str())) + for (const auto & word : shellwords({match[1].first, match[1].second})) args.push_back(word); } } @@ -260,9 +260,9 @@ static void main_nix_build(int argc, char * * argv) // read the shebang to understand which packages to read from. Since // this is handled via nix-shell -p, we wrap our ruby script execution // in ruby -e 'load' which ignores the shebangs. 
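Relatedly, the `shellwords` change earlier in this hunk swaps `std::smatch` for `std::cmatch` because the function now receives a `std::string_view`, whose underlying iterators are plain `const char *`. Here is a small hedged sketch of matching over a view without materialising a temporary `std::string`; the variable names are illustrative.

```cpp
#include <iostream>
#include <regex>
#include <string_view>

int main()
{
    std::string_view s = "   leading spaces then words";
    std::regex whitespace("^\\s+");

    // std::cmatch is the match_results type for const char * iterators,
    // which is exactly what a contiguous string_view provides.
    std::cmatch match;
    if (std::regex_search(s.data(), s.data() + s.size(), match, whitespace))
        std::cout << "skipped " << match.length() << " whitespace chars\n";
}
```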
- envCommand = fmt("exec %1% %2% -e 'load(ARGV.shift)' -- %3% %4%", execArgs, interpreter, shellEscape(script), joined.str()); + envCommand = fmt("exec %1% %2% -e 'load(ARGV.shift)' -- %3% %4%", execArgs, interpreter, shellEscape(script), toView(joined)); } else { - envCommand = fmt("exec %1% %2% %3% %4%", execArgs, interpreter, shellEscape(script), joined.str()); + envCommand = fmt("exec %1% %2% %3% %4%", execArgs, interpreter, shellEscape(script), toView(joined)); } } @@ -526,8 +526,6 @@ static void main_nix_build(int argc, char * * argv) // Set the environment. auto env = getEnv(); - auto tmp = getEnvNonEmpty("TMPDIR").value_or("/tmp"); - if (pure) { decltype(env) newEnv; for (auto & i : env) @@ -538,18 +536,16 @@ static void main_nix_build(int argc, char * * argv) env["__ETC_PROFILE_SOURCED"] = "1"; } - env["NIX_BUILD_TOP"] = env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmp; + env["NIX_BUILD_TOP"] = env["TMPDIR"] = env["TEMPDIR"] = env["TMP"] = env["TEMP"] = tmpDir.path().string(); env["NIX_STORE"] = store->storeDir; env["NIX_BUILD_CORES"] = std::to_string(settings.buildCores); auto passAsFile = tokenizeString(getOr(drv.env, "passAsFile", "")); - bool keepTmp = false; int fileNr = 0; for (auto & var : drv.env) if (passAsFile.count(var.first)) { - keepTmp = true; auto fn = ".attr-" + std::to_string(fileNr++); Path p = (tmpDir.path() / fn).string(); writeFile(p, var.second); @@ -591,7 +587,6 @@ static void main_nix_build(int argc, char * * argv) env["NIX_ATTRS_SH_FILE"] = attrsSH; env["NIX_ATTRS_JSON_FILE"] = attrsJSON; - keepTmp = true; } } @@ -601,12 +596,10 @@ static void main_nix_build(int argc, char * * argv) lose the current $PATH directories. */ auto rcfile = (tmpDir.path() / "rc").string(); std::string rc = fmt( - R"(_nix_shell_clean_tmpdir() { command rm -rf %1%; }; )"s + - (keepTmp ? - "trap _nix_shell_clean_tmpdir EXIT; " - "exitHooks+=(_nix_shell_clean_tmpdir); " - "failureHooks+=(_nix_shell_clean_tmpdir); ": - "_nix_shell_clean_tmpdir; ") + + (R"(_nix_shell_clean_tmpdir() { command rm -rf %1%; };)"s + "trap _nix_shell_clean_tmpdir EXIT; " + "exitHooks+=(_nix_shell_clean_tmpdir); " + "failureHooks+=(_nix_shell_clean_tmpdir); ") + (pure ? "" : "[ -n \"$PS1\" ] && [ -e ~/.bashrc ] && source ~/.bashrc;") + "%2%" // always clear PATH. diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index 878f3c828..56d1d7abb 100644 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -18,7 +18,7 @@ using namespace nix; typedef std::map Channels; static Channels channels; -static Path channelsList; +static std::filesystem::path channelsList; // Reads the list of channels. 
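`readChannels()`, which follows, parses the same file that `writeChannels()` above serialises: one channel per line, URL first, then the channel name. A hedged standalone sketch of reading that format with the standard library only (the real code goes through Nix's `readFile`/`tokenizeString` helpers):

```cpp
#include <filesystem>
#include <fstream>
#include <map>
#include <sstream>
#include <string>

// Parse a nix-channels style file: one "<url> <name>" pair per line.
// Error handling is intentionally minimal for the sketch.
std::map<std::string, std::string> parseChannels(const std::filesystem::path & file)
{
    std::map<std::string, std::string> result; // name -> url
    std::ifstream in(file);
    std::string line;
    while (std::getline(in, line)) {
        std::istringstream fields(line);
        std::string url, name;
        if (fields >> url >> name)
            result[name] = url;
    }
    return result;
}
```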
static void readChannels() @@ -42,7 +42,7 @@ static void writeChannels() { auto channelsFD = AutoCloseFD{open(channelsList.c_str(), O_WRONLY | O_CLOEXEC | O_CREAT | O_TRUNC, 0644)}; if (!channelsFD) - throw SysError("opening '%1%' for writing", channelsList); + throw SysError("opening '%1%' for writing", channelsList.string()); for (const auto & channel : channels) writeFull(channelsFD.get(), channel.second + " " + channel.first + "\n"); } diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc index 91209c978..20d5161df 100644 --- a/src/nix-collect-garbage/nix-collect-garbage.cc +++ b/src/nix-collect-garbage/nix-collect-garbage.cc @@ -11,6 +11,8 @@ #include #include +namespace nix::fs { using namespace std::filesystem; } + using namespace nix; std::string deleteOlderThan; @@ -21,23 +23,23 @@ bool dryRun = false; * Of course, this makes rollbacks to before this point in time * impossible. */ -void removeOldGenerations(std::string dir) +void removeOldGenerations(fs::path dir) { - if (access(dir.c_str(), R_OK) != 0) return; + if (access(dir.string().c_str(), R_OK) != 0) return; - bool canWrite = access(dir.c_str(), W_OK) == 0; + bool canWrite = access(dir.string().c_str(), W_OK) == 0; - for (auto & i : std::filesystem::directory_iterator{dir}) { + for (auto & i : fs::directory_iterator{dir}) { checkInterrupt(); auto path = i.path().string(); auto type = i.symlink_status().type(); - if (type == std::filesystem::file_type::symlink && canWrite) { + if (type == fs::file_type::symlink && canWrite) { std::string link; try { link = readLink(path); - } catch (std::filesystem::filesystem_error & e) { + } catch (fs::filesystem_error & e) { if (e.code() == std::errc::no_such_file_or_directory) continue; throw; } @@ -49,7 +51,7 @@ void removeOldGenerations(std::string dir) } else deleteOldGenerations(path, dryRun); } - } else if (type == std::filesystem::file_type::directory) { + } else if (type == fs::file_type::directory) { removeOldGenerations(path); } } @@ -81,8 +83,11 @@ static int main_nix_collect_garbage(int argc, char * * argv) }); if (removeOld) { - std::set dirsToClean = { - profilesDir(), settings.nixStateDir + "/profiles", dirOf(getDefaultProfile())}; + std::set dirsToClean = { + profilesDir(), + fs::path{settings.nixStateDir} / "profiles", + fs::path{getDefaultProfile()}.parent_path(), + }; for (auto & dir : dirsToClean) removeOldGenerations(dir); } diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index a24dd11d6..ee62077c0 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -111,9 +111,7 @@ bool createUserEnv(EvalState & state, PackageInfos & elems, auto manifestFile = ({ std::ostringstream str; printAmbiguous(manifest, state.symbols, str, nullptr, std::numeric_limits::max()); - // TODO with C++20 we can use str.view() instead and avoid copy. 
- std::string str2 = str.str(); - StringSource source { str2 }; + StringSource source { toView(str) }; state.store->addToStoreFromDump( source, "env-manifest.nix", FileSerialisationMethod::Flat, ContentAddressMethod::Raw::Text, HashAlgorithm::SHA256, references); }); diff --git a/src/nix-expr-test-support b/src/nix-expr-test-support deleted file mode 120000 index 427b80dff..000000000 --- a/src/nix-expr-test-support +++ /dev/null @@ -1 +0,0 @@ -../tests/unit/libexpr-support \ No newline at end of file diff --git a/src/nix-expr-tests b/src/nix-expr-tests deleted file mode 120000 index 3af7110d3..000000000 --- a/src/nix-expr-tests +++ /dev/null @@ -1 +0,0 @@ -../tests/unit/libexpr \ No newline at end of file diff --git a/src/nix-fetchers-tests b/src/nix-fetchers-tests deleted file mode 120000 index 80e4b68ae..000000000 --- a/src/nix-fetchers-tests +++ /dev/null @@ -1 +0,0 @@ -../tests/unit/libfetchers \ No newline at end of file diff --git a/src/nix-flake-tests b/src/nix-flake-tests deleted file mode 120000 index bb2d49400..000000000 --- a/src/nix-flake-tests +++ /dev/null @@ -1 +0,0 @@ -../tests/unit/libflake \ No newline at end of file diff --git a/src/nix-manual b/src/nix-manual new file mode 120000 index 000000000..492c97408 --- /dev/null +++ b/src/nix-manual @@ -0,0 +1 @@ +../doc/manual/ \ No newline at end of file diff --git a/src/nix-store-test-support b/src/nix-store-test-support deleted file mode 120000 index af4befd90..000000000 --- a/src/nix-store-test-support +++ /dev/null @@ -1 +0,0 @@ -../tests/unit/libstore-support \ No newline at end of file diff --git a/src/nix-store-tests b/src/nix-store-tests deleted file mode 120000 index fc9b910af..000000000 --- a/src/nix-store-tests +++ /dev/null @@ -1 +0,0 @@ -../tests/unit/libstore \ No newline at end of file diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index b4de42ba1..c823c930e 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -694,7 +694,7 @@ static void opDump(Strings opFlags, Strings opArgs) if (!opFlags.empty()) throw UsageError("unknown flag"); if (opArgs.size() != 1) throw UsageError("only one argument allowed"); - FdSink sink(getStandardOut()); + FdSink sink(getStandardOutput()); std::string path = *opArgs.begin(); dumpPath(path, sink); sink.flush(); @@ -722,7 +722,7 @@ static void opExport(Strings opFlags, Strings opArgs) for (auto & i : opArgs) paths.insert(store->followLinksToStorePath(i)); - FdSink sink(getStandardOut()); + FdSink sink(getStandardOutput()); store->exportPaths(paths, sink); sink.flush(); } @@ -835,7 +835,7 @@ static void opServe(Strings opFlags, Strings opArgs) if (!opArgs.empty()) throw UsageError("no arguments expected"); FdSource in(STDIN_FILENO); - FdSink out(getStandardOut()); + FdSink out(getStandardOutput()); /* Exchange the greeting. 
*/ ServeProto::Version clientVersion = diff --git a/src/nix-util-test-support b/src/nix-util-test-support deleted file mode 120000 index 4b25930eb..000000000 --- a/src/nix-util-test-support +++ /dev/null @@ -1 +0,0 @@ -../tests/unit/libutil-support \ No newline at end of file diff --git a/src/nix-util-tests b/src/nix-util-tests deleted file mode 120000 index e1138411a..000000000 --- a/src/nix-util-tests +++ /dev/null @@ -1 +0,0 @@ -../tests/unit/libutil \ No newline at end of file diff --git a/src/nix/build.cc b/src/nix/build.cc index 479100186..da9132d02 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -43,22 +43,22 @@ static nlohmann::json builtPathsWithResultToJSON(const std::vector& buildables, LocalFSStore& store2) +static void createOutLinks(const std::filesystem::path& outLink, const std::vector& buildables, LocalFSStore& store2) { for (const auto & [_i, buildable] : enumerate(buildables)) { auto i = _i; std::visit(overloaded { [&](const BuiltPath::Opaque & bo) { - std::string symlink = outLink; + auto symlink = outLink; if (i) symlink += fmt("-%d", i); - store2.addPermRoot(bo.path, absPath(symlink)); + store2.addPermRoot(bo.path, absPath(symlink.string())); }, [&](const BuiltPath::Built & bfd) { for (auto & output : bfd.outputs) { - std::string symlink = outLink; + auto symlink = outLink; if (i) symlink += fmt("-%d", i); if (output.first != "out") symlink += fmt("-%s", output.first); - store2.addPermRoot(output.second, absPath(symlink)); + store2.addPermRoot(output.second, absPath(symlink.string())); } }, }, buildable.path.raw()); diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc index 7d9aa7711..5b7862c4e 100644 --- a/src/nix/bundle.cc +++ b/src/nix/bundle.cc @@ -6,6 +6,8 @@ #include "local-fs-store.hh" #include "eval-inline.hh" +namespace nix::fs { using namespace std::filesystem; } + using namespace nix; struct CmdBundle : InstallableValueCommand @@ -78,7 +80,7 @@ struct CmdBundle : InstallableValueCommand auto [bundlerFlakeRef, bundlerName, extendedOutputsSpec] = parseFlakeRefWithFragmentAndExtendedOutputsSpec( - fetchSettings, bundler, absPath(".")); + fetchSettings, bundler, fs::current_path().string()); const flake::LockFlags lockFlags{ .writeLockFile = false }; InstallableFlake bundler{this, evalState, std::move(bundlerFlakeRef), bundlerName, std::move(extendedOutputsSpec), @@ -117,8 +119,6 @@ struct CmdBundle : InstallableValueCommand }, }); - auto outPathS = store->printStorePath(outPath); - if (!outLink) { auto * attr = vRes->attrs()->get(evalState->sName); if (!attr) diff --git a/src/nix/cat.cc b/src/nix/cat.cc index ee904b0c5..e0179c348 100644 --- a/src/nix/cat.cc +++ b/src/nix/cat.cc @@ -16,7 +16,7 @@ struct MixCat : virtual Args throw Error("path '%1%' is not a regular file", path); stopProgressBar(); - writeFull(getStandardOut(), accessor->readFile(CanonPath(path))); + writeFull(getStandardOutput(), accessor->readFile(CanonPath(path))); } }; diff --git a/src/nix/config-check.cc b/src/nix/config-check.cc index 1a6574de2..a72b06542 100644 --- a/src/nix/config-check.cc +++ b/src/nix/config-check.cc @@ -10,6 +10,8 @@ #include "worker-protocol.hh" #include "executable-path.hh" +namespace nix::fs { using namespace std::filesystem; } + using namespace nix; namespace { @@ -24,24 +26,22 @@ std::string formatProtocol(unsigned int proto) return "unknown"; } -bool checkPass(const std::string & msg) { +bool checkPass(std::string_view msg) { notice(ANSI_GREEN "[PASS] " ANSI_NORMAL + msg); return true; } -bool checkFail(const std::string & msg) { +bool 
checkFail(std::string_view msg) { notice(ANSI_RED "[FAIL] " ANSI_NORMAL + msg); return false; } -void checkInfo(const std::string & msg) { +void checkInfo(std::string_view msg) { notice(ANSI_BLUE "[INFO] " ANSI_NORMAL + msg); } } -namespace fs = std::filesystem; - struct CmdConfigCheck : StoreCommand { bool success = true; @@ -87,11 +87,11 @@ struct CmdConfigCheck : StoreCommand } if (dirs.size() != 1) { - std::stringstream ss; + std::ostringstream ss; ss << "Multiple versions of nix found in PATH:\n"; for (auto & dir : dirs) ss << " " << dir << "\n"; - return checkFail(ss.str()); + return checkFail(toView(ss)); } return checkPass("PATH contains only one nix version."); @@ -125,14 +125,14 @@ struct CmdConfigCheck : StoreCommand } if (!dirs.empty()) { - std::stringstream ss; + std::ostringstream ss; ss << "Found profiles outside of " << settings.nixStateDir << "/profiles.\n" << "The generation this profile points to might not have a gcroot and could be\n" << "garbage collected, resulting in broken symlinks.\n\n"; for (auto & dir : dirs) ss << " " << dir << "\n"; ss << "\n"; - return checkFail(ss.str()); + return checkFail(toView(ss)); } return checkPass("All profiles are gcroots."); @@ -145,13 +145,13 @@ struct CmdConfigCheck : StoreCommand : PROTOCOL_VERSION; if (clientProto != storeProto) { - std::stringstream ss; + std::ostringstream ss; ss << "Warning: protocol version of this client does not match the store.\n" << "While this is not necessarily a problem it's recommended to keep the client in\n" << "sync with the daemon.\n\n" << "Client protocol: " << formatProtocol(clientProto) << "\n" << "Store protocol: " << formatProtocol(storeProto) << "\n\n"; - return checkFail(ss.str()); + return checkFail(toView(ss)); } return checkPass("Client protocol matches store protocol."); diff --git a/src/nix/derivation-add.md b/src/nix/derivation-add.md index 331cbdd88..35507d9ad 100644 --- a/src/nix/derivation-add.md +++ b/src/nix/derivation-add.md @@ -3,7 +3,7 @@ R""( # Description This command reads from standard input a JSON representation of a -[store derivation] to which an [*installable*](./nix.md#installables) evaluates. +[store derivation]. Store derivations are used internally by Nix. They are store paths with extension `.drv` that represent the build-time dependency graph to which diff --git a/src/nix/develop.cc b/src/nix/develop.cc index effc86a0a..9a95bc695 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -21,6 +21,8 @@ #include "strings.hh" +namespace nix::fs { using namespace std::filesystem; } + using namespace nix; struct DevelopSettings : Config @@ -341,7 +343,7 @@ struct Common : InstallableCommand, MixProfile ref store, const BuildEnvironment & buildEnvironment, const std::filesystem::path & tmpDir, - const std::filesystem::path & outputsDir = std::filesystem::path { absPath(".") } / "outputs") + const std::filesystem::path & outputsDir = fs::path { fs::current_path() } / "outputs") { // A list of colon-separated environment variables that should be // prepended to, rather than overwritten, in order to keep the shell usable. 
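The comment above refers to variables such as `PATH` that must be prepended to rather than replaced, so that the resulting shell stays usable. A minimal sketch of that prepend behaviour, using plain `getenv`/`setenv` instead of Nix's environment helpers (the function name is illustrative):

```cpp
#include <stdlib.h>
#include <string>

// Prepend `value` to a colon-separated search-path variable such as PATH,
// keeping whatever the variable already contains.
static void prependToSearchPath(const char * name, const std::string & value)
{
    const char * old = getenv(name);
    std::string combined = (old && *old) ? value + ":" + old : value;
    setenv(name, combined.c_str(), 1 /* overwrite */);
}
```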
@@ -450,7 +452,7 @@ struct Common : InstallableCommand, MixProfile auto targetFilePath = tmpDir / OS_STR(".attrs."); targetFilePath += ext; - writeFile(targetFilePath.string(), content); + writeFile(targetFilePath, content); auto fileInBuilderEnv = buildEnvironment.vars.find(envVar); assert(fileInBuilderEnv != buildEnvironment.vars.end()); @@ -608,6 +610,7 @@ struct CmdDevelop : Common, MixEnvironment else if (!command.empty()) { std::vector args; + args.reserve(command.size()); for (auto s : command) args.push_back(shellEscape(s)); script += fmt("exec %s\n", concatStringsSep(" ", args)); @@ -670,7 +673,7 @@ struct CmdDevelop : Common, MixEnvironment throw Error("package 'nixpkgs#bashInteractive' does not provide a 'bin/bash'"); } catch (Error &) { - ignoreException(); + ignoreExceptionExceptInterrupt(); } // Override SHELL with the one chosen for this environment. diff --git a/src/nix/dump-path.cc b/src/nix/dump-path.cc index 953d77d31..98a059fa1 100644 --- a/src/nix/dump-path.cc +++ b/src/nix/dump-path.cc @@ -20,7 +20,7 @@ struct CmdDumpPath : StorePathCommand void run(ref store, const StorePath & storePath) override { - FdSink sink(getStandardOut()); + FdSink sink(getStandardOutput()); store->narFromPath(storePath, sink); sink.flush(); } @@ -55,7 +55,7 @@ struct CmdDumpPath2 : Command void run() override { - FdSink sink(getStandardOut()); + FdSink sink(getStandardOutput()); dumpPath(path, sink); sink.flush(); } diff --git a/src/nix/eval.cc b/src/nix/eval.cc index 494735516..7811b77ed 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -11,11 +11,13 @@ using namespace nix; +namespace nix::fs { using namespace std::filesystem; } + struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption { bool raw = false; std::optional apply; - std::optional writeTo; + std::optional writeTo; CmdEval() : InstallableValueCommand() { @@ -75,30 +77,27 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption if (writeTo) { stopProgressBar(); - if (pathExists(*writeTo)) - throw Error("path '%s' already exists", *writeTo); + if (fs::symlink_exists(*writeTo)) + throw Error("path '%s' already exists", writeTo->string()); - std::function recurse; + std::function recurse; - recurse = [&](Value & v, const PosIdx pos, const Path & path) + recurse = [&](Value & v, const PosIdx pos, const fs::path & path) { state->forceValue(v, pos); if (v.type() == nString) // FIXME: disallow strings with contexts? - writeFile(path, v.string_view()); + writeFile(path.string(), v.string_view()); else if (v.type() == nAttrs) { - if (mkdir(path.c_str() -#ifndef _WIN32 // TODO abstract mkdir perms for Windows - , 0777 -#endif - ) == -1) - throw SysError("creating directory '%s'", path); + [[maybe_unused]] bool directoryCreated = fs::create_directory(path); + // Directory should not already exist + assert(directoryCreated); for (auto & attr : *v.attrs()) { std::string_view name = state->symbols[attr.name]; try { if (name == "." 
|| name == "..") throw Error("invalid file name '%s'", name); - recurse(*attr.value, attr.pos, concatStrings(path, "/", name)); + recurse(*attr.value, attr.pos, path / name); } catch (Error & e) { e.addTrace( state->positions[attr.pos], @@ -116,7 +115,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption else if (raw) { stopProgressBar(); - writeFull(getStandardOut(), *state->coerceToString(noPos, *v, context, "while generating the eval command output")); + writeFull(getStandardOutput(), *state->coerceToString(noPos, *v, context, "while generating the eval command output")); } else if (json) { diff --git a/src/nix/eval.md b/src/nix/eval.md index 48d5aa597..bd5b035e1 100644 --- a/src/nix/eval.md +++ b/src/nix/eval.md @@ -50,8 +50,9 @@ R""( # Description -This command evaluates the given Nix expression and prints the -result on standard output. +This command evaluates the given Nix expression, and prints the result on standard output. + +It also evaluates any nested attribute values and list items. # Output format diff --git a/src/nix/flake-archive.md b/src/nix/flake-archive.md index 85bbeeb16..18c735b11 100644 --- a/src/nix/flake-archive.md +++ b/src/nix/flake-archive.md @@ -22,8 +22,20 @@ R""( # nix flake archive --json --dry-run nixops ``` +* Upload all flake inputs to a different machine for remote evaluation + + ``` + # nix flake archive --to ssh://some-machine + ``` + + On the remote machine the flake can then be accessed via its store path. That's computed like this: + + ``` + # nix flake metadata --json | jq -r '.path' + ``` + # Description -FIXME +Copy a flake and all its inputs to a store. This is useful, for example, to evaluate flakes on a different host. )"" diff --git a/src/nix/flake-lock.md b/src/nix/flake-lock.md index 6d10258e3..d13666a4c 100644 --- a/src/nix/flake-lock.md +++ b/src/nix/flake-lock.md @@ -30,9 +30,9 @@ R""( # Description -This command adds inputs to the lock file of a flake (`flake.lock`) -so that it contains a lock for every flake input specified in -`flake.nix`. Existing lock file entries are not updated. +This command updates the lock file of a flake (`flake.lock`) +so that it contains an up-to-date lock for every flake input specified in +`flake.nix`. Lock file entries that are already up-to-date are not modified.
If you want to update existing lock entries, use [`nix flake update`](@docroot@/command-ref/new-cli/nix3-flake-update.md) diff --git a/src/nix/flake-update.md b/src/nix/flake-update.md index 63df3b12a..8b0159ff7 100644 --- a/src/nix/flake-update.md +++ b/src/nix/flake-update.md @@ -25,6 +25,19 @@ R""( → 'github:NixOS/nixpkgs/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293' (2023-07-05) ``` +* Update multiple inputs: + + ```console + # nix flake update nixpkgs nixpkgs-unstable + warning: updating lock file '/home/myself/repos/testflake/flake.lock': + • Updated input 'nixpkgs': + 'github:nixos/nixpkgs/8f7492cce28977fbf8bd12c72af08b1f6c7c3e49' (2024-09-14) + → 'github:nixos/nixpkgs/086b448a5d54fd117f4dc2dee55c9f0ff461bdc1' (2024-09-16) + • Updated input 'nixpkgs-unstable': + 'github:nixos/nixpkgs/345c263f2f53a3710abe117f28a5cb86d0ba4059' (2024-09-13) + → 'github:nixos/nixpkgs/99dc8785f6a0adac95f5e2ab05cc2e1bf666d172' (2024-09-16) + ``` + * Update only a single input of a flake in a different directory: ```console diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 3f9f8f99b..3a54763a1 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -18,11 +18,14 @@ #include "markdown.hh" #include "users.hh" +#include #include #include #include "strings-inline.hh" +namespace nix::fs { using namespace std::filesystem; } + using namespace nix; using namespace nix::flake; using json = nlohmann::json; @@ -49,7 +52,7 @@ public: FlakeRef getFlakeRef() { - return parseFlakeRef(fetchSettings, flakeUrl, absPath(".")); //FIXME + return parseFlakeRef(fetchSettings, flakeUrl, fs::current_path().string()); //FIXME } LockedFlake lockFlake() @@ -61,7 +64,7 @@ public: { return { // Like getFlakeRef but with expandTilde calld first - parseFlakeRef(fetchSettings, expandTilde(flakeUrl), absPath(".")) + parseFlakeRef(fetchSettings, expandTilde(flakeUrl), fs::current_path().string()) }; } }; @@ -210,7 +213,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON auto & flake = lockedFlake.flake; // Currently, all flakes are in the Nix store via the rootFS accessor. 
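The `absPath(".")` to `std::filesystem::current_path()` changes above all serve the same purpose: relative flake references are resolved against the process's current directory. A tiny hedged sketch of that resolution using only the standard library (the real call goes through `parseFlakeRef`, and the example path is invented):

```cpp
#include <filesystem>
#include <iostream>

namespace fs = std::filesystem;

int main()
{
    // Resolve a relative flake directory against the current directory,
    // the base that parseFlakeRef is handed in the calls above.
    fs::path flakeDir = "./templates/trivial";
    fs::path resolved = (fs::current_path() / flakeDir).lexically_normal();
    std::cout << resolved << "\n";
}
```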
- auto storePath = store->printStorePath(store->toStorePath(flake.path.path.abs()).first); + auto storePath = store->printStorePath(sourcePathToStorePath(store, flake.path).first); if (json) { nlohmann::json j; @@ -368,9 +371,11 @@ struct CmdFlakeCheck : FlakeCommand auto reportError = [&](const Error & e) { try { throw e; + } catch (Interrupted & e) { + throw; } catch (Error & e) { if (settings.keepGoing) { - ignoreException(); + ignoreExceptionExceptInterrupt(); hasErrors = true; } else @@ -437,14 +442,39 @@ struct CmdFlakeCheck : FlakeCommand auto checkApp = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { - #if 0 - // FIXME - auto app = App(*state, v); - for (auto & i : app.context) { - auto [drvPathS, outputName] = NixStringContextElem::parse(i); - store->parseStorePath(drvPathS); + Activity act(*logger, lvlInfo, actUnknown, fmt("checking app '%s'", attrPath)); + state->forceAttrs(v, pos, ""); + if (auto attr = v.attrs()->get(state->symbols.create("type"))) + state->forceStringNoCtx(*attr->value, attr->pos, ""); + else + throw Error("app '%s' lacks attribute 'type'", attrPath); + + if (auto attr = v.attrs()->get(state->symbols.create("program"))) { + if (attr->name == state->symbols.create("program")) { + NixStringContext context; + state->forceString(*attr->value, context, attr->pos, ""); + } + } else + throw Error("app '%s' lacks attribute 'program'", attrPath); + + if (auto attr = v.attrs()->get(state->symbols.create("meta"))) { + state->forceAttrs(*attr->value, attr->pos, ""); + if (auto dAttr = attr->value->attrs()->get(state->symbols.create("description"))) + state->forceStringNoCtx(*dAttr->value, dAttr->pos, ""); + else + logWarning({ + .msg = HintFmt("app '%s' lacks attribute 'meta.description'", attrPath), + }); + } else + logWarning({ + .msg = HintFmt("app '%s' lacks attribute 'meta'", attrPath), + }); + + for (auto & attr : *v.attrs()) { + std::string_view name(state->symbols[attr.name]); + if (name != "type" && name != "program" && name != "meta") + throw Error("app '%s' has unsupported attribute '%s'", attrPath, name); } - #endif } catch (Error & e) { e.addTrace(resolve(pos), HintFmt("while checking the app definition '%s'", attrPath)); reportError(e); @@ -629,7 +659,7 @@ struct CmdFlakeCheck : FlakeCommand const auto & attr_name = state->symbols[attr.name]; checkSystemName(attr_name, attr.pos); if (checkSystemType(attr_name, attr.pos)) { - checkApp( + checkDerivation( fmt("%s.%s", name, attr_name), *attr.value, attr.pos); }; @@ -851,7 +881,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand auto evalState = getEvalState(); auto [templateFlakeRef, templateName] = parseFlakeRefWithFragment( - fetchSettings, templateUrl, absPath(".")); + fetchSettings, templateUrl, fs::current_path().string()); auto installable = InstallableFlake(nullptr, evalState, std::move(templateFlakeRef), templateName, ExtendedOutputsSpec::Default(), @@ -870,27 +900,28 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand "If you've set '%s' to a string, try using a path instead.", templateDir, templateDirAttr->getAttrPathStr()).debugThrow(); - std::vector changedFiles; - std::vector conflictedFiles; + std::vector changedFiles; + std::vector conflictedFiles; - std::function copyDir; - copyDir = [&](const Path & from, const Path & to) + std::function copyDir; + copyDir = [&](const fs::path & from, const fs::path & to) { - createDirs(to); + fs::create_directories(to); - for (auto & entry : std::filesystem::directory_iterator{from}) { + for (auto & entry : 
fs::directory_iterator{from}) { checkInterrupt(); - auto from2 = entry.path().string(); - auto to2 = to + "/" + entry.path().filename().string(); - auto st = lstat(from2); - if (S_ISDIR(st.st_mode)) + auto from2 = entry.path(); + auto to2 = to / entry.path().filename(); + auto st = entry.symlink_status(); + auto to_st = fs::symlink_status(to2); + if (fs::is_directory(st)) copyDir(from2, to2); - else if (S_ISREG(st.st_mode)) { - auto contents = readFile(from2); - if (pathExists(to2)) { - auto contents2 = readFile(to2); + else if (fs::is_regular_file(st)) { + auto contents = readFile(from2.string()); + if (fs::exists(to_st)) { + auto contents2 = readFile(to2.string()); if (contents != contents2) { - printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2); + printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2.string(), from2.string()); conflictedFiles.push_back(to2); } else { notice("skipping identical file: %s", from2); @@ -899,18 +930,18 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand } else writeFile(to2, contents); } - else if (S_ISLNK(st.st_mode)) { - auto target = readLink(from2); - if (pathExists(to2)) { - if (readLink(to2) != target) { - printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2); + else if (fs::is_symlink(st)) { + auto target = fs::read_symlink(from2); + if (fs::exists(to_st)) { + if (fs::read_symlink(to2) != target) { + printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2.string(), from2.string()); conflictedFiles.push_back(to2); } else { notice("skipping identical file: %s", from2); } continue; } else - createSymlink(target, to2); + fs::create_symlink(target, to2); } else throw Error("file '%s' has unsupported type", from2); @@ -921,9 +952,9 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand copyDir(templateDir, flakeDir); - if (!changedFiles.empty() && pathExists(flakeDir + "/.git")) { + if (!changedFiles.empty() && fs::exists(std::filesystem::path{flakeDir} / ".git")) { Strings args = { "-C", flakeDir, "add", "--intent-to-add", "--force", "--" }; - for (auto & s : changedFiles) args.push_back(s); + for (auto & s : changedFiles) args.emplace_back(s.string()); runProgram("git", true, args); } auto welcomeText = cursor->maybeGetAttr("welcomeText"); @@ -1049,7 +1080,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun StorePathSet sources; - auto storePath = store->toStorePath(flake.flake.path.path.abs()).first; + auto storePath = sourcePathToStorePath(store, flake.flake.path).first; sources.insert(storePath); @@ -1243,6 +1274,7 @@ struct CmdFlakeShow : FlakeCommand, MixJSON auto showDerivation = [&]() { auto name = visitor.getAttr(state->sName)->getString(); + if (json) { std::optional description; if (auto aMeta = visitor.maybeGetAttr(state->sMeta)) { @@ -1251,8 +1283,7 @@ struct CmdFlakeShow : FlakeCommand, MixJSON } j.emplace("type", "derivation"); j.emplace("name", name); - if (description) - j.emplace("description", *description); + j.emplace("description", description ? 
*description : ""); } else { logger->cout("%s: %s '%s'", headerPrefix, @@ -1340,12 +1371,19 @@ struct CmdFlakeShow : FlakeCommand, MixJSON (attrPath.size() == 3 && attrPathS[0] == "apps")) { auto aType = visitor.maybeGetAttr("type"); + std::optional description; + if (auto aMeta = visitor.maybeGetAttr(state->sMeta)) { + if (auto aDescription = aMeta->maybeGetAttr(state->sDescription)) + description = aDescription->getString(); + } if (!aType || aType->getString() != "app") state->error("not an app definition").debugThrow(); if (json) { j.emplace("type", "app"); + if (description) + j.emplace("description", *description); } else { - logger->cout("%s: app", headerPrefix); + logger->cout("%s: app: " ANSI_BOLD "%s" ANSI_NORMAL, headerPrefix, description ? *description : "no description"); } } diff --git a/src/nix/flake.md b/src/nix/flake.md index 46d5a3867..2b999431c 100644 --- a/src/nix/flake.md +++ b/src/nix/flake.md @@ -120,7 +120,7 @@ Contrary to URL-like references, path-like flake references can contain arbitrar ### Examples -* `.`: The flake to which the current directory belongs to. +* `.`: The flake to which the current directory belongs. * `/home/alice/src/patchelf`: A flake in some other directory. * `./../sub directory/with Ûñî©ôδ€`: A flake in another relative directory that has Unicode characters in its name. @@ -565,8 +565,9 @@ or NixOS modules, which are composed into the top-level flake's Inputs specified in `flake.nix` are typically "unlocked" in the sense that they don't specify an exact revision. To ensure reproducibility, Nix will automatically generate and use a *lock file* called -`flake.lock` in the flake's directory. The lock file contains a graph -structure isomorphic to the graph of dependencies of the root +`flake.lock` in the flake's directory. +The lock file is a UTF-8 JSON file. +It contains a graph structure isomorphic to the graph of dependencies of the root flake. Each node in the graph (except the root node) maps the (usually) unlocked input specifications in `flake.nix` to locked input specifications. Each node also contains some metadata, such as the diff --git a/src/nix/fmt.cc b/src/nix/fmt.cc index d65834495..f444d6add 100644 --- a/src/nix/fmt.cc +++ b/src/nix/fmt.cc @@ -40,14 +40,8 @@ struct CmdFmt : SourceExprCommand { Strings programArgs{app.program}; // Propagate arguments from the CLI - if (args.empty()) { - // Format the current flake out of the box - programArgs.push_back("."); - } else { - // User wants more power, let them decide which paths to include/exclude - for (auto &i : args) { - programArgs.push_back(i); - } + for (auto &i : args) { + programArgs.push_back(i); } // Release our references to eval caches to ensure they are persisted to disk, because diff --git a/src/nix/fmt.md b/src/nix/fmt.md index 1c78bb36f..a2afde61c 100644 --- a/src/nix/fmt.md +++ b/src/nix/fmt.md @@ -1,5 +1,14 @@ R""( +# Description + +`nix fmt` calls the formatter specified in the flake. + +Flags can be forwarded to the formatter by using `--` followed by the flags. + +Any arguments will be forwarded to the formatter. Typically these are the files to format. 
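As the new `fmt.md` text above says, `nix fmt` now forwards the user's arguments to the formatter unchanged instead of injecting a default `"."`. A hedged sketch of such verbatim forwarding, reduced to a plain `execvp`; in the real command the formatter program comes from the flake's `formatter` output and is run through Nix's own process helpers.

```cpp
#include <string>
#include <vector>
#include <unistd.h>

// Hand the CLI arguments to an external formatter exactly as given.
[[noreturn]] static void runFormatterSketch(
    const std::string & program, const std::vector<std::string> & args)
{
    std::vector<char *> argv;
    argv.push_back(const_cast<char *>(program.c_str()));
    for (auto & a : args)
        argv.push_back(const_cast<char *>(a.c_str()));
    argv.push_back(nullptr);
    execvp(program.c_str(), argv.data());
    _exit(127); // only reached if exec failed
}
```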
+ + # Examples With [nixpkgs-fmt](https://github.com/nix-community/nixpkgs-fmt): @@ -13,10 +22,6 @@ With [nixpkgs-fmt](https://github.com/nix-community/nixpkgs-fmt): } ``` -- Format the current flake: `$ nix fmt` - -- Format a specific folder or file: `$ nix fmt ./folder ./file.nix` - With [nixfmt](https://github.com/serokell/nixfmt): ```nix @@ -28,8 +33,6 @@ With [nixfmt](https://github.com/serokell/nixfmt): } ``` -- Format specific files: `$ nix fmt ./file1.nix ./file2.nix` - With [Alejandra](https://github.com/kamadorueda/alejandra): ```nix @@ -41,13 +44,4 @@ With [Alejandra](https://github.com/kamadorueda/alejandra): } ``` -- Format the current flake: `$ nix fmt` - -- Format a specific folder or file: `$ nix fmt ./folder ./file.nix` - -# Description - -`nix fmt` will rewrite all Nix files (\*.nix) to a canonical format -using the formatter specified in your flake. - )"" diff --git a/src/nix/help-stores.md b/src/nix/help-stores.md index 5c5624f5e..ff9d39c50 120000 --- a/src/nix/help-stores.md +++ b/src/nix/help-stores.md @@ -1 +1 @@ -../../doc/manual/src/store/types/index.md.in \ No newline at end of file +../../doc/manual/source/store/types/index.md.in \ No newline at end of file diff --git a/src/nix/log.cc b/src/nix/log.cc index 7f590c708..1a6f48f5e 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -57,7 +57,7 @@ struct CmdLog : InstallableCommand if (!log) continue; stopProgressBar(); printInfo("got build log for '%s' from '%s'", installable->what(), logSub.getUri()); - writeFull(getStandardOut(), *log); + writeFull(getStandardOutput(), *log); return; } diff --git a/src/nix/main.cc b/src/nix/main.cc index 34de79ac8..eff2d60a4 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -2,7 +2,6 @@ #include "current-process.hh" #include "command.hh" #include "common-args.hh" -#include "eval-gc.hh" #include "eval.hh" #include "eval-settings.hh" #include "globals.hh" @@ -20,6 +19,7 @@ #include "eval-cache.hh" #include "flake/flake.hh" #include "self-exe.hh" +#include "json-utils.hh" #include #include diff --git a/src/nix/meson.build b/src/nix/meson.build index 798c98e33..60ee48035 100644 --- a/src/nix/meson.build +++ b/src/nix/meson.build @@ -7,6 +7,7 @@ project('nix', 'cpp', 'debug=true', 'optimization=2', 'errorlogs=true', # Please print logs for tests that fail + 'localstatedir=/nix/var', ], meson_version : '>= 1.1', license : 'LGPL-2.1-or-later', @@ -16,9 +17,11 @@ cxx = meson.get_compiler('cpp') subdir('build-utils-meson/deps-lists') +nix_store = dependency('nix-store') + deps_private_maybe_subproject = [ dependency('nix-util'), - dependency('nix-store'), + nix_store, dependency('nix-expr'), dependency('nix-flake'), dependency('nix-fetchers'), @@ -32,6 +35,7 @@ subdir('build-utils-meson/subprojects') subdir('build-utils-meson/threads') subdir('build-utils-meson/export-all-symbols') +subdir('build-utils-meson/windows-version') configdata = configuration_data() @@ -209,18 +213,25 @@ nix_symlinks = [ 'nix-store', ] +executable_suffix = '' +if host_machine.system() == 'windows' + executable_suffix = '.exe' +endif + foreach linkname : nix_symlinks install_symlink( - linkname, + linkname + executable_suffix, # TODO(Qyriad): should these continue to be relative symlinks? - pointing_to : 'nix', + pointing_to : fs.name(this_exe), install_dir : get_option('bindir'), # The 'runtime' tag is what executables default to, which we want to emulate here. 
install_tag : 'runtime' ) t = custom_target( command: ['ln', '-sf', fs.name(this_exe), '@OUTPUT@'], - output: linkname, + output: linkname + executable_suffix, + # native doesn't allow dangling symlinks, but the target executable often doesn't exist at this time + env : { 'MSYS' : 'winsymlinks:lnk' }, # TODO(Ericson2314): Don't do this once we have the `meson.override_find_program` working) build_by_default: true ) @@ -230,17 +241,28 @@ endforeach install_symlink( 'build-remote', - pointing_to : '..' / '..'/ get_option('bindir') / 'nix', - install_dir : get_option('libexecdir') / 'nix', + pointing_to : '..' / '..'/ get_option('bindir') / fs.name(this_exe), + install_dir : get_option('libexecdir') / fs.name(this_exe), # The 'runtime' tag is what executables default to, which we want to emulate here. install_tag : 'runtime' ) custom_target( command: ['ln', '-sf', fs.name(this_exe), '@OUTPUT@'], - output: 'build-remote', + output: 'build-remote' + executable_suffix, + # native doesn't allow dangling symlinks, but the target executable often doesn't exist at this time + env : { 'MSYS' : 'winsymlinks:lnk' }, # TODO(Ericson2314): Don't do this once we have the `meson.override_find_program` working) build_by_default: true ) # TODO(Ericson3214): Dosen't yet work #meson.override_find_program(linkname, t) + +localstatedir = nix_store.get_variable( + 'localstatedir', + default_value : get_option('localstatedir'), +) +assert(localstatedir == get_option('localstatedir')) +store_dir = nix_store.get_variable('storedir') +subdir('scripts') +subdir('misc') diff --git a/src/nix/meson.options b/src/nix/meson.options new file mode 100644 index 000000000..8430dd669 --- /dev/null +++ b/src/nix/meson.options @@ -0,0 +1,6 @@ +# vim: filetype=meson + +# A relative path means it gets appended to prefix. +option('profile-dir', type : 'string', value : 'etc/profile.d', + description : 'the path to install shell profile files', +) diff --git a/src/nix/misc b/src/nix/misc new file mode 120000 index 000000000..2825552c9 --- /dev/null +++ b/src/nix/misc @@ -0,0 +1 @@ +../../misc \ No newline at end of file diff --git a/src/nix/nix.md b/src/nix/nix.md index 443757a05..b88bd9a94 100644 --- a/src/nix/nix.md +++ b/src/nix/nix.md @@ -302,7 +302,7 @@ or with an **expression**: terraform "$@" ``` -or with cascading interpreters. Note that the `#! nix` lines don't need to follow after the first line, to accomodate other interpreters. +or with cascading interpreters. Note that the `#! nix` lines don't need to follow after the first line, to accommodate other interpreters. 
``` #!/usr/bin/env nix diff --git a/src/nix/package.nix b/src/nix/package.nix index ef7265458..c7b24efce 100644 --- a/src/nix/package.nix +++ b/src/nix/package.nix @@ -1,21 +1,12 @@ { lib , stdenv -, mkMesonDerivation -, releaseTools - -, meson -, ninja -, pkg-config +, mkMesonExecutable , nix-store , nix-expr , nix-main , nix-cmd -, rapidcheck -, gtest -, runCommand - # Configuration Options , version @@ -25,7 +16,7 @@ let inherit (lib) fileset; in -mkMesonDerivation (finalAttrs: { +mkMesonExecutable (finalAttrs: { pname = "nix"; inherit version; @@ -36,9 +27,10 @@ mkMesonDerivation (finalAttrs: { ../../.version ./.version ./meson.build - # ./meson.options + ./meson.options # Symbolic links to other dirs + ## exes ./build-remote ./doc ./nix-build @@ -48,6 +40,11 @@ mkMesonDerivation (finalAttrs: { ./nix-env ./nix-instantiate ./nix-store + ## dirs + ./scripts + ../../scripts + ./misc + ../../misc # Doc nix files for --help ../../doc/manual/generate-manpage.nix @@ -60,9 +57,9 @@ mkMesonDerivation (finalAttrs: { ../nix-env/buildenv.nix ./get-env.sh ./help-stores.md - ../../doc/manual/src/store/types/index.md.in + ../../doc/manual/source/store/types/index.md.in ./profiles.md - ../../doc/manual/src/command-ref/files/profiles.md + ../../doc/manual/source/command-ref/files/profiles.md # Files ] ++ lib.concatMap @@ -84,12 +81,6 @@ mkMesonDerivation (finalAttrs: { ] ); - nativeBuildInputs = [ - meson - ninja - pkg-config - ]; - buildInputs = [ nix-store nix-expr @@ -112,10 +103,6 @@ mkMesonDerivation (finalAttrs: { LDFLAGS = "-fuse-ld=gold"; }; - separateDebugInfo = !stdenv.hostPlatform.isStatic; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - meta = { platforms = lib.platforms.unix ++ lib.platforms.windows; }; diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 7b1ae5b3d..295b2d32c 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -123,13 +123,12 @@ struct ProfileManifest ProfileManifest() { } - ProfileManifest(EvalState & state, const Path & profile) + ProfileManifest(EvalState & state, const std::filesystem::path & profile) { - auto manifestPath = profile + "/manifest.json"; + auto manifestPath = profile / "manifest.json"; - if (pathExists(manifestPath)) { - auto json = nlohmann::json::parse(readFile(manifestPath)); - std::set foundNames; + if (std::filesystem::exists(manifestPath)) { + auto json = nlohmann::json::parse(readFile(manifestPath.string())); auto version = json.value("version", 0); std::string sUrl; @@ -180,12 +179,12 @@ struct ProfileManifest } } - else if (pathExists(profile + "/manifest.nix")) { + else if (std::filesystem::exists(profile / "manifest.nix")) { // FIXME: needed because of pure mode; ugly. 
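`ProfileManifest` above now receives a `std::filesystem::path` and loads `manifest.json` through it when that file exists, falling back to `manifest.nix` otherwise. A hedged sketch of just that load-and-version step with `nlohmann::json` (field handling is simplified; this is not the full manifest parser):

```cpp
#include <filesystem>
#include <fstream>
#include <nlohmann/json.hpp>

// Illustrative only: load a profile's manifest.json if it exists and report
// its "version" field, defaulting to 0 as the code above does.
static int manifestVersionSketch(const std::filesystem::path & profile)
{
    auto manifestPath = profile / "manifest.json";
    if (!std::filesystem::exists(manifestPath))
        return -1; // no JSON manifest; the caller falls back to manifest.nix
    std::ifstream in(manifestPath);
    auto json = nlohmann::json::parse(in);
    return json.value("version", 0);
}
```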
- state.allowPath(state.store->followLinksToStore(profile)); - state.allowPath(state.store->followLinksToStore(profile + "/manifest.nix")); + state.allowPath(state.store->followLinksToStore(profile.string())); + state.allowPath(state.store->followLinksToStore((profile / "manifest.nix").string())); - auto packageInfos = queryInstalled(state, state.store->followLinksToStore(profile)); + auto packageInfos = queryInstalled(state, state.store->followLinksToStore(profile.string())); for (auto & packageInfo : packageInfos) { ProfileElement element; diff --git a/src/nix/profiles.md b/src/nix/profiles.md index c67a86194..f65e71065 120000 --- a/src/nix/profiles.md +++ b/src/nix/profiles.md @@ -1 +1 @@ -../../doc/manual/src/command-ref/files/profiles.md \ No newline at end of file +../../doc/manual/source/command-ref/files/profiles.md \ No newline at end of file diff --git a/src/nix/run.cc b/src/nix/run.cc index fdca02b88..0f6569bfc 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -12,6 +12,7 @@ #include "source-accessor.hh" #include "progress-bar.hh" #include "eval.hh" +#include #if __linux__ # include @@ -20,6 +21,8 @@ #include +namespace nix::fs { using namespace std::filesystem; } + using namespace nix; std::string chrootHelperName = "__run_in_chroot"; @@ -73,7 +76,7 @@ void execProgramInStore(ref store, } -struct CmdRun : InstallableValueCommand +struct CmdRun : InstallableValueCommand, MixEnvironment { using InstallableCommand::run; @@ -133,6 +136,8 @@ struct CmdRun : InstallableValueCommand // we are about to exec out of this process without running C++ destructors. state->evalCaches.clear(); + setEnviron(); + execProgramInStore(store, UseLookupPath::DontUse, app.program, allArgs); } }; @@ -163,32 +168,31 @@ void chrootHelper(int argc, char * * argv) /* Bind-mount realStoreDir on /nix/store. If the latter mount point doesn't already exists, we have to create a chroot environment containing the mount point and bind mounts for the - children of /. Would be nice if we could use overlayfs here, - but that doesn't work in a user namespace yet (Ubuntu has a - patch for this: - https://bugs.launchpad.net/ubuntu/+source/linux/+bug/1478578). */ + children of /. + Overlayfs for user namespaces is fixed in Linux since ac519625ed + (v5.11, 14 February 2021) */ if (!pathExists(storeDir)) { // FIXME: Use overlayfs? 
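The `FIXME: Use overlayfs?` above is picked up a few lines further down, where the helper first attempts an overlay mount of the store inside the user namespace (supported since Linux 5.11) and only then falls back to a bind mount. A hedged, Linux-only sketch of that fallback order, with error handling reduced to the return value:

```cpp
#include <sys/mount.h>
#include <string>

// Try to overlay `realStoreDir` underneath `storeDir`; if overlayfs is not
// usable in this (user) namespace, fall back to a plain bind mount.
// Returns 0 on success, -1 with errno set otherwise.
static int mountStoreSketch(const std::string & realStoreDir, const std::string & storeDir)
{
    std::string opts = "lowerdir=" + storeDir + ":" + realStoreDir;
    if (mount("overlay", storeDir.c_str(), "overlay", 0, opts.c_str()) == 0)
        return 0;
    return mount(realStoreDir.c_str(), storeDir.c_str(), "", MS_BIND, 0);
}
```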
- Path tmpDir = createTempDir(); + fs::path tmpDir = createTempDir(); createDirs(tmpDir + storeDir); if (mount(realStoreDir.c_str(), (tmpDir + storeDir).c_str(), "", MS_BIND, 0) == -1) throw SysError("mounting '%s' on '%s'", realStoreDir, storeDir); - for (auto entry : std::filesystem::directory_iterator{"/"}) { + for (auto entry : fs::directory_iterator{"/"}) { checkInterrupt(); - auto src = entry.path().string(); - Path dst = tmpDir + "/" + entry.path().filename().string(); + auto src = entry.path(); + fs::path dst = tmpDir / entry.path().filename(); if (pathExists(dst)) continue; - auto st = lstat(src); - if (S_ISDIR(st.st_mode)) { + auto st = entry.symlink_status(); + if (fs::is_directory(st)) { if (mkdir(dst.c_str(), 0700) == -1) throw SysError("creating directory '%s'", dst); if (mount(src.c_str(), dst.c_str(), "", MS_BIND | MS_REC, 0) == -1) throw SysError("mounting '%s' on '%s'", src, dst); - } else if (S_ISLNK(st.st_mode)) + } else if (fs::is_symlink(st)) createSymlink(readLink(src), dst); } @@ -202,12 +206,13 @@ void chrootHelper(int argc, char * * argv) if (chdir(cwd) == -1) throw SysError("chdir to '%s' in chroot", cwd); } else - if (mount(realStoreDir.c_str(), storeDir.c_str(), "", MS_BIND, 0) == -1) - throw SysError("mounting '%s' on '%s'", realStoreDir, storeDir); + if (mount("overlay", storeDir.c_str(), "overlay", MS_MGC_VAL, fmt("lowerdir=%s:%s", storeDir, realStoreDir).c_str()) == -1) + if (mount(realStoreDir.c_str(), storeDir.c_str(), "", MS_BIND, 0) == -1) + throw SysError("mounting '%s' on '%s'", realStoreDir, storeDir); - writeFile("/proc/self/setgroups", "deny"); - writeFile("/proc/self/uid_map", fmt("%d %d %d", uid, uid, 1)); - writeFile("/proc/self/gid_map", fmt("%d %d %d", gid, gid, 1)); + writeFile(fs::path{"/proc/self/setgroups"}, "deny"); + writeFile(fs::path{"/proc/self/uid_map"}, fmt("%d %d %d", uid, uid, 1)); + writeFile(fs::path{"/proc/self/gid_map"}, fmt("%d %d %d", gid, gid, 1)); #if __linux__ if (system != "") diff --git a/src/nix/run.md b/src/nix/run.md index 250ea65aa..eb96e6b31 100644 --- a/src/nix/run.md +++ b/src/nix/run.md @@ -80,6 +80,7 @@ An app is specified by a flake output attribute named apps.x86_64-linux.blender_2_79 = { type = "app"; program = "${self.packages.x86_64-linux.blender_2_79}/bin/blender"; + meta.description = "Run Blender, a free and open-source 3D creation suite."; }; ``` @@ -90,4 +91,6 @@ The only supported attributes are: * `program` (required): The full path of the executable to run. It must reside in the Nix store. +* `meta.description` (optional): A description of the app. + )"" diff --git a/src/nix/scripts b/src/nix/scripts new file mode 120000 index 000000000..c5efc95eb --- /dev/null +++ b/src/nix/scripts @@ -0,0 +1 @@ +../../scripts \ No newline at end of file diff --git a/src/nix/self-exe.cc b/src/nix/self-exe.cc index a260bafd5..77d20a835 100644 --- a/src/nix/self-exe.cc +++ b/src/nix/self-exe.cc @@ -5,17 +5,20 @@ namespace nix { -namespace fs = std::filesystem; +namespace fs { +using namespace std::filesystem; +} fs::path getNixBin(std::optional binaryNameOpt) { auto getBinaryName = [&] { return binaryNameOpt ? *binaryNameOpt : "nix"; }; - // If the environment variable is set, use it unconditionally + // If the environment variable is set, use it unconditionally. 
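The `getNixBin` comment above describes a three-step lookup for sibling binaries: the `NIX_BIN_DIR` environment variable wins, then the directory of the running executable (`getSelfExe()`), then `$PATH`. A hedged illustration of the override — the directory is an assumption used only for illustration:

```bash
# Sketch of the lookup order described above.
# 1. An explicit override always wins (assumed directory, illustration only):
NIX_BIN_DIR=/opt/nix/bin nix --version
# 2. Otherwise the directory of the running executable (getSelfExe()) is tried,
# 3. and as a last resort a compatible `nix` is looked up on $PATH.
```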
if (auto envOpt = getEnvNonEmpty("NIX_BIN_DIR")) return fs::path{*envOpt} / std::string{getBinaryName()}; - // Use some-times avaiable OS tricks to get to the path of this Nix, and try that + // Try OS tricks, if available, to get to the path of this Nix, and + // see if we can find the right executable next to that. if (auto selfOpt = getSelfExe()) { fs::path path{*selfOpt}; if (binaryNameOpt) diff --git a/src/nix/self-exe.hh b/src/nix/self-exe.hh index 0772afa67..3161553ec 100644 --- a/src/nix/self-exe.hh +++ b/src/nix/self-exe.hh @@ -17,9 +17,9 @@ namespace nix { * Instead, we'll query the OS for the path to the current executable, * using `getSelfExe()`. * - * As a last resort, we resort to `PATH`. Hopefully we find a `nix` - * there that's compatible. If you're porting Nix to a new platform, - * that might be good enough for a while, but you'll want to improve + * As a last resort, we rely on `PATH`. Hopefully we find a `nix` there + * that's compatible. If you're porting Nix to a new platform, that + * might be good enough for a while, but you'll want to improve * `getSelfExe()` to work on your platform. * * @param binary_name the exact binary name we're looking up. Might be diff --git a/src/nix/shell.md b/src/nix/shell.md index 7c315fb3f..677151a85 100644 --- a/src/nix/shell.md +++ b/src/nix/shell.md @@ -48,7 +48,7 @@ R""( # Description `nix shell` runs a command in an environment in which the `$PATH` variable -provides the specified [*installables*](./nix.md#installable). If no command is specified, it starts the +provides the specified [*installables*](./nix.md#installables). If no command is specified, it starts the default shell of your user account specified by `$SHELL`. # Use as a `#!`-interpreter diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc index 1e277cbbe..2afe4b267 100644 --- a/src/nix/sigs.cc +++ b/src/nix/sigs.cc @@ -177,7 +177,7 @@ struct CmdKeyGenerateSecret : Command throw UsageError("required argument '--key-name' is missing"); stopProgressBar(); - writeFull(getStandardOut(), SecretKey::generate(*keyName).to_string()); + writeFull(getStandardOutput(), SecretKey::generate(*keyName).to_string()); } }; @@ -199,7 +199,7 @@ struct CmdKeyConvertSecretToPublic : Command { SecretKey secretKey(drainFD(STDIN_FILENO)); stopProgressBar(); - writeFull(getStandardOut(), secretKey.toPublicKey().to_string()); + writeFull(getStandardOutput(), secretKey.toPublicKey().to_string()); } }; diff --git a/src/nix/unix/daemon.cc b/src/nix/unix/daemon.cc index 66d8dbcf0..746963a01 100644 --- a/src/nix/unix/daemon.cc +++ b/src/nix/unix/daemon.cc @@ -33,6 +33,10 @@ #include #include +#if __linux__ +#include "cgroup.hh" +#endif + #if __APPLE__ || __FreeBSD__ #include #endif @@ -312,6 +316,27 @@ static void daemonLoop(std::optional forceTrustClientOpt) // Get rid of children automatically; don't let them become zombies. setSigChldAction(true); + #if __linux__ + if (settings.useCgroups) { + experimentalFeatureSettings.require(Xp::Cgroups); + + // This also sets the root cgroup to the current one. + auto rootCgroup = getRootCgroup(); + auto cgroupFS = getCgroupFS(); + if (!cgroupFS) + throw Error("cannot determine the cgroups file system"); + auto rootCgroupPath = canonPath(*cgroupFS + "/" + rootCgroup); + if (!pathExists(rootCgroupPath)) + throw Error("expected cgroup directory '%s'", rootCgroupPath); + auto daemonCgroupPath = rootCgroupPath + "/nix-daemon"; + // Create new sub-cgroup for the daemon. 
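The daemon hunk here creates a `nix-daemon` sub-cgroup under the daemon's current cgroup and moves the daemon process into it (guarded by the `cgroups` experimental feature). The same steps, sketched as shell against an assumed cgroup v2 location; the real code derives the root from `getRootCgroup()`/`getCgroupFS()`:

```bash
# Sketch only: mirrors the daemon.cc cgroup setup. The root path is an
# assumption for illustration, not a value taken from the patch.
root=/sys/fs/cgroup/system.slice/nix-daemon.service
mkdir -p "$root/nix-daemon"                 # create the sub-cgroup (EEXIST is fine)
echo $$ > "$root/nix-daemon/cgroup.procs"   # move the current process into it
```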
+ if (mkdir(daemonCgroupPath.c_str(), 0755) != 0 && errno != EEXIST) + throw SysError("creating cgroup '%s'", daemonCgroupPath); + // Move daemon into the new cgroup. + writeFile(daemonCgroupPath + "/cgroup.procs", fmt("%d", getpid())); + } + #endif + // Loop accepting connections. while (1) { diff --git a/src/perl/package.nix b/src/perl/package.nix index 0b9343fba..fe617fd47 100644 --- a/src/perl/package.nix +++ b/src/perl/package.nix @@ -1,13 +1,10 @@ { lib , stdenv , mkMesonDerivation +, pkg-config , perl , perlPackages -, meson -, ninja -, pkg-config , nix-store -, darwin , version , curl , bzip2 @@ -36,8 +33,6 @@ perl.pkgs.toPerlModule (mkMesonDerivation (finalAttrs: { ]); nativeBuildInputs = [ - meson - ninja pkg-config perl curl diff --git a/tests/functional/binary-cache.sh b/tests/functional/binary-cache.sh index 6a177b657..ff39ab3b7 100755 --- a/tests/functional/binary-cache.sh +++ b/tests/functional/binary-cache.sh @@ -238,7 +238,7 @@ clearCache # preserve quotes variables in the single-quoted string # shellcheck disable=SC2016 outPath=$(nix-build --no-out-link -E ' - with import ./config.nix; + with import '"${config_nix}"'; mkDerivation { name = "nar-listing"; buildCommand = "mkdir $out; echo foo > $out/bar; ln -s xyzzy $out/link"; @@ -258,7 +258,7 @@ clearCache # preserve quotes variables in the single-quoted string # shellcheck disable=SC2016 outPath=$(nix-build --no-out-link -E ' - with import ./config.nix; + with import '"${config_nix}"'; mkDerivation { name = "debug-info"; buildCommand = "mkdir -p $out/lib/debug/.build-id/02; echo foo > $out/lib/debug/.build-id/02/623eda209c26a59b1a8638ff7752f6b945c26b.debug"; @@ -276,7 +276,7 @@ diff -u \ # preserve quotes variables in the single-quoted string # shellcheck disable=SC2016 expr=' - with import ./config.nix; + with import '"${config_nix}"'; mkDerivation { name = "multi-output"; buildCommand = "mkdir -p $out; echo foo > $doc; echo $doc > $out/docref"; diff --git a/tests/functional/build-hook-ca-fixed.nix b/tests/functional/build-hook-ca-fixed.nix index 0ce6d9b12..427ec2c31 100644 --- a/tests/functional/build-hook-ca-fixed.nix +++ b/tests/functional/build-hook-ca-fixed.nix @@ -1,6 +1,6 @@ { busybox }: -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; let diff --git a/tests/functional/build-hook.nix b/tests/functional/build-hook.nix index 99a13aee4..1f0e17a3b 100644 --- a/tests/functional/build-hook.nix +++ b/tests/functional/build-hook.nix @@ -1,6 +1,6 @@ { busybox, contentAddressed ? false }: -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; let diff --git a/tests/functional/build.sh b/tests/functional/build.sh index 5396a465f..3f65a7c2c 100755 --- a/tests/functional/build.sh +++ b/tests/functional/build.sh @@ -84,6 +84,7 @@ expectStderr 1 nix build --expr '""' --no-link \ | grepQuiet "has 0 entries in its context. It should only have exactly one entry" # Too much string context +# shellcheck disable=SC2016 # The ${} in this is Nix, not shell expectStderr 1 nix build --impure --expr 'with (import ./multiple-outputs.nix).e.a_a; "${drvPath}${outPath}"' --no-link \ | grepQuiet "has 2 entries in its context. It should only have exactly one entry" @@ -160,7 +161,7 @@ printf "%s\n" "$drv^*" | nix build --no-link --stdin --json | jq --exit-status ' out="$(nix build -f fod-failing.nix -L 2>&1)" && status=0 || status=$? test "$status" = 1 # one "hash mismatch" error, one "build of ... 
failed" -test "$(<<<"$out" grep -E '^error:' | wc -l)" = 2 +test "$(<<<"$out" grep -cE '^error:')" = 2 <<<"$out" grepQuiet -E "hash mismatch in fixed-output derivation '.*-x1\\.drv'" <<<"$out" grepQuiet -vE "hash mismatch in fixed-output derivation '.*-x3\\.drv'" <<<"$out" grepQuiet -vE "hash mismatch in fixed-output derivation '.*-x2\\.drv'" @@ -169,7 +170,7 @@ test "$(<<<"$out" grep -E '^error:' | wc -l)" = 2 out="$(nix build -f fod-failing.nix -L x1 x2 x3 --keep-going 2>&1)" && status=0 || status=$? test "$status" = 1 # three "hash mismatch" errors - for each failing fod, one "build of ... failed" -test "$(<<<"$out" grep -E '^error:' | wc -l)" = 4 +test "$(<<<"$out" grep -cE '^error:')" = 4 <<<"$out" grepQuiet -E "hash mismatch in fixed-output derivation '.*-x1\\.drv'" <<<"$out" grepQuiet -E "hash mismatch in fixed-output derivation '.*-x3\\.drv'" <<<"$out" grepQuiet -E "hash mismatch in fixed-output derivation '.*-x2\\.drv'" @@ -177,13 +178,13 @@ test "$(<<<"$out" grep -E '^error:' | wc -l)" = 4 out="$(nix build -f fod-failing.nix -L x4 2>&1)" && status=0 || status=$? test "$status" = 1 -test "$(<<<"$out" grep -E '^error:' | wc -l)" = 2 +test "$(<<<"$out" grep -cE '^error:')" = 2 <<<"$out" grepQuiet -E "error: 1 dependencies of derivation '.*-x4\\.drv' failed to build" <<<"$out" grepQuiet -E "hash mismatch in fixed-output derivation '.*-x2\\.drv'" out="$(nix build -f fod-failing.nix -L x4 --keep-going 2>&1)" && status=0 || status=$? test "$status" = 1 -test "$(<<<"$out" grep -E '^error:' | wc -l)" = 3 +test "$(<<<"$out" grep -cE '^error:')" = 3 <<<"$out" grepQuiet -E "error: 2 dependencies of derivation '.*-x4\\.drv' failed to build" <<<"$out" grepQuiet -vE "hash mismatch in fixed-output derivation '.*-x3\\.drv'" <<<"$out" grepQuiet -vE "hash mismatch in fixed-output derivation '.*-x2\\.drv'" diff --git a/tests/functional/ca/content-addressed.nix b/tests/functional/ca/content-addressed.nix index 2559c562f..411ebb86b 100644 --- a/tests/functional/ca/content-addressed.nix +++ b/tests/functional/ca/content-addressed.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/ca/config.nix"; let mkCADerivation = args: mkDerivation ({ __contentAddressed = true; diff --git a/tests/functional/ca/derivation-json.sh b/tests/functional/ca/derivation-json.sh index 1e2a8fe35..bd6dd7177 100644 --- a/tests/functional/ca/derivation-json.sh +++ b/tests/functional/ca/derivation-json.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# + source common.sh export NIX_TESTS_CA_BY_DEFAULT=1 diff --git a/tests/functional/ca/duplicate-realisation-in-closure.sh b/tests/functional/ca/duplicate-realisation-in-closure.sh index 0baf15cc2..4a5e8c042 100644 --- a/tests/functional/ca/duplicate-realisation-in-closure.sh +++ b/tests/functional/ca/duplicate-realisation-in-closure.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -source ./common.sh +source common.sh requireDaemonNewerThan "2.4pre20210625" diff --git a/tests/functional/ca/import-derivation.sh b/tests/functional/ca/import-derivation.sh deleted file mode 100644 index e98e0fbd0..000000000 --- a/tests/functional/ca/import-derivation.sh +++ /dev/null @@ -1,6 +0,0 @@ -source common.sh - -export NIX_TESTS_CA_BY_DEFAULT=1 - -cd .. 
&& source import-derivation.sh - diff --git a/tests/functional/ca/import-from-derivation.sh b/tests/functional/ca/import-from-derivation.sh new file mode 100644 index 000000000..708d2fc78 --- /dev/null +++ b/tests/functional/ca/import-from-derivation.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +source common.sh + +export NIX_TESTS_CA_BY_DEFAULT=1 + +cd .. && source import-from-derivation.sh + diff --git a/tests/functional/ca/local.mk b/tests/functional/ca/local.mk index 4f86b268f..7c2fcc451 100644 --- a/tests/functional/ca/local.mk +++ b/tests/functional/ca/local.mk @@ -7,7 +7,7 @@ ca-tests := \ $(d)/duplicate-realisation-in-closure.sh \ $(d)/eval-store.sh \ $(d)/gc.sh \ - $(d)/import-derivation.sh \ + $(d)/import-from-derivation.sh \ $(d)/new-build-cmd.sh \ $(d)/nix-copy.sh \ $(d)/nix-run.sh \ diff --git a/tests/functional/ca/meson.build b/tests/functional/ca/meson.build index f682ab28f..7a7fcc5cf 100644 --- a/tests/functional/ca/meson.build +++ b/tests/functional/ca/meson.build @@ -16,7 +16,7 @@ suites += { 'duplicate-realisation-in-closure.sh', 'eval-store.sh', 'gc.sh', - 'import-derivation.sh', + 'import-from-derivation.sh', 'new-build-cmd.sh', 'nix-copy.sh', 'nix-run.sh', @@ -29,5 +29,5 @@ suites += { 'substitute.sh', 'why-depends.sh', ], - 'workdir': meson.current_build_dir(), + 'workdir': meson.current_source_dir(), } diff --git a/tests/functional/ca/new-build-cmd.sh b/tests/functional/ca/new-build-cmd.sh index 432d4d132..408bfb0f6 100644 --- a/tests/functional/ca/new-build-cmd.sh +++ b/tests/functional/ca/new-build-cmd.sh @@ -1,3 +1,5 @@ +#!/usr/bin/env bash + source common.sh export NIX_TESTS_CA_BY_DEFAULT=1 diff --git a/tests/functional/ca/nix-run.sh b/tests/functional/ca/nix-run.sh index 920950c11..21c09117e 100755 --- a/tests/functional/ca/nix-run.sh +++ b/tests/functional/ca/nix-run.sh @@ -2,6 +2,11 @@ source common.sh -FLAKE_PATH=path:$PWD +flakeDir="$TEST_HOME/flake" +mkdir -p "${flakeDir}" +cp flake.nix "${_NIX_TEST_BUILD_DIR}/ca/config.nix" content-addressed.nix "${flakeDir}" -nix run --no-write-lock-file "$FLAKE_PATH#runnable" +# `config.nix` cannot be gotten via build dir / env var (runs afoul pure eval). Instead get from flake. +removeBuildDirRef "$flakeDir"/*.nix + +nix run --no-write-lock-file "path:${flakeDir}#runnable" diff --git a/tests/functional/ca/nix-shell.sh b/tests/functional/ca/nix-shell.sh index 1c5a6639f..d1fbe54d1 100755 --- a/tests/functional/ca/nix-shell.sh +++ b/tests/functional/ca/nix-shell.sh @@ -5,4 +5,3 @@ source common.sh CONTENT_ADDRESSED=true cd .. source ./nix-shell.sh - diff --git a/tests/functional/ca/nondeterministic.nix b/tests/functional/ca/nondeterministic.nix index d6d099a3e..740be4bd2 100644 --- a/tests/functional/ca/nondeterministic.nix +++ b/tests/functional/ca/nondeterministic.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/ca/config.nix"; let mkCADerivation = args: mkDerivation ({ __contentAddressed = true; diff --git a/tests/functional/ca/racy.nix b/tests/functional/ca/racy.nix index 555a15484..cadd98675 100644 --- a/tests/functional/ca/racy.nix +++ b/tests/functional/ca/racy.nix @@ -2,7 +2,7 @@ # build it at once. 
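Several hunks in this area copy `config.nix` next to a flake and then call `removeBuildDirRef`, because flake evaluation is pure and `builtins.getEnv` yields nothing there. A minimal illustration of the failure mode being avoided, assuming `nix-command` is enabled and that `getEnv` returns the empty string under pure evaluation:

```bash
# Impure evaluation can see the variable...
_NIX_TEST_BUILD_DIR=/tmp/build nix eval --impure --raw --expr 'builtins.getEnv "_NIX_TEST_BUILD_DIR"'
# ...but pure (flake-style) evaluation cannot, so an import built from
# builtins.getEnv "_NIX_TEST_BUILD_DIR" would not resolve to config.nix.
_NIX_TEST_BUILD_DIR=/tmp/build nix eval --raw --expr 'builtins.getEnv "_NIX_TEST_BUILD_DIR"'
```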
-with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/ca/config.nix"; mkDerivation { name = "simple"; diff --git a/tests/functional/ca/repl.sh b/tests/functional/ca/repl.sh index 3808c7cb2..0bbbebd85 100644 --- a/tests/functional/ca/repl.sh +++ b/tests/functional/ca/repl.sh @@ -1,3 +1,5 @@ +#!/usr/bin/env bash + source common.sh export NIX_TESTS_CA_BY_DEFAULT=1 diff --git a/tests/functional/ca/why-depends.sh b/tests/functional/ca/why-depends.sh index 0c079f63b..0af8a5440 100644 --- a/tests/functional/ca/why-depends.sh +++ b/tests/functional/ca/why-depends.sh @@ -1,3 +1,5 @@ +#!/usr/bin/env bash + source common.sh export NIX_TESTS_CA_BY_DEFAULT=1 diff --git a/tests/functional/case-collision.nar b/tests/functional/case-collision.nar new file mode 100644 index 000000000..2eff86901 Binary files /dev/null and b/tests/functional/case-collision.nar differ diff --git a/tests/functional/case-hack.sh b/tests/functional/case-hack.sh deleted file mode 100755 index feddc6583..000000000 --- a/tests/functional/case-hack.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env bash - -source common.sh - -TODO_NixOS - -clearStore - -rm -rf "$TEST_ROOT/case" - -opts=("--option" "use-case-hack" "true") - -# Check whether restoring and dumping a NAR that contains case -# collisions is round-tripping, even on a case-insensitive system. - -nix-store "${opts[@]}" --restore "$TEST_ROOT/case" < case.nar -nix-store "${opts[@]}" --dump "$TEST_ROOT/case" > "$TEST_ROOT/case.nar" -cmp case.nar "$TEST_ROOT/case.nar" -[ "$(nix-hash "${opts[@]}" --type sha256 "$TEST_ROOT/case")" = "$(nix-hash --flat --type sha256 case.nar)" ] - -# Check whether we detect true collisions (e.g. those remaining after -# removal of the suffix). -touch "$TEST_ROOT/case/xt_CONNMARK.h~nix~case~hack~3" -(! nix-store "${opts[@]}" --dump "$TEST_ROOT/case" > /dev/null) diff --git a/tests/functional/check-refs.nix b/tests/functional/check-refs.nix index 89690e456..54957f635 100644 --- a/tests/functional/check-refs.nix +++ b/tests/functional/check-refs.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; rec { diff --git a/tests/functional/check-reqs.nix b/tests/functional/check-reqs.nix index 41436cb48..4e059f5a4 100644 --- a/tests/functional/check-reqs.nix +++ b/tests/functional/check-reqs.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; rec { dep1 = mkDerivation { diff --git a/tests/functional/check.nix b/tests/functional/check.nix index ddab8eea9..13638eae8 100644 --- a/tests/functional/check.nix +++ b/tests/functional/check.nix @@ -1,6 +1,6 @@ {checkBuildId ? 0}: -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; { nondeterministic = mkDerivation { diff --git a/tests/functional/check.sh b/tests/functional/check.sh index 9b15dccb6..b21349288 100755 --- a/tests/functional/check.sh +++ b/tests/functional/check.sh @@ -7,9 +7,9 @@ buggyNeedLocalStore "see #4813" checkBuildTempDirRemoved () { - buildDir=$(sed -n 's/CHECK_TMPDIR=//p' $1 | head -1) + buildDir=$(sed -n 's/CHECK_TMPDIR=//p' "$1" | head -1) checkBuildIdFile=${buildDir}/checkBuildId - [[ ! -f $checkBuildIdFile ]] || ! grep $checkBuildId $checkBuildIdFile + [[ ! -f $checkBuildIdFile ]] || ! 
grep "$checkBuildId" "$checkBuildIdFile" } # written to build temp directories to verify created by this instance @@ -23,20 +23,20 @@ nix-build dependencies.nix --no-out-link nix-build dependencies.nix --no-out-link --check # Build failure exit codes (100, 104, etc.) are from -# doc/manual/src/command-ref/status-build-failure.md +# doc/manual/source/command-ref/status-build-failure.md # check for dangling temporary build directories # only retain if build fails and --keep-failed is specified, or... # ...build is non-deterministic and --check and --keep-failed are both specified -nix-build check.nix -A failed --argstr checkBuildId $checkBuildId \ - --no-out-link 2> $TEST_ROOT/log || status=$? +nix-build check.nix -A failed --argstr checkBuildId "$checkBuildId" \ + --no-out-link 2> "$TEST_ROOT/log" || status=$? [ "$status" = "100" ] -checkBuildTempDirRemoved $TEST_ROOT/log +checkBuildTempDirRemoved "$TEST_ROOT/log" -nix-build check.nix -A failed --argstr checkBuildId $checkBuildId \ - --no-out-link --keep-failed 2> $TEST_ROOT/log || status=$? +nix-build check.nix -A failed --argstr checkBuildId "$checkBuildId" \ + --no-out-link --keep-failed 2> "$TEST_ROOT/log" || status=$? [ "$status" = "100" ] -if checkBuildTempDirRemoved $TEST_ROOT/log; then false; fi +if checkBuildTempDirRemoved "$TEST_ROOT/log"; then false; fi test_custom_build_dir() { local customBuildDir="$TEST_ROOT/custom-build-dir" @@ -44,42 +44,46 @@ test_custom_build_dir() { # Nix does not create the parent directories, and perhaps it shouldn't try to # decide the permissions of build-dir. mkdir "$customBuildDir" - nix-build check.nix -A failed --argstr checkBuildId $checkBuildId \ - --no-out-link --keep-failed --option build-dir "$TEST_ROOT/custom-build-dir" 2> $TEST_ROOT/log || status=$? + nix-build check.nix -A failed --argstr checkBuildId "$checkBuildId" \ + --no-out-link --keep-failed --option build-dir "$TEST_ROOT/custom-build-dir" 2> "$TEST_ROOT/log" || status=$? 
[ "$status" = "100" ] [[ 1 == "$(count "$customBuildDir/nix-build-"*)" ]] - local buildDir="$customBuildDir/nix-build-"*"" - if [[ -e $buildDir/build ]]; then - buildDir=$buildDir/build + local buildDir=("$customBuildDir/nix-build-"*) + if [[ "${#buildDir[@]}" -ne 1 ]]; then + echo "expected one nix-build-* directory, got: ${buildDir[*]}" >&2 + exit 1 fi - grep $checkBuildId $buildDir/checkBuildId + if [[ -e ${buildDir[*]}/build ]]; then + buildDir[0]="${buildDir[*]}/build" + fi + grep "$checkBuildId" "${buildDir[*]}/checkBuildId" } test_custom_build_dir -nix-build check.nix -A deterministic --argstr checkBuildId $checkBuildId \ - --no-out-link 2> $TEST_ROOT/log -checkBuildTempDirRemoved $TEST_ROOT/log +nix-build check.nix -A deterministic --argstr checkBuildId "$checkBuildId" \ + --no-out-link 2> "$TEST_ROOT/log" +checkBuildTempDirRemoved "$TEST_ROOT/log" -nix-build check.nix -A deterministic --argstr checkBuildId $checkBuildId \ - --no-out-link --check --keep-failed 2> $TEST_ROOT/log -if grepQuiet 'may not be deterministic' $TEST_ROOT/log; then false; fi -checkBuildTempDirRemoved $TEST_ROOT/log +nix-build check.nix -A deterministic --argstr checkBuildId "$checkBuildId" \ + --no-out-link --check --keep-failed 2> "$TEST_ROOT/log" +if grepQuiet 'may not be deterministic' "$TEST_ROOT/log"; then false; fi +checkBuildTempDirRemoved "$TEST_ROOT/log" -nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \ - --no-out-link 2> $TEST_ROOT/log -checkBuildTempDirRemoved $TEST_ROOT/log +nix-build check.nix -A nondeterministic --argstr checkBuildId "$checkBuildId" \ + --no-out-link 2> "$TEST_ROOT/log" +checkBuildTempDirRemoved "$TEST_ROOT/log" -nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \ - --no-out-link --check 2> $TEST_ROOT/log || status=$? -grep 'may not be deterministic' $TEST_ROOT/log +nix-build check.nix -A nondeterministic --argstr checkBuildId "$checkBuildId" \ + --no-out-link --check 2> "$TEST_ROOT/log" || status=$? +grep 'may not be deterministic' "$TEST_ROOT/log" [ "$status" = "104" ] -checkBuildTempDirRemoved $TEST_ROOT/log +checkBuildTempDirRemoved "$TEST_ROOT/log" -nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \ - --no-out-link --check --keep-failed 2> $TEST_ROOT/log || status=$? -grep 'may not be deterministic' $TEST_ROOT/log +nix-build check.nix -A nondeterministic --argstr checkBuildId "$checkBuildId" \ + --no-out-link --check --keep-failed 2> "$TEST_ROOT/log" || status=$? +grep 'may not be deterministic' "$TEST_ROOT/log" [ "$status" = "104" ] -if checkBuildTempDirRemoved $TEST_ROOT/log; then false; fi +if checkBuildTempDirRemoved "$TEST_ROOT/log"; then false; fi TODO_NixOS @@ -87,24 +91,24 @@ clearStore path=$(nix-build check.nix -A fetchurl --no-out-link) -chmod +w $path -echo foo > $path -chmod -w $path +chmod +w "$path" +echo foo > "$path" +chmod -w "$path" nix-build check.nix -A fetchurl --no-out-link --check # Note: "check" doesn't repair anything, it just compares to the hash stored in the database. -[[ $(cat $path) = foo ]] +[[ $(cat "$path") = foo ]] nix-build check.nix -A fetchurl --no-out-link --repair -[[ $(cat $path) != foo ]] +[[ $(cat "$path") != foo ]] -echo 'Hello World' > $TEST_ROOT/dummy +echo 'Hello World' > "$TEST_ROOT/dummy" nix-build check.nix -A hashmismatch --no-out-link || status=$? 
[ "$status" = "102" ] -echo -n > $TEST_ROOT/dummy +echo -n > "$TEST_ROOT/dummy" nix-build check.nix -A hashmismatch --no-out-link -echo 'Hello World' > $TEST_ROOT/dummy +echo 'Hello World' > "$TEST_ROOT/dummy" nix-build check.nix -A hashmismatch --no-out-link --check || status=$? [ "$status" = "102" ] diff --git a/tests/functional/chroot-store.sh b/tests/functional/chroot-store.sh index 03803a2b9..8c2a969d3 100755 --- a/tests/functional/chroot-store.sh +++ b/tests/functional/chroot-store.sh @@ -37,7 +37,10 @@ if canUseSandbox; then } EOF - cp simple.nix shell.nix simple.builder.sh config.nix "$flakeDir/" + cp simple.nix shell.nix simple.builder.sh "${config_nix}" "$flakeDir/" + + # `config.nix` cannot be gotten via build dir / env var (runs afoul pure eval). Instead get from flake. + removeBuildDirRef "$flakeDir"/*.nix TODO_NixOS diff --git a/tests/functional/common/functions.sh b/tests/functional/common/functions.sh index d05fac4e7..286bb58e8 100644 --- a/tests/functional/common/functions.sh +++ b/tests/functional/common/functions.sh @@ -28,7 +28,7 @@ clearProfiles() { # Clear the store, but do not fail if we're in an environment where we can't. # This allows the test to run in a NixOS test environment, where we use the system store. -# See doc/manual/src/contributing/testing.md / Running functional tests on NixOS. +# See doc/manual/source/contributing/testing.md / Running functional tests on NixOS. clearStoreIfPossible() { if isTestOnNixOS; then echo "clearStoreIfPossible: Not clearing store, because we're on NixOS. Moving on." @@ -343,6 +343,15 @@ count() { echo $# } +# Sometimes, e.g. due to pure eval, restricted eval, or sandboxing, we +# cannot look up `config.nix` in the build dir, and have to instead get +# it from the current directory. (In this case, the current directly +# will be somewhere in `$TEST_ROOT`.) +removeBuildDirRef() { + # shellcheck disable=SC2016 # The ${} in this is Nix, not shell + sed -i -e 's,"${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/[^ ]*config.nix",./config.nix,' "$@" +} + trap onError ERR fi # COMMON_FUNCTIONS_SH_SOURCED diff --git a/tests/functional/common/init.sh b/tests/functional/common/init.sh index d849c0734..66b44c76f 100755 --- a/tests/functional/common/init.sh +++ b/tests/functional/common/init.sh @@ -19,7 +19,7 @@ EOF # When we're doing everything in the same store, we need to bring # dependencies into context. 
- sed -i "$(dirname "${BASH_SOURCE[0]}")"/../config.nix \ + sed -i "${_NIX_TEST_BUILD_DIR}/config.nix" \ -e 's^\(shell\) = "/nix/store/\([^/]*\)/\(.*\)";^\1 = builtins.appendContext "/nix/store/\2" { "/nix/store/\2".path = true; } + "/\3";^' \ -e 's^\(path\) = "/nix/store/\([^/]*\)/\(.*\)";^\1 = builtins.appendContext "/nix/store/\2" { "/nix/store/\2".path = true; } + "/\3";^' \ ; diff --git a/tests/functional/common/paths.sh b/tests/functional/common/paths.sh index 70bdca796..dec3592af 100644 --- a/tests/functional/common/paths.sh +++ b/tests/functional/common/paths.sh @@ -8,11 +8,10 @@ COMMON_PATHS_SH_SOURCED=1 commonDir="$(readlink -f "$(dirname "${BASH_SOURCE[0]-$0}")")" -# Since these are generated files -# shellcheck disable=SC1091 +# Just for `isTestOnNixOS` source "$commonDir/functions.sh" # shellcheck disable=SC1091 -source "$commonDir/subst-vars.sh" +source "${_NIX_TEST_BUILD_DIR}/common/subst-vars.sh" # Make sure shellcheck knows this will be defined by the above generated snippet : "${bash?}" "${bindir?}" diff --git a/tests/functional/common/subst-vars.sh.in b/tests/functional/common/subst-vars.sh.in index 6ad22fa70..df140dec1 100644 --- a/tests/functional/common/subst-vars.sh.in +++ b/tests/functional/common/subst-vars.sh.in @@ -1,4 +1,4 @@ -# NOTE: instances of @variable@ are substituted as defined in /mk/templates.mk +# NOTE: instances of @variable@ are substituted by the build system if [[ -z "${COMMON_SUBST_VARS_SH_SOURCED-}" ]]; then diff --git a/tests/functional/common/vars.sh b/tests/functional/common/vars.sh index c99a6a5c2..4b88e8526 100644 --- a/tests/functional/common/vars.sh +++ b/tests/functional/common/vars.sh @@ -6,11 +6,14 @@ if [[ -z "${COMMON_VARS_SH_SOURCED-}" ]]; then COMMON_VARS_SH_SOURCED=1 +_NIX_TEST_SOURCE_DIR=$(realpath "${_NIX_TEST_SOURCE_DIR}") +_NIX_TEST_BUILD_DIR=$(realpath "${_NIX_TEST_BUILD_DIR}") + commonDir="$(readlink -f "$(dirname "${BASH_SOURCE[0]-$0}")")" # Since this is a generated file # shellcheck disable=SC1091 -source "$commonDir/subst-vars.sh" +source "${_NIX_TEST_BUILD_DIR}/common/subst-vars.sh" # Make sure shellcheck knows all these will be defined by the above generated snippet : "${bindir?} ${coreutils?} ${dot?} ${SHELL?} ${busybox?} ${version?} ${system?}" export coreutils dot busybox version system @@ -69,4 +72,9 @@ if [[ $(uname) == Linux ]] && [[ -L /proc/self/ns/user ]] && unshare --user true _canUseSandbox=1 fi +# Very common, shorthand helps +# Used in other files +# shellcheck disable=SC2034 +config_nix="${_NIX_TEST_BUILD_DIR}/config.nix" + fi # COMMON_VARS_SH_SOURCED diff --git a/tests/functional/dependencies.nix b/tests/functional/dependencies.nix index be1a7ae9a..db06321da 100644 --- a/tests/functional/dependencies.nix +++ b/tests/functional/dependencies.nix @@ -1,5 +1,5 @@ { hashInvalidator ? 
"" }: -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; let { diff --git a/tests/functional/dot.nar b/tests/functional/dot.nar new file mode 100644 index 000000000..3a9452f67 Binary files /dev/null and b/tests/functional/dot.nar differ diff --git a/tests/functional/dotdot.nar b/tests/functional/dotdot.nar new file mode 100644 index 000000000..f8d019c39 Binary files /dev/null and b/tests/functional/dotdot.nar differ diff --git a/tests/functional/duplicate.nar b/tests/functional/duplicate.nar new file mode 100644 index 000000000..1d0993ed4 Binary files /dev/null and b/tests/functional/duplicate.nar differ diff --git a/tests/functional/dyn-drv/meson.build b/tests/functional/dyn-drv/meson.build index 3c671d013..5b60a4698 100644 --- a/tests/functional/dyn-drv/meson.build +++ b/tests/functional/dyn-drv/meson.build @@ -15,5 +15,5 @@ suites += { 'dep-built-drv.sh', 'old-daemon-error-hack.sh', ], - 'workdir': meson.current_build_dir(), + 'workdir': meson.current_source_dir(), } diff --git a/tests/functional/dyn-drv/old-daemon-error-hack.nix b/tests/functional/dyn-drv/old-daemon-error-hack.nix index c9d4a62d4..7d3ccf7e4 100644 --- a/tests/functional/dyn-drv/old-daemon-error-hack.nix +++ b/tests/functional/dyn-drv/old-daemon-error-hack.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/dyn-drv/config.nix"; # A simple content-addressed derivation. # The derivation can be arbitrarily modified by passing a different `seed`, diff --git a/tests/functional/dyn-drv/recursive-mod-json.nix b/tests/functional/dyn-drv/recursive-mod-json.nix index c6a24ca4f..0e778aa7f 100644 --- a/tests/functional/dyn-drv/recursive-mod-json.nix +++ b/tests/functional/dyn-drv/recursive-mod-json.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/dyn-drv/config.nix"; let innerName = "foo"; in diff --git a/tests/functional/dyn-drv/text-hashed-output.nix b/tests/functional/dyn-drv/text-hashed-output.nix index 99203b518..aa46fff61 100644 --- a/tests/functional/dyn-drv/text-hashed-output.nix +++ b/tests/functional/dyn-drv/text-hashed-output.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/dyn-drv/config.nix"; # A simple content-addressed derivation. # The derivation can be arbitrarily modified by passing a different `seed`, diff --git a/tests/functional/empty.nar b/tests/functional/empty.nar new file mode 100644 index 000000000..43434f2b4 Binary files /dev/null and b/tests/functional/empty.nar differ diff --git a/tests/functional/eval.sh b/tests/functional/eval.sh index 22d2d02a2..7af49d7fd 100755 --- a/tests/functional/eval.sh +++ b/tests/functional/eval.sh @@ -35,17 +35,17 @@ nix-instantiate --eval -E 'assert 1 + 2 == 3; true' [[ "$(nix-instantiate --eval -E '{"assert"=1;bar=2;}')" == '{ "assert" = 1; bar = 2; }' ]] # Check that symlink cycles don't cause a hang. -ln -sfn cycle.nix $TEST_ROOT/cycle.nix -(! nix eval --file $TEST_ROOT/cycle.nix) +ln -sfn cycle.nix "$TEST_ROOT/cycle.nix" +(! nix eval --file "$TEST_ROOT/cycle.nix") # Check that relative symlinks are resolved correctly. -mkdir -p $TEST_ROOT/xyzzy $TEST_ROOT/foo -ln -sfn ../xyzzy $TEST_ROOT/foo/bar -printf 123 > $TEST_ROOT/xyzzy/default.nix +mkdir -p "$TEST_ROOT/xyzzy" "$TEST_ROOT/foo" +ln -sfn ../xyzzy "$TEST_ROOT/foo/bar" +printf 123 > "$TEST_ROOT/xyzzy/default.nix" [[ $(nix eval --impure --expr "import $TEST_ROOT/foo/bar") = 123 ]] # Test --arg-from-file. 
-[[ "$(nix eval --raw --arg-from-file foo config.nix --expr '{ foo }: { inherit foo; }' foo)" = "$(cat config.nix)" ]] +[[ "$(nix eval --raw --arg-from-file foo "${config_nix}" --expr '{ foo }: { inherit foo; }' foo)" = "$(cat "${config_nix}")" ]] # Check that special(-ish) files are drained. if [[ -e /proc/version ]]; then @@ -57,7 +57,7 @@ fi # Test that unknown settings are warned about out="$(expectStderr 0 nix eval --option foobar baz --expr '""' --raw)" -[[ "$(echo "$out" | grep foobar | wc -l)" = 1 ]] +[[ "$(echo "$out" | grep -c foobar)" = 1 ]] # Test flag alias out="$(nix eval --expr '{}' --build-cores 1)" diff --git a/tests/functional/executable-after-contents.nar b/tests/functional/executable-after-contents.nar new file mode 100644 index 000000000..f8c003480 Binary files /dev/null and b/tests/functional/executable-after-contents.nar differ diff --git a/tests/functional/export-graph.nix b/tests/functional/export-graph.nix index 64fe36bd1..97ffe73a9 100644 --- a/tests/functional/export-graph.nix +++ b/tests/functional/export-graph.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; rec { diff --git a/tests/functional/extra-sandbox-profile.nix b/tests/functional/extra-sandbox-profile.nix index aa680b918..5f0e4753f 100644 --- a/tests/functional/extra-sandbox-profile.nix +++ b/tests/functional/extra-sandbox-profile.nix @@ -1,6 +1,6 @@ { destFile, seed }: -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; mkDerivation { name = "simple"; diff --git a/tests/functional/failing.nix b/tests/functional/failing.nix index d25e2d6b6..8b7990679 100644 --- a/tests/functional/failing.nix +++ b/tests/functional/failing.nix @@ -1,5 +1,5 @@ { busybox }: -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; let mkDerivation = args: diff --git a/tests/functional/fetchGitSubmodules.sh b/tests/functional/fetchGitSubmodules.sh index 4a3e4c347..cd3b51674 100755 --- a/tests/functional/fetchGitSubmodules.sh +++ b/tests/functional/fetchGitSubmodules.sh @@ -104,6 +104,27 @@ noSubmoduleRepo=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$subR [[ $noSubmoduleRepoBaseline == $noSubmoduleRepo ]] +# Test .gitmodules with entries that refer to non-existent objects or objects that are not submodules. +cat >> $rootRepo/.gitmodules < $rootRepo/file +git -C $rootRepo add file +git -C $rootRepo commit -a -m "Add bad submodules" + +rev=$(git -C $rootRepo rev-parse HEAD) + +r=$(nix eval --raw --expr "builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }") + +[[ -f $r/file ]] +[[ ! -e $r/missing ]] + # Test relative submodule URLs. rm $TEST_HOME/.cache/nix/fetcher-cache* rm -rf $rootRepo/.git $rootRepo/.gitmodules $rootRepo/sub diff --git a/tests/functional/fetchPath.sh b/tests/functional/fetchPath.sh index 560a270c1..1df895b61 100755 --- a/tests/functional/fetchPath.sh +++ b/tests/functional/fetchPath.sh @@ -6,3 +6,6 @@ touch "$TEST_ROOT/foo" -t 202211111111 # We only check whether 2022-11-1* **:**:** is the last modified date since # `lastModified` is transformed into UTC in `builtins.fetchTarball`. [[ "$(nix eval --impure --raw --expr "(builtins.fetchTree \"path://$TEST_ROOT/foo\").lastModifiedDate")" =~ 2022111.* ]] + +# Check that we can override lastModified for "path:" inputs. 
+[[ "$(nix eval --impure --expr "(builtins.fetchTree { type = \"path\"; path = \"$TEST_ROOT/foo\"; lastModified = 123; }).lastModified")" = 123 ]] diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh index 5af44fcf2..c25ac3216 100755 --- a/tests/functional/fetchurl.sh +++ b/tests/functional/fetchurl.sh @@ -9,12 +9,12 @@ clearStore # Test fetching a flat file. hash=$(nix-hash --flat --type sha256 ./fetchurl.sh) -outPath=$(nix-build -vvvvv --expr 'import ' --argstr url file://$(pwd)/fetchurl.sh --argstr sha256 $hash --no-out-link) +outPath=$(nix-build -vvvvv --expr 'import ' --argstr url "file://$(pwd)/fetchurl.sh" --argstr sha256 "$hash" --no-out-link) -cmp $outPath fetchurl.sh +cmp "$outPath" fetchurl.sh # Do not re-fetch paths already present. -outPath2=$(nix-build -vvvvv --expr 'import ' --argstr url file:///does-not-exist/must-remain-unused/fetchurl.sh --argstr sha256 $hash --no-out-link) +outPath2=$(nix-build -vvvvv --expr 'import ' --argstr url file:///does-not-exist/must-remain-unused/fetchurl.sh --argstr sha256 "$hash" --no-out-link) test "$outPath" == "$outPath2" # Now using a base-64 hash. @@ -22,9 +22,9 @@ clearStore hash=$(nix hash file --type sha512 --base64 ./fetchurl.sh) -outPath=$(nix-build -vvvvv --expr 'import ' --argstr url file://$(pwd)/fetchurl.sh --argstr sha512 $hash --no-out-link) +outPath=$(nix-build -vvvvv --expr 'import ' --argstr url "file://$(pwd)/fetchurl.sh" --argstr sha512 "$hash" --no-out-link) -cmp $outPath fetchurl.sh +cmp "$outPath" fetchurl.sh # Now using an SRI hash. clearStore @@ -33,58 +33,58 @@ hash=$(nix hash file ./fetchurl.sh) [[ $hash =~ ^sha256- ]] -outPath=$(nix-build -vvvvv --expr 'import ' --argstr url file://$(pwd)/fetchurl.sh --argstr hash $hash --no-out-link) +outPath=$(nix-build -vvvvv --expr 'import ' --argstr url "file://$(pwd)/fetchurl.sh" --argstr hash "$hash" --no-out-link) -cmp $outPath fetchurl.sh +cmp "$outPath" fetchurl.sh # Test that we can substitute from a different store dir. clearStore -other_store=file://$TEST_ROOT/other_store?store=/fnord/store +other_store="file://$TEST_ROOT/other_store?store=/fnord/store" hash=$(nix hash file --type sha256 --base16 ./fetchurl.sh) -storePath=$(nix --store $other_store store add-file ./fetchurl.sh) +nix --store "$other_store" store add-file ./fetchurl.sh -outPath=$(nix-build -vvvvv --expr 'import ' --argstr url file:///no-such-dir/fetchurl.sh --argstr sha256 $hash --no-out-link --substituters $other_store) +outPath=$(nix-build -vvvvv --expr 'import ' --argstr url file:///no-such-dir/fetchurl.sh --argstr sha256 "$hash" --no-out-link --substituters "$other_store") # Test hashed mirrors with an SRI hash. -nix-build -vvvvv --expr 'import ' --argstr url file:///no-such-dir/fetchurl.sh --argstr hash $(nix hash to-sri --type sha256 $hash) \ - --no-out-link --substituters $other_store +nix-build -vvvvv --expr 'import ' --argstr url file:///no-such-dir/fetchurl.sh --argstr hash "$(nix hash to-sri --type sha256 "$hash")" \ + --no-out-link --substituters "$other_store" # Test unpacking a NAR. 
-rm -rf $TEST_ROOT/archive -mkdir -p $TEST_ROOT/archive -cp ./fetchurl.sh $TEST_ROOT/archive -chmod +x $TEST_ROOT/archive/fetchurl.sh -ln -s foo $TEST_ROOT/archive/symlink -nar=$TEST_ROOT/archive.nar -nix-store --dump $TEST_ROOT/archive > $nar +rm -rf "$TEST_ROOT/archive" +mkdir -p "$TEST_ROOT/archive" +cp ./fetchurl.sh "$TEST_ROOT/archive" +chmod +x "$TEST_ROOT/archive/fetchurl.sh" +ln -s foo "$TEST_ROOT/archive/symlink" +nar="$TEST_ROOT/archive.nar" +nix-store --dump "$TEST_ROOT/archive" > "$nar" -hash=$(nix-hash --flat --type sha256 $nar) +hash=$(nix-hash --flat --type sha256 "$nar") -outPath=$(nix-build -vvvvv --expr 'import ' --argstr url file://$nar --argstr sha256 $hash \ +outPath=$(nix-build -vvvvv --expr 'import ' --argstr url "file://$nar" --argstr sha256 "$hash" \ --arg unpack true --argstr name xyzzy --no-out-link) -echo $outPath | grepQuiet 'xyzzy' +echo "$outPath" | grepQuiet 'xyzzy' -test -x $outPath/fetchurl.sh -test -L $outPath/symlink +test -x "$outPath/fetchurl.sh" +test -L "$outPath/symlink" -nix-store --delete $outPath +nix-store --delete "$outPath" # Test unpacking a compressed NAR. -narxz=$TEST_ROOT/archive.nar.xz -rm -f $narxz -xz --keep $nar -outPath=$(nix-build -vvvvv --expr 'import ' --argstr url file://$narxz --argstr sha256 $hash \ +narxz="$TEST_ROOT/archive.nar.xz" +rm -f "$narxz" +xz --keep "$nar" +outPath=$(nix-build -vvvvv --expr 'import ' --argstr url "file://$narxz" --argstr sha256 "$hash" \ --arg unpack true --argstr name xyzzy --no-out-link) -test -x $outPath/fetchurl.sh -test -L $outPath/symlink +test -x "$outPath/fetchurl.sh" +test -L "$outPath/symlink" # Make sure that *not* passing a outputHash fails. requireDaemonNewerThan "2.20" expected=100 if [[ -v NIX_DAEMON_PACKAGE ]]; then expected=1; fi # work around the daemon not returning a 100 status correctly -expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output or impure derivation' +expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url "file://$narxz" 2>&1 | grep 'must be a fixed-output or impure derivation' diff --git a/tests/functional/filter-source.nix b/tests/functional/filter-source.nix index 907163639..dcef9c4e2 100644 --- a/tests/functional/filter-source.nix +++ b/tests/functional/filter-source.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; mkDerivation { name = "filter"; diff --git a/tests/functional/fixed.nix b/tests/functional/fixed.nix index a920a2167..f70b89091 100644 --- a/tests/functional/fixed.nix +++ b/tests/functional/fixed.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; rec { diff --git a/tests/functional/flakes/build-paths.sh b/tests/functional/flakes/build-paths.sh index f8486528b..cbae6a4e6 100755 --- a/tests/functional/flakes/build-paths.sh +++ b/tests/functional/flakes/build-paths.sh @@ -79,7 +79,7 @@ cat > "$flake1Dir"/flake.nix < "$flake1Dir/foo" diff --git a/tests/functional/flakes/bundle.sh b/tests/functional/flakes/bundle.sh index 5e185cbf6..61aa040e7 100755 --- a/tests/functional/flakes/bundle.sh +++ b/tests/functional/flakes/bundle.sh @@ -2,7 +2,10 @@ source common.sh -cp ../simple.nix 
../simple.builder.sh ../config.nix "$TEST_HOME" +cp ../simple.nix ../simple.builder.sh "${config_nix}" "$TEST_HOME" + +# `config.nix` cannot be gotten via build dir / env var (runs afoul pure eval). Instead get from flake. +removeBuildDirRef "$TEST_HOME"/*.nix cd "$TEST_HOME" diff --git a/tests/functional/flakes/check.sh b/tests/functional/flakes/check.sh index 3b83dcafe..27e73444a 100755 --- a/tests/functional/flakes/check.sh +++ b/tests/functional/flakes/check.sh @@ -91,3 +91,47 @@ nix flake check $flakeDir checkRes=$(nix flake check --all-systems --keep-going $flakeDir 2>&1 && fail "nix flake check --all-systems should have failed" || true) echo "$checkRes" | grepQuiet "packages.system-1.default" echo "$checkRes" | grepQuiet "packages.system-2.default" + +cat > $flakeDir/flake.nix < $flakeDir/flake.nix <&1 && fail "nix flake check --all-systems should have failed" || true) +echo "$checkRes" | grepQuiet "unknown-attr" + +cat > $flakeDir/flake.nix <&1 && fail "nix flake check --all-systems should have failed" || true) +echo "$checkRes" | grepQuiet "formatter.system-1" diff --git a/tests/functional/flakes/common.sh b/tests/functional/flakes/common.sh index f83a02aba..8af72f2ad 100644 --- a/tests/functional/flakes/common.sh +++ b/tests/functional/flakes/common.sh @@ -1,10 +1,13 @@ +# shellcheck shell=bash + source ../common.sh +# shellcheck disable=SC2034 # this variable is used by tests that source this file registry=$TEST_ROOT/registry.json writeSimpleFlake() { local flakeDir="$1" - cat > $flakeDir/flake.nix < "$flakeDir/flake.nix" < $flakeDir/flake.nix < "$flakeDir/flake.nix" < $flakeDir/flake.nix < "$flakeDir/flake.nix" < flake.nix EOF # Without --accept-flake-config, the post hook should not run. +# To test variations in stderr tty-ness, we run the command in different ways, +# none of which should block on stdin or accept the `nixConfig`s. nix build < /dev/null +nix build < /dev/null 2>&1 | cat +# EOF counts as no, even when interactive (throw EOF error before) +if type -p script >/dev/null && script -q -c true /dev/null; then + echo "script is available and GNU-like, so we can ensure a tty" + script -q -c 'nix build < /dev/null' /dev/null +else + echo "script is not available or not GNU-like, so we skip testing with an added tty" +fi (! [[ -f post-hook-ran ]]) TODO_NixOS clearStore diff --git a/tests/functional/flakes/develop.sh b/tests/functional/flakes/develop.sh index 60abf3ef2..2e75081d4 100755 --- a/tests/functional/flakes/develop.sh +++ b/tests/functional/flakes/develop.sh @@ -5,11 +5,11 @@ source ../common.sh TODO_NixOS clearStore -rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local +rm -rf "$TEST_HOME/.cache" "$TEST_HOME/.config" "$TEST_HOME/.local" # Create flake under test. -cp ../shell-hello.nix ../config.nix $TEST_HOME/ -cat <$TEST_HOME/flake.nix +cp ../shell-hello.nix "$config_nix" "$TEST_HOME/" +cat <"$TEST_HOME/flake.nix" { inputs.nixpkgs.url = "$TEST_HOME/nixpkgs"; outputs = {self, nixpkgs}: { @@ -24,9 +24,13 @@ cat <$TEST_HOME/flake.nix EOF # Create fake nixpkgs flake. -mkdir -p $TEST_HOME/nixpkgs -cp ../config.nix ../shell.nix $TEST_HOME/nixpkgs -cat <$TEST_HOME/nixpkgs/flake.nix +mkdir -p "$TEST_HOME/nixpkgs" +cp "${config_nix}" ../shell.nix "$TEST_HOME/nixpkgs" + +# `config.nix` cannot be gotten via build dir / env var (runs afoul pure eval). Instead get from flake. 
+removeBuildDirRef "$TEST_HOME/nixpkgs"/*.nix + +cat <"$TEST_HOME/nixpkgs/flake.nix" { outputs = {self}: { legacyPackages.$system.bashInteractive = (import ./shell.nix {}).bashInteractive; @@ -34,7 +38,7 @@ cat <$TEST_HOME/nixpkgs/flake.nix } EOF -cd $TEST_HOME +cd "$TEST_HOME" # Test whether `nix develop` passes through environment variables. [[ "$( @@ -43,13 +47,86 @@ echo "\$ENVVAR" EOF )" = "a" ]] -# Test whether `nix develop --ignore-environment` does _not_ pass through environment variables. +# Test whether `nix develop --ignore-env` does _not_ pass through environment variables. [[ -z "$( - ENVVAR=a nix develop --ignore-environment --no-write-lock-file .#hello < "$flake3Dir/flake.nix" < flake.nix @@ -37,11 +40,13 @@ env > $TEST_ROOT/expected-env nix run -f shell-hello.nix env > $TEST_ROOT/actual-env # Remove/reset variables we expect to be different. # - PATH is modified by nix shell +# - we unset TMPDIR on macOS if it contains /var/folders. bad. https://github.com/NixOS/nix/issues/7731 # - _ is set by bash and is expected to differ because it contains the original command # - __CF_USER_TEXT_ENCODING is set by macOS and is beyond our control sed -i \ -e 's/PATH=.*/PATH=.../' \ -e 's/_=.*/_=.../' \ + -e '/^TMPDIR=\/var\/folders\/.*/d' \ -e '/^__CF_USER_TEXT_ENCODING=.*$/d' \ $TEST_ROOT/expected-env $TEST_ROOT/actual-env sort $TEST_ROOT/expected-env | uniq > $TEST_ROOT/expected-env.sorted diff --git a/tests/functional/fmt.sh b/tests/functional/fmt.sh index b29fe64d6..e9bff50d5 100755 --- a/tests/functional/fmt.sh +++ b/tests/functional/fmt.sh @@ -5,13 +5,13 @@ source common.sh TODO_NixOS # Provide a `shell` variable. Try not to `export` it, perhaps. clearStoreIfPossible -rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local +rm -rf "$TEST_HOME"/.cache "$TEST_HOME"/.config "$TEST_HOME"/.local -cp ./simple.nix ./simple.builder.sh ./fmt.simple.sh ./config.nix $TEST_HOME +cp ./simple.nix ./simple.builder.sh ./fmt.simple.sh "${config_nix}" "$TEST_HOME" -cd $TEST_HOME +cd "$TEST_HOME" -nix fmt --help | grep "Format" +nix fmt --help | grep "forward" cat << EOF > flake.nix { @@ -30,6 +30,9 @@ cat << EOF > flake.nix }; } EOF -nix fmt ./file ./folder | grep 'Formatting: ./file ./folder' +# No arguments check +[[ "$(nix fmt)" = "Formatting(0):" ]] +# Argument forwarding check +nix fmt ./file ./folder | grep 'Formatting(2): ./file ./folder' nix flake check nix flake show | grep -P "package 'formatter'" diff --git a/tests/functional/fmt.simple.sh b/tests/functional/fmt.simple.sh index 4c8c67ebb..e53f6c9be 100755 --- a/tests/functional/fmt.simple.sh +++ b/tests/functional/fmt.simple.sh @@ -1 +1,2 @@ -echo Formatting: "${@}" +#!/usr/bin/env bash +echo "Formatting(${#}):" "${@}" diff --git a/tests/functional/fod-failing.nix b/tests/functional/fod-failing.nix index 37c04fe12..7881a3fbf 100644 --- a/tests/functional/fod-failing.nix +++ b/tests/functional/fod-failing.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; rec { x1 = mkDerivation { name = "x1"; diff --git a/tests/functional/gc-auto.sh b/tests/functional/gc-auto.sh index 8f25be3e9..efe3e4b2b 100755 --- a/tests/functional/gc-auto.sh +++ b/tests/functional/gc-auto.sh @@ -23,7 +23,7 @@ fifoLock=$TEST_ROOT/fifoLock mkfifo "$fifoLock" expr=$(cat < $fifo2\"; diff --git a/tests/functional/gc-runtime.nix b/tests/functional/gc-runtime.nix index ee5980bdf..2603fafdf 100644 --- a/tests/functional/gc-runtime.nix +++ b/tests/functional/gc-runtime.nix @@ -1,4 +1,4 @@ -with import 
./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; mkDerivation { name = "gc-runtime"; diff --git a/tests/functional/gc.sh b/tests/functional/gc.sh index 7707a7e27..c58f47021 100755 --- a/tests/functional/gc.sh +++ b/tests/functional/gc.sh @@ -11,46 +11,46 @@ outPath=$(nix-store -rvv "$drvPath") # Set a GC root. rm -f "$NIX_STATE_DIR/gcroots/foo" -ln -sf $outPath "$NIX_STATE_DIR/gcroots/foo" +ln -sf "$outPath" "$NIX_STATE_DIR/gcroots/foo" -[ "$(nix-store -q --roots $outPath)" = "$NIX_STATE_DIR/gcroots/foo -> $outPath" ] +[ "$(nix-store -q --roots "$outPath")" = "$NIX_STATE_DIR/gcroots/foo -> $outPath" ] -nix-store --gc --print-roots | grep $outPath -nix-store --gc --print-live | grep $outPath -nix-store --gc --print-dead | grep $drvPath -if nix-store --gc --print-dead | grep -E $outPath$; then false; fi +nix-store --gc --print-roots | grep "$outPath" +nix-store --gc --print-live | grep "$outPath" +nix-store --gc --print-dead | grep "$drvPath" +if nix-store --gc --print-dead | grep -E "$outPath"$; then false; fi nix-store --gc --print-dead -inUse=$(readLink $outPath/reference-to-input-2) -if nix-store --delete $inUse; then false; fi -test -e $inUse +inUse=$(readLink "$outPath/reference-to-input-2") +if nix-store --delete "$inUse"; then false; fi +test -e "$inUse" -if nix-store --delete $outPath; then false; fi -test -e $outPath +if nix-store --delete "$outPath"; then false; fi +test -e "$outPath" -for i in $NIX_STORE_DIR/*; do +for i in "$NIX_STORE_DIR"/*; do if [[ $i =~ /trash ]]; then continue; fi # compat with old daemon - touch $i.lock - touch $i.chroot + touch "$i.lock" + touch "$i.chroot" done nix-collect-garbage # Check that the root and its dependencies haven't been deleted. -cat $outPath/foobar -cat $outPath/reference-to-input-2/bar +cat "$outPath/foobar" +cat "$outPath/reference-to-input-2/bar" # Check that the derivation has been GC'd. -if test -e $drvPath; then false; fi +if test -e "$drvPath"; then false; fi rm "$NIX_STATE_DIR/gcroots/foo" nix-collect-garbage # Check that the output has been GC'd. -if test -e $outPath/foobar; then false; fi +if test -e "$outPath/foobar"; then false; fi # Check that the store is empty. -rmdir $NIX_STORE_DIR/.links -rmdir $NIX_STORE_DIR +rmdir "$NIX_STORE_DIR/.links" +rmdir "$NIX_STORE_DIR" diff --git a/tests/functional/git-hashing/meson.build b/tests/functional/git-hashing/meson.build index 7486bfb8f..470c53fc5 100644 --- a/tests/functional/git-hashing/meson.build +++ b/tests/functional/git-hashing/meson.build @@ -4,5 +4,5 @@ suites += { 'tests': [ 'simple.sh', ], - 'workdir': meson.current_build_dir(), + 'workdir': meson.current_source_dir(), } diff --git a/tests/functional/help.sh b/tests/functional/help.sh index 61efc1cb2..127cc455b 100755 --- a/tests/functional/help.sh +++ b/tests/functional/help.sh @@ -65,5 +65,6 @@ def recurse($prefix): } nix __dump-cli | subcommands | while IFS= read -r cmd; do + # shellcheck disable=SC2086 # word splitting of cmd is intended nix $cmd --help done diff --git a/tests/functional/hermetic.nix b/tests/functional/hermetic.nix index d1dccdff3..dafe8ad9f 100644 --- a/tests/functional/hermetic.nix +++ b/tests/functional/hermetic.nix @@ -5,7 +5,7 @@ , withFinalRefs ? 
false }: -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; let contentAddressedByDefault = builtins.getEnv "NIX_TESTS_CA_BY_DEFAULT" == "1"; diff --git a/tests/functional/ifd.nix b/tests/functional/ifd.nix index d0b9b54ad..c84ffbc66 100644 --- a/tests/functional/ifd.nix +++ b/tests/functional/ifd.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; import ( mkDerivation { name = "foo"; diff --git a/tests/functional/import-derivation.nix b/tests/functional/import-derivation.nix deleted file mode 100644 index 44fa9a45d..000000000 --- a/tests/functional/import-derivation.nix +++ /dev/null @@ -1,26 +0,0 @@ -with import ./config.nix; - -let - - bar = mkDerivation { - name = "bar"; - builder = builtins.toFile "builder.sh" - '' - echo 'builtins.add 123 456' > $out - ''; - }; - - value = - # Test that pathExists can check the existence of /nix/store paths - assert builtins.pathExists bar; - import bar; - -in - -mkDerivation { - name = "foo"; - builder = builtins.toFile "builder.sh" - '' - echo -n FOO${toString value} > $out - ''; -} diff --git a/tests/functional/import-derivation.sh b/tests/functional/import-derivation.sh deleted file mode 100755 index 68ddcfa4a..000000000 --- a/tests/functional/import-derivation.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash - -source common.sh - -clearStoreIfPossible - -if nix-instantiate --readonly-mode ./import-derivation.nix; then - echo "read-only evaluation of an imported derivation unexpectedly failed" - exit 1 -fi - -outPath=$(nix-build ./import-derivation.nix --no-out-link) - -[ "$(cat "$outPath")" = FOO579 ] diff --git a/tests/functional/import-from-derivation.nix b/tests/functional/import-from-derivation.nix new file mode 100644 index 000000000..8864fb30a --- /dev/null +++ b/tests/functional/import-from-derivation.nix @@ -0,0 +1,33 @@ +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; + +rec { + bar = mkDerivation { + name = "bar"; + builder = builtins.toFile "builder.sh" + '' + echo 'builtins.add 123 456' > $out + ''; + }; + + value = + # Test that pathExists can check the existence of /nix/store paths + assert builtins.pathExists bar; + import bar; + + result = mkDerivation { + name = "foo"; + builder = builtins.toFile "builder.sh" + '' + echo -n FOO${toString value} > $out + ''; + }; + + addPath = mkDerivation { + name = "add-path"; + src = builtins.filterSource (path: type: true) result; + builder = builtins.toFile "builder.sh" + '' + echo -n BLA$(cat $src) > $out + ''; + }; +} diff --git a/tests/functional/import-from-derivation.sh b/tests/functional/import-from-derivation.sh new file mode 100755 index 000000000..83ef92a6f --- /dev/null +++ b/tests/functional/import-from-derivation.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash + +source common.sh + +TODO_NixOS + +clearStoreIfPossible + +if nix-instantiate --readonly-mode ./import-from-derivation.nix -A result; then + echo "read-only evaluation of an imported derivation unexpectedly failed" + exit 1 +fi + +outPath=$(nix-build ./import-from-derivation.nix -A result --no-out-link) + +[ "$(cat "$outPath")" = FOO579 ] + +# FIXME: the next tests are broken on CA. +if [[ -n "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then + exit 0 +fi + +# Test filterSource on the result of a derivation. +outPath2=$(nix-build ./import-from-derivation.nix -A addPath --no-out-link) +[[ "$(cat "$outPath2")" = BLAFOO579 ]] + +# Test that IFD works with a chroot store. 
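The new `import-from-derivation.nix` fixture above exercises import-from-derivation: evaluating `value` forces the `bar` derivation to be built so its output (a file containing `builtins.add 123 456`) can be imported. A small way to see that at the command line, assuming the test environment's `_NIX_TEST_BUILD_DIR` is set as the fixture expects and builds are allowed (i.e. not `--readonly-mode`):

```bash
# Forces a build of 'bar' during evaluation, then imports its output,
# which should evaluate to 579.
nix-instantiate --eval ./import-from-derivation.nix -A value
```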
+if canUseSandbox; then + + store2="$TEST_ROOT/store2" + store2_url="$store2?store=$NIX_STORE_DIR" + + # Copy the derivation outputs to the chroot store to avoid having + # to actually build anything, as that would fail due to the lack + # of a shell in the sandbox. We only care about testing the IFD + # semantics. + for i in bar result addPath; do + nix copy --to "$store2_url" --no-check-sigs "$(nix-build ./import-from-derivation.nix -A "$i" --no-out-link)" + done + + clearStore + + outPath_check=$(nix-build ./import-from-derivation.nix -A result --no-out-link --store "$store2_url") + [[ "$outPath" = "$outPath_check" ]] + [[ ! -e "$outPath" ]] + [[ -e "$store2/nix/store/$(basename "$outPath")" ]] + + outPath2_check=$(nix-build ./import-from-derivation.nix -A addPath --no-out-link --store "$store2_url") + [[ "$outPath2" = "$outPath2_check" ]] +fi diff --git a/tests/functional/impure-derivations.nix b/tests/functional/impure-derivations.nix index 98547e6c1..04710323f 100644 --- a/tests/functional/impure-derivations.nix +++ b/tests/functional/impure-derivations.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; rec { diff --git a/tests/functional/impure-env.nix b/tests/functional/impure-env.nix index 2b0380ed7..6b9e5a825 100644 --- a/tests/functional/impure-env.nix +++ b/tests/functional/impure-env.nix @@ -1,6 +1,6 @@ { var, value }: -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; mkDerivation { name = "test"; diff --git a/tests/functional/lang.sh b/tests/functional/lang.sh index 46cf3f1fe..e64663d30 100755 --- a/tests/functional/lang.sh +++ b/tests/functional/lang.sh @@ -37,12 +37,14 @@ nix-instantiate --eval -E 'let x = { repeating = x; tracing = builtins.trace x t 2>&1 | grepQuiet -F 'trace: { repeating = «repeated»; tracing = «potential infinite recursion»; }' nix-instantiate --eval -E 'builtins.warn "Hello" 123' 2>&1 | grepQuiet 'warning: Hello' +# shellcheck disable=SC2016 # The ${} in this is Nix, not shell nix-instantiate --eval -E 'builtins.addErrorContext "while doing ${"something"} interesting" (builtins.warn "Hello" 123)' 2>/dev/null | grepQuiet 123 # warn does not accept non-strings for now expectStderr 1 nix-instantiate --eval -E 'let x = builtins.warn { x = x; } true; in x' \ | grepQuiet "expected a string but found a set" expectStderr 1 nix-instantiate --eval --abort-on-warn -E 'builtins.warn "Hello" 123' | grepQuiet Hello +# shellcheck disable=SC2016 # The ${} in this is Nix, not shell NIX_ABORT_ON_WARN=1 expectStderr 1 nix-instantiate --eval -E 'builtins.addErrorContext "while doing ${"something"} interesting" (builtins.warn "Hello" 123)' | grepQuiet "while doing something interesting" set +x @@ -106,6 +108,7 @@ for i in lang/eval-fail-*.nix; do fi )" if + # shellcheck disable=SC2086 # word splitting of flags is intended expectStderr 1 nix-instantiate $flags "lang/$i.nix" \ | sed "s!$(pwd)!/pwd!g" > "lang/$i.err" then diff --git a/tests/functional/lang/eval-okay-deprecate-cursed-or.err.exp b/tests/functional/lang/eval-okay-deprecate-cursed-or.err.exp new file mode 100644 index 000000000..4a656827a --- /dev/null +++ b/tests/functional/lang/eval-okay-deprecate-cursed-or.err.exp @@ -0,0 +1,12 @@ +warning: at /pwd/lang/eval-okay-deprecate-cursed-or.nix:3:47: This expression uses `or` as an identifier in a way that will change in a future Nix release. 
+Wrap this entire expression in parentheses to preserve its current meaning: + ((x: x) or) +Give feedback at https://github.com/NixOS/nix/pull/11121 +warning: at /pwd/lang/eval-okay-deprecate-cursed-or.nix:4:39: This expression uses `or` as an identifier in a way that will change in a future Nix release. +Wrap this entire expression in parentheses to preserve its current meaning: + ((x: x + 1) or) +Give feedback at https://github.com/NixOS/nix/pull/11121 +warning: at /pwd/lang/eval-okay-deprecate-cursed-or.nix:5:44: This expression uses `or` as an identifier in a way that will change in a future Nix release. +Wrap this entire expression in parentheses to preserve its current meaning: + ((x: x) or) +Give feedback at https://github.com/NixOS/nix/pull/11121 diff --git a/tests/functional/lang/eval-okay-deprecate-cursed-or.exp b/tests/functional/lang/eval-okay-deprecate-cursed-or.exp new file mode 100644 index 000000000..573541ac9 --- /dev/null +++ b/tests/functional/lang/eval-okay-deprecate-cursed-or.exp @@ -0,0 +1 @@ +0 diff --git a/tests/functional/lang/eval-okay-deprecate-cursed-or.nix b/tests/functional/lang/eval-okay-deprecate-cursed-or.nix new file mode 100644 index 000000000..a4f9e747f --- /dev/null +++ b/tests/functional/lang/eval-okay-deprecate-cursed-or.nix @@ -0,0 +1,11 @@ +let + # These are cursed and should warn + cursed0 = builtins.length (let or = 1; in [ (x: x) or ]); + cursed1 = let or = 1; in (x: x * 2) (x: x + 1) or; + cursed2 = let or = 1; in { a = 2; }.a or (x: x) or; + + # These are uses of `or` as an identifier that are not cursed + allowed0 = let or = (x: x); in map or []; + allowed1 = let f = (x: x); or = f; in f (f or); +in +0 diff --git a/tests/functional/linux-sandbox-cert-test.nix b/tests/functional/linux-sandbox-cert-test.nix index 2fc083ea9..e506b6a0f 100644 --- a/tests/functional/linux-sandbox-cert-test.nix +++ b/tests/functional/linux-sandbox-cert-test.nix @@ -1,6 +1,6 @@ { mode }: -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; mkDerivation ( { diff --git a/tests/functional/linux-sandbox.sh b/tests/functional/linux-sandbox.sh index 653a3873f..81ef36237 100755 --- a/tests/functional/linux-sandbox.sh +++ b/tests/functional/linux-sandbox.sh @@ -40,14 +40,14 @@ nix-sandbox-build dependencies.nix --check # Test that sandboxed builds with --check and -K can move .check directory to store nix-sandbox-build check.nix -A nondeterministic -# `100 + 4` means non-determinstic, see doc/manual/src/command-ref/status-build-failure.md +# `100 + 4` means non-determinstic, see doc/manual/source/command-ref/status-build-failure.md expectStderr 104 nix-sandbox-build check.nix -A nondeterministic --check -K > $TEST_ROOT/log grepQuietInverse 'error: renaming' $TEST_ROOT/log grepQuiet 'may not be deterministic' $TEST_ROOT/log # Test that sandboxed builds cannot write to /etc easily -# `100` means build failure without extra info, see doc/manual/src/command-ref/status-build-failure.md -expectStderr 100 nix-sandbox-build -E 'with import ./config.nix; mkDerivation { name = "etc-write"; buildCommand = "echo > /etc/test"; }' | +# `100` means build failure without extra info, see doc/manual/source/command-ref/status-build-failure.md +expectStderr 100 nix-sandbox-build -E 'with import '"${config_nix}"'; mkDerivation { name = "etc-write"; buildCommand = "echo > /etc/test"; }' | grepQuiet "/etc/test: Permission denied" @@ -56,7 +56,7 @@ testCert () { expectation=$1 # "missing" | "present" mode=$2 # "normal" | "fixed-output" certFile=$3 # a 
string that can be the path to a cert file - # `100` means build failure without extra info, see doc/manual/src/command-ref/status-build-failure.md + # `100` means build failure without extra info, see doc/manual/source/command-ref/status-build-failure.md [ "$mode" == fixed-output ] && ret=1 || ret=100 expectStderr $ret nix-sandbox-build linux-sandbox-cert-test.nix --argstr mode "$mode" --option ssl-cert-file "$certFile" | grepQuiet "CERT_${expectation}_IN_SANDBOX" diff --git a/tests/functional/local-overlay-store/common.sh b/tests/functional/local-overlay-store/common.sh index 27338ea23..cbdb3a1f7 100644 --- a/tests/functional/local-overlay-store/common.sh +++ b/tests/functional/local-overlay-store/common.sh @@ -69,7 +69,7 @@ mountOverlayfs () { || skipTest "overlayfs is not supported" cleanupOverlay () { - umount "$storeBRoot/nix/store" + umount -n "$storeBRoot/nix/store" rm -r $storeVolume/workdir } trap cleanupOverlay EXIT diff --git a/tests/functional/local-overlay-store/meson.build b/tests/functional/local-overlay-store/meson.build index 6ff5d3169..b7ba5a323 100644 --- a/tests/functional/local-overlay-store/meson.build +++ b/tests/functional/local-overlay-store/meson.build @@ -14,5 +14,5 @@ suites += { 'optimise.sh', 'stale-file-handle.sh', ], - 'workdir': meson.current_build_dir(), + 'workdir': meson.current_source_dir(), } diff --git a/tests/functional/local.mk b/tests/functional/local.mk index 8b4945cac..e50b5eaf1 100644 --- a/tests/functional/local.mk +++ b/tests/functional/local.mk @@ -88,9 +88,9 @@ nix_tests = \ why-depends.sh \ derivation-json.sh \ derivation-advanced-attributes.sh \ - import-derivation.sh \ + import-from-derivation.sh \ nix_path.sh \ - case-hack.sh \ + nars.sh \ placeholders.sh \ ssh-relay.sh \ build.sh \ @@ -114,7 +114,6 @@ nix_tests = \ impure-env.sh \ debugger.sh \ extra-sandbox-profile.sh \ - help.sh ifeq ($(HAVE_LIBCPUID), 1) nix_tests += compute-levels.sh @@ -128,6 +127,10 @@ ifeq ($(ENABLE_BUILD), yes) endif endif +ifeq ($(ENABLE_DOC_GEN), yes) + nix_tests += help.sh +endif + $(d)/test-libstoreconsumer.sh.test $(d)/test-libstoreconsumer.sh.test-debug: \ $(buildprefix)$(d)/test-libstoreconsumer/test-libstoreconsumer $(d)/plugins.sh.test $(d)/plugins.sh.test-debug: \ diff --git a/tests/functional/logging.sh b/tests/functional/logging.sh index bd80a9163..c026ac9c2 100755 --- a/tests/functional/logging.sh +++ b/tests/functional/logging.sh @@ -22,7 +22,7 @@ nix-build dependencies.nix --no-out-link --compress-build-log builder="$(realpath "$(mktemp)")" echo -e "#!/bin/sh\nmkdir \$out" > "$builder" outp="$(nix-build -E \ - 'with import ./config.nix; mkDerivation { name = "fnord"; builder = '"$builder"'; }' \ + 'with import '"${config_nix}"'; mkDerivation { name = "fnord"; builder = '"$builder"'; }' \ --out-link "$(mktemp -d)/result")" test -d "$outp" diff --git a/tests/functional/meson.build b/tests/functional/meson.build index ebecdd9e8..0d46f9ce2 100644 --- a/tests/functional/meson.build +++ b/tests/functional/meson.build @@ -1,4 +1,4 @@ -project('nix-functional-tests', 'cpp', +project('nix-functional-tests', version : files('.version'), default_options : [ 'cpp_std=c++2a', @@ -14,31 +14,15 @@ project('nix-functional-tests', 'cpp', fs = import('fs') -# Need to combine source and build trees -run_command( - 'rsync', - '-a', - '--copy-unsafe-links', - meson.current_source_dir() / '', - meson.current_build_dir() / '', -) -# This current-source-escaping relative is no good because we don't know -# where the build directory will be, therefore we fix it up. 
Once the -# Make build system is gone, we should think about doing this better. -scripts_dir = fs.relative_to( - meson.current_source_dir() / '..' / '..' / 'scripts', - meson.current_build_dir(), -) -run_command( - 'sed', - '-i', meson.current_build_dir() / 'bash-profile.sh', - '-e', 's^../../scripts^@0@^'.format(scripts_dir), -) - nix = find_program('nix') bash = find_program('bash', native : true) busybox = find_program('busybox', native : true, required : false) -coreutils = find_program('coreutils', native : true) +if host_machine.system() == 'windows' + # Because of the state of symlinks on Windows, coreutils.exe doesn't usually exist, but things like ls.exe will + coreutils = find_program('ls', native : true) +else + coreutils = find_program('coreutils', native : true) +endif dot = find_program('dot', native : true, required : false) nix_bin_dir = fs.parent(nix.full_path()) @@ -157,9 +141,9 @@ suites = [ 'why-depends.sh', 'derivation-json.sh', 'derivation-advanced-attributes.sh', - 'import-derivation.sh', + 'import-from-derivation.sh', 'nix_path.sh', - 'case-hack.sh', + 'nars.sh', 'placeholders.sh', 'ssh-relay.sh', 'build.sh', @@ -185,12 +169,13 @@ suites = [ 'extra-sandbox-profile.sh', 'help.sh', ], - 'workdir': meson.current_build_dir(), + 'workdir': meson.current_source_dir(), }, ] nix_store = dependency('nix-store', required : false) if nix_store.found() + add_languages('cpp') subdir('test-libstoreconsumer') suites += { 'name': 'libstoreconsumer', @@ -200,7 +185,7 @@ if nix_store.found() 'tests': [ 'test-libstoreconsumer.sh', ], - 'workdir': meson.current_build_dir(), + 'workdir': meson.current_source_dir(), } endif @@ -208,6 +193,7 @@ endif # Plugin tests require shared libraries support. nix_expr = dependency('nix-expr', required : false) if nix_expr.found() and get_option('default_library') != 'static' + add_languages('cpp') subdir('plugins') suites += { 'name': 'plugins', @@ -217,7 +203,7 @@ if nix_expr.found() and get_option('default_library') != 'static' 'tests': [ 'plugins.sh', ], - 'workdir': meson.current_build_dir(), + 'workdir': meson.current_source_dir(), } endif @@ -228,14 +214,12 @@ subdir('git-hashing') subdir('local-overlay-store') foreach suite : suites + workdir = suite['workdir'] + suite_name = suite['name'] foreach script : suite['tests'] - workdir = suite['workdir'] - prefix = fs.relative_to(workdir, meson.project_build_root()) - - script = script # Turns, e.g., `tests/functional/flakes/show.sh` into a Meson test target called # `functional-flakes-show`. - name = fs.replace_suffix(prefix / script, '') + name = fs.replace_suffix(script, '') test( name, @@ -247,14 +231,17 @@ foreach suite : suites '-o', 'pipefail', script, ], - suite : suite['name'], + suite : suite_name, env : { - 'TEST_NAME': name, + '_NIX_TEST_SOURCE_DIR': meson.current_source_dir(), + '_NIX_TEST_BUILD_DIR': meson.current_build_dir(), + 'TEST_NAME': suite_name / name, 'NIX_REMOTE': '', 'PS4': '+(${BASH_SOURCE[0]-$0}:$LINENO) ', }, - # some tests take 15+ seconds even on an otherwise idle machine, on a loaded machine - # this can easily drive them to failure. give them more time than default of 30sec + # Some tests take 15+ seconds even on an otherwise idle machine; + # on a loaded machine this can easily drive them to failure. Give + # them more time than the default of 30 seconds. timeout : 300, # Used for target dependency/ordering tracking, not adding compiler flags or anything. 
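
As an illustrative aside (not part of the patch): once Meson exports `_NIX_TEST_SOURCE_DIR` and `_NIX_TEST_BUILD_DIR` as in the env block above, a functional test can resolve the generated `config.nix` through them. A minimal sketch, assuming the harness conventions used elsewhere in this patch (the `config_nix` helper variable and the `mkDerivation` wrapper defined by `config.nix`):

    # Sketch only; assumes it runs inside the functional-test harness.
    # config.nix is generated into the build directory, so resolve it from there.
    config_nix="${_NIX_TEST_BUILD_DIR}/config.nix"
    nix-build --no-out-link -E 'with import '"${config_nix}"'; mkDerivation { name = "demo"; buildCommand = "echo ok > $out"; }'
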
depends : suite['deps'], diff --git a/tests/functional/multiple-outputs.nix b/tests/functional/multiple-outputs.nix index 6ba7c523d..19ae2a45d 100644 --- a/tests/functional/multiple-outputs.nix +++ b/tests/functional/multiple-outputs.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; rec { diff --git a/tests/functional/name-after-node.nar b/tests/functional/name-after-node.nar new file mode 100644 index 000000000..3f5cd9d0b Binary files /dev/null and b/tests/functional/name-after-node.nar differ diff --git a/tests/functional/nar-access.nix b/tests/functional/nar-access.nix index 0e2a7f721..78972bd36 100644 --- a/tests/functional/nar-access.nix +++ b/tests/functional/nar-access.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; rec { a = mkDerivation { @@ -20,4 +20,4 @@ asdom 12398 EOF ''; }; -} \ No newline at end of file +} diff --git a/tests/functional/nar-access.sh b/tests/functional/nar-access.sh index b254081cf..2b0a6a329 100755 --- a/tests/functional/nar-access.sh +++ b/tests/functional/nar-access.sh @@ -9,57 +9,57 @@ cd "$TEST_ROOT" # Dump path to nar. narFile="$TEST_ROOT/path.nar" -nix-store --dump $storePath > $narFile +nix-store --dump "$storePath" > "$narFile" # Check that find and nar ls match. -( cd $storePath; find . | sort ) > files.find -nix nar ls -R -d $narFile "" | sort > files.ls-nar +( cd "$storePath"; find . | sort ) > files.find +nix nar ls -R -d "$narFile" "" | sort > files.ls-nar diff -u files.find files.ls-nar # Check that file contents of data match. -nix nar cat $narFile /foo/data > data.cat-nar -diff -u data.cat-nar $storePath/foo/data +nix nar cat "$narFile" /foo/data > data.cat-nar +diff -u data.cat-nar "$storePath/foo/data" # Check that file contents of baz match. -nix nar cat $narFile /foo/baz > baz.cat-nar -diff -u baz.cat-nar $storePath/foo/baz +nix nar cat "$narFile" /foo/baz > baz.cat-nar +diff -u baz.cat-nar "$storePath/foo/baz" -nix store cat $storePath/foo/baz > baz.cat-nar -diff -u baz.cat-nar $storePath/foo/baz +nix store cat "$storePath/foo/baz" > baz.cat-nar +diff -u baz.cat-nar "$storePath/foo/baz" TODO_NixOS # Check that 'nix store cat' fails on invalid store paths. -invalidPath="$(dirname $storePath)/99999999999999999999999999999999-foo" -cp -r $storePath $invalidPath -expect 1 nix store cat $invalidPath/foo/baz +invalidPath="$(dirname "$storePath")/99999999999999999999999999999999-foo" +cp -r "$storePath" "$invalidPath" +expect 1 nix store cat "$invalidPath/foo/baz" # Test --json. 
diff -u \ - <(nix nar ls --json $narFile / | jq -S) \ + <(nix nar ls --json "$narFile" / | jq -S) \ <(echo '{"type":"directory","entries":{"foo":{},"foo-x":{},"qux":{},"zyx":{}}}' | jq -S) diff -u \ - <(nix nar ls --json -R $narFile /foo | jq -S) \ + <(nix nar ls --json -R "$narFile" /foo | jq -S) \ <(echo '{"type":"directory","entries":{"bar":{"type":"regular","size":0,"narOffset":368},"baz":{"type":"regular","size":0,"narOffset":552},"data":{"type":"regular","size":58,"narOffset":736}}}' | jq -S) diff -u \ - <(nix nar ls --json -R $narFile /foo/bar | jq -S) \ + <(nix nar ls --json -R "$narFile" /foo/bar | jq -S) \ <(echo '{"type":"regular","size":0,"narOffset":368}' | jq -S) diff -u \ - <(nix store ls --json $storePath | jq -S) \ + <(nix store ls --json "$storePath" | jq -S) \ <(echo '{"type":"directory","entries":{"foo":{},"foo-x":{},"qux":{},"zyx":{}}}' | jq -S) diff -u \ - <(nix store ls --json -R $storePath/foo | jq -S) \ + <(nix store ls --json -R "$storePath/foo" | jq -S) \ <(echo '{"type":"directory","entries":{"bar":{"type":"regular","size":0},"baz":{"type":"regular","size":0},"data":{"type":"regular","size":58}}}' | jq -S) diff -u \ - <(nix store ls --json -R $storePath/foo/bar| jq -S) \ + <(nix store ls --json -R "$storePath/foo/bar"| jq -S) \ <(echo '{"type":"regular","size":0}' | jq -S) # Test missing files. -expect 1 nix store ls --json -R $storePath/xyzzy 2>&1 | grep 'does not exist' -expect 1 nix store ls $storePath/xyzzy 2>&1 | grep 'does not exist' +expect 1 nix store ls --json -R "$storePath/xyzzy" 2>&1 | grep 'does not exist' +expect 1 nix store ls "$storePath/xyzzy" 2>&1 | grep 'does not exist' # Test failure to dump. -if nix-store --dump $storePath >/dev/full ; then +if nix-store --dump "$storePath" >/dev/full ; then echo "dumping to /dev/full should fail" - exit -1 + exit 1 fi diff --git a/tests/functional/nars.sh b/tests/functional/nars.sh new file mode 100755 index 000000000..dd90345a6 --- /dev/null +++ b/tests/functional/nars.sh @@ -0,0 +1,161 @@ +#!/usr/bin/env bash + +source common.sh + +TODO_NixOS + +clearStore + +# Check that NARs with duplicate directory entries are rejected. +rm -rf "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "NAR directory is not sorted" + +# Check that nix-store --restore fails if the output already exists. +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "path '.*/out' already exists" + +rm -rf "$TEST_ROOT/out" +echo foo > "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "File exists" + +rm -rf "$TEST_ROOT/out" +ln -s "$TEST_ROOT/out2" "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "File exists" + +mkdir -p "$TEST_ROOT/out2" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < duplicate.nar | grepQuiet "path '.*/out' already exists" + +# The same, but for a regular file. 
+nix-store --dump ./nars.sh > "$TEST_ROOT/tmp.nar" + +rm -rf "$TEST_ROOT/out" +nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +rm -rf "$TEST_ROOT/out" +mkdir -p "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +rm -rf "$TEST_ROOT/out" +ln -s "$TEST_ROOT/out2" "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +mkdir -p "$TEST_ROOT/out2" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +# The same, but for a symlink. +ln -sfn foo "$TEST_ROOT/symlink" +nix-store --dump "$TEST_ROOT/symlink" > "$TEST_ROOT/tmp.nar" + +rm -rf "$TEST_ROOT/out" +nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" +[[ -L "$TEST_ROOT/out" ]] +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +rm -rf "$TEST_ROOT/out" +mkdir -p "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +rm -rf "$TEST_ROOT/out" +ln -s "$TEST_ROOT/out2" "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +mkdir -p "$TEST_ROOT/out2" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < "$TEST_ROOT/tmp.nar" | grepQuiet "File exists" + +# Check whether restoring and dumping a NAR that contains case +# collisions is round-tripping, even on a case-insensitive system. +rm -rf "$TEST_ROOT/case" +opts=("--option" "use-case-hack" "true") +nix-store "${opts[@]}" --restore "$TEST_ROOT/case" < case.nar +[[ -e "$TEST_ROOT/case/xt_CONNMARK.h" ]] +[[ -e "$TEST_ROOT/case/xt_CONNmark.h~nix~case~hack~1" ]] +[[ -e "$TEST_ROOT/case/xt_connmark.h~nix~case~hack~2" ]] +[[ -e "$TEST_ROOT/case/x/FOO" ]] +[[ -d "$TEST_ROOT/case/x/Foo~nix~case~hack~1" ]] +[[ -e "$TEST_ROOT/case/x/foo~nix~case~hack~2/a~nix~case~hack~1/foo" ]] +nix-store "${opts[@]}" --dump "$TEST_ROOT/case" > "$TEST_ROOT/case.nar" +cmp case.nar "$TEST_ROOT/case.nar" +[ "$(nix-hash "${opts[@]}" --type sha256 "$TEST_ROOT/case")" = "$(nix-hash --flat --type sha256 case.nar)" ] + +# Check whether we detect true collisions (e.g. those remaining after +# removal of the suffix). +touch "$TEST_ROOT/case/xt_CONNMARK.h~nix~case~hack~3" +(! nix-store "${opts[@]}" --dump "$TEST_ROOT/case" > /dev/null) + +# Detect NARs that have a directory entry that after case-hacking +# collides with another entry (e.g. a directory containing 'Test', +# 'Test~nix~case~hack~1' and 'test'). +rm -rf "$TEST_ROOT/case" +expectStderr 1 nix-store "${opts[@]}" --restore "$TEST_ROOT/case" < case-collision.nar | grepQuiet "NAR contains file name 'test' that collides with case-hacked file name 'Test~nix~case~hack~1'" + +# Deserializing a NAR that contains file names that Unicode-normalize to the +# same name should fail on macOS and specific Linux setups (typically ZFS with +# `utf8only` enabled and `normalization` set to anything else than `none`). The +# deserialization should succeed on most Linux, where file names aren't +# unicode-normalized. +# +# We test that: +# +# 1. It either succeeds or fails with "already exists" error. +# 2. 
Nix has the same behavior with respect to unicode normalization than +# $TEST_ROOT's filesystem (when using basic Unix commands) +rm -rf "$TEST_ROOT/out" +set +e +unicodeTestOut=$(nix-store --restore "$TEST_ROOT/out" < unnormalized.nar 2>&1) +unicodeTestCode=$? +set -e + +touch "$TEST_ROOT/unicode-â" # non-canonical version +touch "$TEST_ROOT/unicode-â" + +touchFilesCount=$(find "$TEST_ROOT" -maxdepth 1 -name "unicode-*" -type f | wc -l) + +if (( unicodeTestCode == 1 )); then + # If the command failed (MacOS or ZFS + normalization), checks that it failed + # with the expected "already exists" error, and that this is the same + # behavior as `touch` + echo "$unicodeTestOut" | grepQuiet "path '.*/out/â' already exists" + + (( touchFilesCount == 1 )) +elif (( unicodeTestCode == 0 )); then + # If the command succeeded, check that both files are present, and that this + # is the same behavior as `touch` + [[ -e $TEST_ROOT/out/â ]] + [[ -e $TEST_ROOT/out/â ]] + + (( touchFilesCount == 2 )) +else + # if the return code is neither 0 or 1, fail the test. + echo "NAR deserialization of files with the same Unicode normalization failed with unexpected return code $unicodeTestCode" >&2 + exit 1 +fi + +rm -f "$TEST_ROOT/unicode-*" + +# Unpacking a NAR with a NUL character in a file name should fail. +rm -rf "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < nul-character.nar | grepQuiet "NAR contains invalid file name 'f" + +# Likewise for a '.' filename. +rm -rf "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < dot.nar | grepQuiet "NAR contains invalid file name '.'" + +# Likewise for a '..' filename. +rm -rf "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < dotdot.nar | grepQuiet "NAR contains invalid file name '..'" + +# Likewise for a filename containing a slash. +rm -rf "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < slash.nar | grepQuiet "NAR contains invalid file name 'x/y'" + +# Likewise for an empty filename. +rm -rf "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < empty.nar | grepQuiet "NAR contains invalid file name ''" + +# Test that the 'executable' field cannot come before the 'contents' field. +rm -rf "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < executable-after-contents.nar | grepQuiet "expected tag ')', got 'executable'" + +# Test that the 'name' field cannot come before the 'node' field in a directory entry. +rm -rf "$TEST_ROOT/out" +expectStderr 1 nix-store --restore "$TEST_ROOT/out" < name-after-node.nar | grepQuiet "expected tag 'name'" diff --git a/tests/functional/nested-sandboxing.sh b/tests/functional/nested-sandboxing.sh index ae0256de2..7462d2968 100755 --- a/tests/functional/nested-sandboxing.sh +++ b/tests/functional/nested-sandboxing.sh @@ -8,6 +8,15 @@ TODO_NixOS requireSandboxSupport +start="$TEST_ROOT/start" +mkdir -p "$start" +cp -r common common.sh ${config_nix} ./nested-sandboxing "$start" +cp "${_NIX_TEST_BUILD_DIR}/common/subst-vars.sh" "$start/common" +# N.B. 
redefine +_NIX_TEST_SOURCE_DIR="$start" +_NIX_TEST_BUILD_DIR="$start" +cd "$start" + source ./nested-sandboxing/command.sh expectStderr 100 runNixBuild badStoreUrl 2 | grepQuiet '`sandbox-build-dir` must not contain' diff --git a/tests/functional/nested-sandboxing/runner.nix b/tests/functional/nested-sandboxing/runner.nix index 1e79d5065..48ad512b3 100644 --- a/tests/functional/nested-sandboxing/runner.nix +++ b/tests/functional/nested-sandboxing/runner.nix @@ -19,6 +19,9 @@ mkDerivation { export PATH=${builtins.getEnv "NIX_BIN_DIR"}:$PATH + export _NIX_TEST_SOURCE_DIR=$PWD + export _NIX_TEST_BUILD_DIR=$PWD + source common.sh source ./nested-sandboxing/command.sh diff --git a/tests/functional/nix-build-examples.nix b/tests/functional/nix-build-examples.nix index e54dbbf62..aaea8fc07 100644 --- a/tests/functional/nix-build-examples.nix +++ b/tests/functional/nix-build-examples.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; rec { diff --git a/tests/functional/nix-channel.sh b/tests/functional/nix-channel.sh index a4870e7a8..16d6a1355 100755 --- a/tests/functional/nix-channel.sh +++ b/tests/functional/nix-channel.sh @@ -35,7 +35,7 @@ drvPath=$(nix-instantiate dependencies.nix) nix copy --to file://$TEST_ROOT/foo?compression="bzip2" $(nix-store -r "$drvPath") rm -rf $TEST_ROOT/nixexprs mkdir -p $TEST_ROOT/nixexprs -cp config.nix dependencies.nix dependencies.builder*.sh $TEST_ROOT/nixexprs/ +cp "${config_nix}" dependencies.nix dependencies.builder*.sh $TEST_ROOT/nixexprs/ ln -s dependencies.nix $TEST_ROOT/nixexprs/default.nix (cd $TEST_ROOT && tar cvf - nixexprs) | bzip2 > $TEST_ROOT/foo/nixexprs.tar.bz2 diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index e2f19b99e..7cf5fcb74 100755 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -47,7 +47,7 @@ printf World > $flake1Dir/who printf 1.0 > $flake1Dir/version printf false > $flake1Dir/ca.nix -cp ./config.nix $flake1Dir/ +cp "${config_nix}" $flake1Dir/ # Test upgrading from nix-env. 
nix-env -f ./user-envs.nix -i foo-1.0 @@ -140,7 +140,7 @@ nix profile install $(nix-build --no-out-link ./simple.nix) # Test packages with same name from different sources mkdir $TEST_ROOT/simple-too -cp ./simple.nix ./config.nix simple.builder.sh $TEST_ROOT/simple-too +cp ./simple.nix "${config_nix}" simple.builder.sh $TEST_ROOT/simple-too nix profile install --file $TEST_ROOT/simple-too/simple.nix '' nix profile list | grep -A4 'Name:.*simple' | grep 'Name:.*simple-1' nix profile remove simple 2>&1 | grep 'removed 1 packages' diff --git a/tests/functional/nix-shell.sh b/tests/functional/nix-shell.sh index b9625eb66..2b78216f4 100755 --- a/tests/functional/nix-shell.sh +++ b/tests/functional/nix-shell.sh @@ -31,6 +31,15 @@ output=$(nix-shell --pure --keep SELECTED_IMPURE_VAR "$shellDotNix" -A shellDrv [ "$output" = " - foo - bar - baz" ] +# test NIX_BUILD_TOP +testTmpDir=$(pwd)/nix-shell +mkdir -p "$testTmpDir" +output=$(TMPDIR="$testTmpDir" nix-shell --pure "$shellDotNix" -A shellDrv --run 'echo $NIX_BUILD_TOP') +[[ "$output" =~ ${testTmpDir}.* ]] || { + echo "expected $output =~ ${testTmpDir}.*" >&2 + exit 1 +} + # Test nix-shell on a .drv [[ $(nix-shell --pure $(nix-instantiate "$shellDotNix" -A shellDrv) --run \ 'echo "$IMPURE_VAR - $VAR_FROM_STDENV_SETUP - $VAR_FROM_NIX - $TEST_inNixShell"') = " - foo - bar - false" ]] @@ -70,7 +79,7 @@ sed -e "s|@ENV_PROG@|$(type -P env)|" shell.shebang.expr > $TEST_ROOT/shell.sheb chmod a+rx $TEST_ROOT/shell.shebang.expr # Should fail due to expressions using relative path ! $TEST_ROOT/shell.shebang.expr bar -cp shell.nix config.nix $TEST_ROOT +cp shell.nix "${config_nix}" $TEST_ROOT # Should succeed echo "cwd: $PWD" output=$($TEST_ROOT/shell.shebang.expr bar) @@ -117,7 +126,7 @@ $TEST_ROOT/shell.shebang.nix mkdir $TEST_ROOT/lookup-test $TEST_ROOT/empty echo "import $shellDotNix" > $TEST_ROOT/lookup-test/shell.nix -cp config.nix $TEST_ROOT/lookup-test/ +cp "${config_nix}" $TEST_ROOT/lookup-test/ echo 'abort "do not load default.nix!"' > $TEST_ROOT/lookup-test/default.nix nix-shell $TEST_ROOT/lookup-test -A shellDrv --run 'echo "it works"' | grepQuiet "it works" diff --git a/tests/functional/nul-character.nar b/tests/functional/nul-character.nar new file mode 100644 index 000000000..9ae48baf6 Binary files /dev/null and b/tests/functional/nul-character.nar differ diff --git a/tests/functional/optimise-store.sh b/tests/functional/optimise-store.sh index 0bedafc43..05c4c41e4 100755 --- a/tests/functional/optimise-store.sh +++ b/tests/functional/optimise-store.sh @@ -4,8 +4,8 @@ source common.sh clearStoreIfPossible -outPath1=$(echo 'with import ./config.nix; mkDerivation { name = "foo1"; builder = builtins.toFile "builder" "mkdir $out; echo hello > $out/foo"; }' | nix-build - --no-out-link --auto-optimise-store) -outPath2=$(echo 'with import ./config.nix; mkDerivation { name = "foo2"; builder = builtins.toFile "builder" "mkdir $out; echo hello > $out/foo"; }' | nix-build - --no-out-link --auto-optimise-store) +outPath1=$(echo 'with import '"${config_nix}"'; mkDerivation { name = "foo1"; builder = builtins.toFile "builder" "mkdir $out; echo hello > $out/foo"; }' | nix-build - --no-out-link --auto-optimise-store) +outPath2=$(echo 'with import '"${config_nix}"'; mkDerivation { name = "foo2"; builder = builtins.toFile "builder" "mkdir $out; echo hello > $out/foo"; }' | nix-build - --no-out-link --auto-optimise-store) TODO_NixOS # ignoring the client-specified setting 'auto-optimise-store', because it is a restricted setting and you are not a trusted user # 
TODO: only continue when trusted user or root @@ -23,7 +23,7 @@ if [ "$nlink" != 3 ]; then exit 1 fi -outPath3=$(echo 'with import ./config.nix; mkDerivation { name = "foo3"; builder = builtins.toFile "builder" "mkdir $out; echo hello > $out/foo"; }' | nix-build - --no-out-link) +outPath3=$(echo 'with import '"${config_nix}"'; mkDerivation { name = "foo3"; builder = builtins.toFile "builder" "mkdir $out; echo hello > $out/foo"; }' | nix-build - --no-out-link) inode3="$(stat --format=%i $outPath3/foo)" if [ "$inode1" = "$inode3" ]; then diff --git a/tests/functional/package.nix b/tests/functional/package.nix index 277711123..d1582b05d 100644 --- a/tests/functional/package.nix +++ b/tests/functional/package.nix @@ -1,12 +1,10 @@ { lib , stdenv , mkMesonDerivation -, releaseTools , meson , ninja , pkg-config -, rsync , jq , git @@ -17,14 +15,11 @@ , nix-expr , nix-cli -, rapidcheck -, gtest -, runCommand - , busybox-sandbox-shell ? null # Configuration Options +, pname ? "nix-functional-tests" , version # For running the functional tests against a different pre-built Nix. @@ -36,8 +31,7 @@ let in mkMesonDerivation (finalAttrs: { - pname = "nix-functional-tests"; - inherit version; + inherit pname version; workDir = ./.; fileset = fileset.unions [ @@ -48,11 +42,10 @@ mkMesonDerivation (finalAttrs: { ]; # Hack for sake of the dev shell - passthru.baseNativeBuildInputs = [ + passthru.externalNativeBuildInputs = [ meson ninja pkg-config - rsync jq git @@ -62,11 +55,12 @@ mkMesonDerivation (finalAttrs: { # etc. busybox-sandbox-shell # For Overlay FS tests need `mount`, `umount`, and `unshare`. + # For `script` command (ensuring a TTY) # TODO use `unixtools` to be precise over which executables instead? util-linux ]; - nativeBuildInputs = finalAttrs.passthru.baseNativeBuildInputs ++ [ + nativeBuildInputs = finalAttrs.passthru.externalNativeBuildInputs ++ [ nix-cli ]; @@ -75,7 +69,6 @@ mkMesonDerivation (finalAttrs: { nix-expr ]; - preConfigure = # "Inline" .version so it's not a symlink, and includes the suffix. # Do the meson utils, without modification. @@ -95,13 +88,6 @@ mkMesonDerivation (finalAttrs: { "--print-errorlogs" ]; - preCheck = - # See https://github.com/NixOS/nix/issues/2523 - # Occurs often in tests since https://github.com/NixOS/nix/pull/9900 - lib.optionalString stdenv.hostPlatform.isDarwin '' - export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES - ''; - doCheck = true; installPhase = '' diff --git a/tests/functional/parallel.nix b/tests/functional/parallel.nix index 23f142059..1f2411c92 100644 --- a/tests/functional/parallel.nix +++ b/tests/functional/parallel.nix @@ -1,6 +1,6 @@ {sleepTime ? 
3}: -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; let diff --git a/tests/functional/pass-as-file.sh b/tests/functional/pass-as-file.sh index 6487bfffd..66a8e588e 100755 --- a/tests/functional/pass-as-file.sh +++ b/tests/functional/pass-as-file.sh @@ -5,7 +5,7 @@ source common.sh clearStoreIfPossible outPath=$(nix-build --no-out-link -E " -with import ./config.nix; +with import ${config_nix}; mkDerivation { name = \"pass-as-file\"; diff --git a/tests/functional/path.nix b/tests/functional/path.nix index 883c3c41b..b23300f90 100644 --- a/tests/functional/path.nix +++ b/tests/functional/path.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; mkDerivation { name = "filter"; diff --git a/tests/functional/placeholders.sh b/tests/functional/placeholders.sh index 33ec0c2b7..374203af8 100755 --- a/tests/functional/placeholders.sh +++ b/tests/functional/placeholders.sh @@ -5,7 +5,7 @@ source common.sh clearStoreIfPossible nix-build --no-out-link -E ' - with import ./config.nix; + with import '"${config_nix}"'; mkDerivation { name = "placeholders"; diff --git a/tests/functional/plugins.sh b/tests/functional/plugins.sh index fc2d1907c..b685a8878 100755 --- a/tests/functional/plugins.sh +++ b/tests/functional/plugins.sh @@ -3,7 +3,7 @@ source common.sh for ext in so dylib; do - plugin="$PWD/plugins/libplugintest.$ext" + plugin="${_NIX_TEST_BUILD_DIR}/plugins/libplugintest.$ext" [[ -f "$plugin" ]] && break done diff --git a/tests/functional/readfile-context.nix b/tests/functional/readfile-context.nix index 54cd1afd9..b8f4a4c27 100644 --- a/tests/functional/readfile-context.nix +++ b/tests/functional/readfile-context.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; let diff --git a/tests/functional/recursive.nix b/tests/functional/recursive.nix index fa8cc04db..fe438f0ba 100644 --- a/tests/functional/recursive.nix +++ b/tests/functional/recursive.nix @@ -1,4 +1,5 @@ -with import ./config.nix; +let config_nix = /. + "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; in +with import config_nix; mkDerivation rec { name = "recursive"; @@ -41,7 +42,7 @@ mkDerivation rec { # Build a derivation. nix $opts build -L --impure --expr ' - with import ${./config.nix}; + with import ${config_nix}; mkDerivation { name = "inner1"; buildCommand = "echo $fnord blaat > $out"; diff --git a/tests/functional/repl/doc-functor.expected b/tests/functional/repl/doc-functor.expected new file mode 100644 index 000000000..8cb2706ef --- /dev/null +++ b/tests/functional/repl/doc-functor.expected @@ -0,0 +1,101 @@ +Nix +Type :? for help. + +nix-repl> :l doc-functor.nix +Added variables. + +nix-repl> :doc multiplier +Function `__functor`\ + … defined at /path/to/tests/functional/repl/doc-functor.nix:12:23 + + +Multiply the argument by the factor stored in the factor attribute. + +nix-repl> :doc doubler +Function `multiply`\ + … defined at /path/to/tests/functional/repl/doc-functor.nix:5:17 + + +Look, it's just like a function! + +nix-repl> :doc recursive +Function `__functor`\ + … defined at /path/to/tests/functional/repl/doc-functor.nix:77:23 + + +This looks bad, but the docs are ok because of the eta expansion. 
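
As an illustrative aside (not part of the patch): the `:doc` lookups in this expected output work because an attribute set with a `__functor` attribute is callable like a function. A minimal sketch, assuming a `nix` CLI with the `nix-command` experimental feature available:

    # Hypothetical demo, not from the test suite: call a functor attrset directly.
    nix eval --extra-experimental-features nix-command \
      --expr 'let multiplier = { factor = 2; __functor = self: x: x * self.factor; }; in multiplier 21'
    # prints: 42
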
+ +nix-repl> :doc recursive2 +error: + … while partially calling '__functor' to retrieve documentation + + … while calling '__functor' + at /path/to/tests/functional/repl/doc-functor.nix:85:17: + 84| */ + 85| __functor = self: self.__functor self; + | ^ + 86| }; + + … from call site + at /path/to/tests/functional/repl/doc-functor.nix:85:23: + 84| */ + 85| __functor = self: self.__functor self; + | ^ + 86| }; + + (19999 duplicate frames omitted) + + error: stack overflow; max-call-depth exceeded + at /path/to/tests/functional/repl/doc-functor.nix:85:23: + 84| */ + 85| __functor = self: self.__functor self; + | ^ + 86| }; + +nix-repl> :doc diverging +error: + … while partially calling '__functor' to retrieve documentation + + (10000 duplicate frames omitted) + + … while calling '__functor' + at /path/to/tests/functional/repl/doc-functor.nix:97:19: + 96| f = x: { + 97| __functor = self: (f (x + 1)); + | ^ + 98| }; + + error: stack overflow; max-call-depth exceeded + at /path/to/tests/functional/repl/doc-functor.nix:97:26: + 96| f = x: { + 97| __functor = self: (f (x + 1)); + | ^ + 98| }; + +nix-repl> :doc helper +Function `square`\ + … defined at /path/to/tests/functional/repl/doc-functor.nix:36:12 + + +Compute x^2 + +nix-repl> :doc helper2 +Function `__functor`\ + … defined at /path/to/tests/functional/repl/doc-functor.nix:45:23 + + +This is a function that can be overridden. + +nix-repl> :doc lib.helper3 +Function `__functor`\ + … defined at /path/to/tests/functional/repl/doc-functor.nix:45:23 + + +This is a function that can be overridden. + +nix-repl> :doc helper3 +Function `__functor`\ + … defined at /path/to/tests/functional/repl/doc-functor.nix:45:23 + + +This is a function that can be overridden. diff --git a/tests/functional/repl/doc-functor.in b/tests/functional/repl/doc-functor.in new file mode 100644 index 000000000..d2bb57a02 --- /dev/null +++ b/tests/functional/repl/doc-functor.in @@ -0,0 +1,10 @@ +:l doc-functor.nix +:doc multiplier +:doc doubler +:doc recursive +:doc recursive2 +:doc diverging +:doc helper +:doc helper2 +:doc lib.helper3 +:doc helper3 diff --git a/tests/functional/repl/doc-functor.nix b/tests/functional/repl/doc-functor.nix new file mode 100644 index 000000000..f526f453f --- /dev/null +++ b/tests/functional/repl/doc-functor.nix @@ -0,0 +1,101 @@ +rec { + /** + Look, it's just like a function! + */ + multiply = p: q: p * q; + + multiplier = { + factor = 2; + /** + Multiply the argument by the factor stored in the factor attribute. + */ + __functor = self: x: x * self.factor; + }; + + doubler = { + description = "bla"; + /** + Multiply by two. This doc probably won't be rendered because the + returned partial application won't have any reference to this location; + only pointing to the second lambda in the multiply function. + */ + __functor = self: multiply 2; + }; + + makeOverridable = f: { + /** + This is a function that can be overridden. + */ + __functor = self: f; + override = throw "not implemented"; + }; + + /** + Compute x^2 + */ + square = x: x * x; + + helper = makeOverridable square; + + # Somewhat analogous to the Nixpkgs makeOverridable function. + makeVeryOverridable = f: { + /** + This is a function that can be overridden. + */ + __functor = self: arg: f arg // { override = throw "not implemented"; overrideAttrs = throw "not implemented"; }; + override = throw "not implemented"; + }; + + helper2 = makeVeryOverridable square; + + # The RFC might be ambiguous here. 
The doc comment from makeVeryOverridable + # is "inner" in terms of values, but not inner in terms of expressions. + # Returning the following attribute comment might be allowed. + # TODO: I suppose we could look whether the attribute value expression + # contains a doc, and if not, return the attribute comment anyway? + + /** + Compute x^3 + */ + lib.helper3 = makeVeryOverridable (x: x * x * x); + + /** + Compute x^3... + */ + helper3 = makeVeryOverridable (x: x * x * x); + + + # ------ + + # getDoc traverses a potentially infinite structure in case of __functor, so + # we need to test with recursive inputs and diverging inputs. + + recursive = { + /** + This looks bad, but the docs are ok because of the eta expansion. + */ + __functor = self: x: self x; + }; + + recursive2 = { + /** + Docs probably won't work in this case, because the "partial" application + of self results in an infinite recursion. + */ + __functor = self: self.__functor self; + }; + + diverging = let + /** + Docs probably won't work in this case, because the "partial" application + of self results in an diverging computation that causes a stack overflow. + It's not an infinite recursion because each call is different. + This must be handled by the documentation retrieval logic, as it + reimplements the __functor invocation to be partial. + */ + f = x: { + __functor = self: (f (x + 1)); + }; + in f null; + +} diff --git a/tests/functional/restricted.sh b/tests/functional/restricted.sh index e5fe9c136..63bf56cd7 100755 --- a/tests/functional/restricted.sh +++ b/tests/functional/restricted.sh @@ -7,8 +7,19 @@ clearStoreIfPossible nix-instantiate --restrict-eval --eval -E '1 + 2' (! nix-instantiate --eval --restrict-eval ./restricted.nix) (! nix-instantiate --eval --restrict-eval <(echo '1 + 2')) + +mkdir -p "$TEST_ROOT/nix" +cp ./simple.nix "$TEST_ROOT/nix" +cp ./simple.builder.sh "$TEST_ROOT/nix" +cp "${config_nix}" "$TEST_ROOT/nix" +simple_nix="$TEST_ROOT/nix/simple.nix" +# N.B. redefine +config_nix="$TEST_ROOT/nix/config.nix" +removeBuildDirRef "${simple_nix}" +cd "$TEST_ROOT/nix" + nix-instantiate --restrict-eval ./simple.nix -I src=. -nix-instantiate --restrict-eval ./simple.nix -I src1=simple.nix -I src2=config.nix -I src3=./simple.builder.sh +nix-instantiate --restrict-eval ./simple.nix -I src1=./simple.nix -I src2=./config.nix -I src3=./simple.builder.sh # no default NIX_PATH (unset NIX_PATH; ! nix-instantiate --restrict-eval --find-file .) @@ -19,25 +30,25 @@ nix-instantiate --restrict-eval --eval -E 'builtins.readFile ./simple.nix' -I sr expectStderr 1 nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' | grepQuiet "forbidden in restricted mode" nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' -I src=. -p=$(nix eval --raw --expr "builtins.fetchurl file://$(pwd)/restricted.sh" --impure --restrict-eval --allowed-uris "file://$(pwd)") -cmp $p restricted.sh +p=$(nix eval --raw --expr "builtins.fetchurl file://${_NIX_TEST_SOURCE_DIR}/restricted.sh" --impure --restrict-eval --allowed-uris "file://${_NIX_TEST_SOURCE_DIR}") +cmp "$p" "${_NIX_TEST_SOURCE_DIR}/restricted.sh" -(! nix eval --raw --expr "builtins.fetchurl file://$(pwd)/restricted.sh" --impure --restrict-eval) +(! nix eval --raw --expr "builtins.fetchurl file://${_NIX_TEST_SOURCE_DIR}/restricted.sh" --impure --restrict-eval) -(! 
nix eval --raw --expr "builtins.fetchurl file://$(pwd)/restricted.sh" --impure --restrict-eval --allowed-uris "file://$(pwd)/restricted.sh/") +(! nix eval --raw --expr "builtins.fetchurl file://${_NIX_TEST_SOURCE_DIR}/restricted.sh" --impure --restrict-eval --allowed-uris "file://${_NIX_TEST_SOURCE_DIR}/restricted.sh/") -nix eval --raw --expr "builtins.fetchurl file://$(pwd)/restricted.sh" --impure --restrict-eval --allowed-uris "file://$(pwd)/restricted.sh" +nix eval --raw --expr "builtins.fetchurl file://${_NIX_TEST_SOURCE_DIR}/restricted.sh" --impure --restrict-eval --allowed-uris "file://${_NIX_TEST_SOURCE_DIR}/restricted.sh" (! nix eval --raw --expr "builtins.fetchurl https://github.com/NixOS/patchelf/archive/master.tar.gz" --impure --restrict-eval) (! nix eval --raw --expr "builtins.fetchTarball https://github.com/NixOS/patchelf/archive/master.tar.gz" --impure --restrict-eval) (! nix eval --raw --expr "fetchGit git://github.com/NixOS/patchelf.git" --impure --restrict-eval) -ln -sfn $(pwd)/restricted.nix $TEST_ROOT/restricted.nix +ln -sfn "${_NIX_TEST_SOURCE_DIR}/restricted.nix" "$TEST_ROOT/restricted.nix" [[ $(nix-instantiate --eval $TEST_ROOT/restricted.nix) == 3 ]] (! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix) (! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I $TEST_ROOT) (! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I .) -nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I $TEST_ROOT -I . +nix-instantiate --eval --restrict-eval "$TEST_ROOT/restricted.nix" -I "$TEST_ROOT" -I "${_NIX_TEST_SOURCE_DIR}" [[ $(nix eval --raw --impure --restrict-eval -I . --expr 'builtins.readFile "${import ./simple.nix}/hello"') == 'Hello World!' ]] @@ -54,12 +65,12 @@ expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { pr [[ $(nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readDir " -I $TEST_ROOT/tunnel.d) == '{ "tunnel.d" = "directory"; }' ]] # Check whether we can leak symlink information through directory traversal. 
-traverseDir="$(pwd)/restricted-traverse-me" -ln -sfn "$(pwd)/restricted-secret" "$(pwd)/restricted-innocent" +traverseDir="${_NIX_TEST_SOURCE_DIR}/restricted-traverse-me" +ln -sfn "${_NIX_TEST_SOURCE_DIR}/restricted-secret" "${_NIX_TEST_SOURCE_DIR}/restricted-innocent" mkdir -p "$traverseDir" goUp="..$(echo "$traverseDir" | sed -e 's,[^/]\+,..,g')" output="$(nix eval --raw --restrict-eval -I "$traverseDir" \ - --expr "builtins.readFile \"$traverseDir/$goUp$(pwd)/restricted-innocent\"" \ + --expr "builtins.readFile \"$traverseDir/$goUp${_NIX_TEST_SOURCE_DIR}/restricted-innocent\"" \ 2>&1 || :)" echo "$output" | grep "is forbidden" echo "$output" | grepInverse -F restricted-secret diff --git a/tests/functional/search.nix b/tests/functional/search.nix index fea6e7a7a..3c3564bda 100644 --- a/tests/functional/search.nix +++ b/tests/functional/search.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; { hello = mkDerivation rec { diff --git a/tests/functional/secure-drv-outputs.nix b/tests/functional/secure-drv-outputs.nix index b4ac8ff53..cd111c315 100644 --- a/tests/functional/secure-drv-outputs.nix +++ b/tests/functional/secure-drv-outputs.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; { diff --git a/tests/functional/selfref-gc.sh b/tests/functional/selfref-gc.sh index 518aea66b..dc4f14cc1 100755 --- a/tests/functional/selfref-gc.sh +++ b/tests/functional/selfref-gc.sh @@ -7,7 +7,7 @@ requireDaemonNewerThan "2.6.0pre20211215" clearStoreIfPossible nix-build --no-out-link -E ' - with import ./config.nix; + with import '"${config_nix}"'; let d1 = mkDerivation { name = "selfref-gc"; diff --git a/tests/functional/shell-hello.nix b/tests/functional/shell-hello.nix index c920d7cb4..fa02e2bb4 100644 --- a/tests/functional/shell-hello.nix +++ b/tests/functional/shell-hello.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; rec { hello = mkDerivation { diff --git a/tests/functional/shell.nix b/tests/functional/shell.nix index 9cae14b78..f6622a487 100644 --- a/tests/functional/shell.nix +++ b/tests/functional/shell.nix @@ -1,6 +1,6 @@ { inNixShell ? false, contentAddressed ? false, fooContents ? "foo" }: -let cfg = import ./config.nix; in +let cfg = import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; in with cfg; let diff --git a/tests/functional/shell.sh b/tests/functional/shell.sh index 7590d0121..30331e409 100755 --- a/tests/functional/shell.sh +++ b/tests/functional/shell.sh @@ -27,22 +27,24 @@ expect 1 nix shell -f shell-hello.nix forbidden-symlink -c hello 2>&1 | grepQuie # For instance, we might set an environment variable temporarily to affect some # initialization or whatnot, but this must not leak into the environment of the # command being run. -env > $TEST_ROOT/expected-env -nix shell -f shell-hello.nix hello -c env > $TEST_ROOT/actual-env +env > "$TEST_ROOT/expected-env" +nix shell -f shell-hello.nix hello -c env > "$TEST_ROOT/actual-env" # Remove/reset variables we expect to be different. 
# - PATH is modified by nix shell +# - we unset TMPDIR on macOS if it contains /var/folders # - _ is set by bash and is expectedf to differ because it contains the original command # - __CF_USER_TEXT_ENCODING is set by macOS and is beyond our control # - IN_NIX3_SHELL is set by nix shell sed -i \ -e 's/PATH=.*/PATH=.../' \ -e 's/_=.*/_=.../' \ + -e '/^TMPDIR=\/var\/folders\/.*/d' \ -e '/^__CF_USER_TEXT_ENCODING=.*$/d' \ -e '/^IN_NIX3_SHELL=1$/d' \ - $TEST_ROOT/expected-env $TEST_ROOT/actual-env -sort $TEST_ROOT/expected-env > $TEST_ROOT/expected-env.sorted -sort $TEST_ROOT/actual-env > $TEST_ROOT/actual-env.sorted -diff $TEST_ROOT/expected-env.sorted $TEST_ROOT/actual-env.sorted + "$TEST_ROOT/expected-env" "$TEST_ROOT/actual-env" +sort "$TEST_ROOT/expected-env" > "$TEST_ROOT/expected-env.sorted" +sort "$TEST_ROOT/actual-env" > "$TEST_ROOT/actual-env.sorted" +diff "$TEST_ROOT/expected-env.sorted" "$TEST_ROOT/actual-env.sorted" if isDaemonNewer "2.20.0pre20231220"; then # Test that command line attribute ordering is reflected in the PATH @@ -53,8 +55,8 @@ fi requireSandboxSupport -chmod -R u+w $TEST_ROOT/store0 || true -rm -rf $TEST_ROOT/store0 +chmod -R u+w "$TEST_ROOT/store0" || true +rm -rf "$TEST_ROOT/store0" clearStore @@ -64,10 +66,10 @@ path=$(nix eval --raw -f shell-hello.nix hello) # visible in the sandbox. nix shell --sandbox-build-dir /build-tmp \ --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' \ - --store $TEST_ROOT/store0 -f shell-hello.nix hello -c hello | grep 'Hello World' + --store "$TEST_ROOT/store0" -f shell-hello.nix hello -c hello | grep 'Hello World' -path2=$(nix shell --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' --store $TEST_ROOT/store0 -f shell-hello.nix hello -c $SHELL -c 'type -p hello') +path2=$(nix shell --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' 
--store "$TEST_ROOT/store0" -f shell-hello.nix hello -c "$SHELL" -c 'type -p hello') -[[ $path/bin/hello = $path2 ]] +[[ "$path/bin/hello" = "$path2" ]] -[[ -e $TEST_ROOT/store0/nix/store/$(basename $path)/bin/hello ]] +[[ -e $TEST_ROOT/store0/nix/store/$(basename "$path")/bin/hello ]] diff --git a/tests/functional/simple-failing.nix b/tests/functional/simple-failing.nix index d176c9c51..228971734 100644 --- a/tests/functional/simple-failing.nix +++ b/tests/functional/simple-failing.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; mkDerivation { name = "simple-failing"; diff --git a/tests/functional/simple.nix b/tests/functional/simple.nix index 2035ca294..96237695c 100644 --- a/tests/functional/simple.nix +++ b/tests/functional/simple.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; mkDerivation { name = "simple"; diff --git a/tests/functional/slash.nar b/tests/functional/slash.nar new file mode 100644 index 000000000..118a60216 Binary files /dev/null and b/tests/functional/slash.nar differ diff --git a/tests/functional/structured-attrs-shell.nix b/tests/functional/structured-attrs-shell.nix index 57c1e6bd2..7ed28c03f 100644 --- a/tests/functional/structured-attrs-shell.nix +++ b/tests/functional/structured-attrs-shell.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; let dep = mkDerivation { name = "dep"; diff --git a/tests/functional/structured-attrs.nix b/tests/functional/structured-attrs.nix index e93139a44..ae461c21a 100644 --- a/tests/functional/structured-attrs.nix +++ b/tests/functional/structured-attrs.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; let diff --git a/tests/functional/supplementary-groups.sh b/tests/functional/supplementary-groups.sh index 5d329efc9..50259a3e1 100755 --- a/tests/functional/supplementary-groups.sh +++ b/tests/functional/supplementary-groups.sh @@ -9,7 +9,7 @@ needLocalStore "The test uses --store always so we would just be bypassing the d TODO_NixOS -unshare --mount --map-root-user bash <; 1 + 2' -I fnord=file:///no-such-tarball"$ext" (! nix-instantiate --eval -E ' 1' -I fnord=file:///no-such-tarball"$ext") - nix-instantiate --eval -E '' -I fnord=file:///no-such-tarball"$ext" -I fnord=. + nix-instantiate --eval -E '' -I fnord=file:///no-such-tarball"$ext" -I fnord="${_NIX_TEST_BUILD_DIR}" # Ensure that the `name` attribute isn’t accepted as that would mess # with the content-addressing @@ -100,3 +97,17 @@ chmod +x "$TEST_ROOT/tar_root/foo" tar cvf "$TEST_ROOT/tar.tar" -C "$TEST_ROOT/tar_root" . path="$(nix flake prefetch --refresh --json "tarball+file://$TEST_ROOT/tar.tar" | jq -r .storePath)" [[ $(cat "$path/foo") = bar ]] + +# Test a tarball with non-contiguous directory entries. +rm -rf "$TEST_ROOT/tar_root" +mkdir -p "$TEST_ROOT/tar_root/a/b" +echo foo > "$TEST_ROOT/tar_root/a/b/foo" +echo bla > "$TEST_ROOT/tar_root/bla" +tar cvf "$TEST_ROOT/tar.tar" -C "$TEST_ROOT/tar_root" . 
+echo abc > "$TEST_ROOT/tar_root/bla" +echo xyzzy > "$TEST_ROOT/tar_root/a/b/xyzzy" +tar rvf "$TEST_ROOT/tar.tar" -C "$TEST_ROOT/tar_root" ./a/b/xyzzy ./bla +path="$(nix flake prefetch --refresh --json "tarball+file://$TEST_ROOT/tar.tar" | jq -r .storePath)" +[[ $(cat "$path/a/b/xyzzy") = xyzzy ]] +[[ $(cat "$path/a/b/foo") = foo ]] +[[ $(cat "$path/bla") = abc ]] diff --git a/tests/functional/test-libstoreconsumer.sh b/tests/functional/test-libstoreconsumer.sh index 2adead1c0..5b019a1d0 100755 --- a/tests/functional/test-libstoreconsumer.sh +++ b/tests/functional/test-libstoreconsumer.sh @@ -4,5 +4,5 @@ source common.sh drv="$(nix-instantiate simple.nix)" cat "$drv" -out="$(./test-libstoreconsumer/test-libstoreconsumer "$drv")" +out="$("${_NIX_TEST_BUILD_DIR}/test-libstoreconsumer/test-libstoreconsumer" "$drv")" grep -F "Hello World!" < "$out/hello" diff --git a/tests/functional/timeout.nix b/tests/functional/timeout.nix index d0e949e31..ad71e61e2 100644 --- a/tests/functional/timeout.nix +++ b/tests/functional/timeout.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; { diff --git a/tests/functional/unnormalized.nar b/tests/functional/unnormalized.nar new file mode 100644 index 000000000..4b7edb17e Binary files /dev/null and b/tests/functional/unnormalized.nar differ diff --git a/tests/functional/user-envs.nix b/tests/functional/user-envs.nix index 46f8b51dd..c8e846d4b 100644 --- a/tests/functional/user-envs.nix +++ b/tests/functional/user-envs.nix @@ -2,7 +2,7 @@ { foo ? "foo" }: -with import ./config.nix; +with import "${builtins.getEnv "_NIX_TEST_BUILD_DIR"}/config.nix"; assert foo == "foo"; diff --git a/tests/functional/why-depends.sh b/tests/functional/why-depends.sh index ce53546d8..45d1f2f0b 100755 --- a/tests/functional/why-depends.sh +++ b/tests/functional/why-depends.sh @@ -4,7 +4,7 @@ source common.sh clearStoreIfPossible -cp ./dependencies.nix ./dependencies.builder0.sh ./config.nix $TEST_HOME +cp ./dependencies.nix ./dependencies.builder0.sh "${config_nix}" $TEST_HOME cd $TEST_HOME diff --git a/tests/functional/zstd.sh b/tests/functional/zstd.sh index 90fe58539..450fb7d35 100755 --- a/tests/functional/zstd.sh +++ b/tests/functional/zstd.sh @@ -11,22 +11,22 @@ cacheURI="file://$cacheDir?compression=zstd" outPath=$(nix-build dependencies.nix --no-out-link) -nix copy --to $cacheURI $outPath +nix copy --to "$cacheURI" "$outPath" -HASH=$(nix hash path $outPath) +HASH=$(nix hash path "$outPath") clearStore clearCacheCache -nix copy --from $cacheURI $outPath --no-check-sigs +nix copy --from "$cacheURI" "$outPath" --no-check-sigs -if ls $cacheDir/nar/*.zst &> /dev/null; then +if ls "$cacheDir/nar/"*.zst &> /dev/null; then echo "files do exist" else echo "nars do not exist" exit 1 fi -HASH2=$(nix hash path $outPath) +HASH2=$(nix hash path "$outPath") -[[ $HASH = $HASH2 ]] +[[ "$HASH" = "$HASH2" ]] diff --git a/tests/nixos/cgroups/default.nix b/tests/nixos/cgroups/default.nix new file mode 100644 index 000000000..b8febbf4b --- /dev/null +++ b/tests/nixos/cgroups/default.nix @@ -0,0 +1,40 @@ +{ nixpkgs, ... }: + +{ + name = "cgroups"; + + nodes = + { + host = + { config, pkgs, ... 
}: + { virtualisation.additionalPaths = [ pkgs.stdenvNoCC ]; + nix.extraOptions = + '' + extra-experimental-features = nix-command auto-allocate-uids cgroups + extra-system-features = uid-range + ''; + nix.settings.use-cgroups = true; + nix.nixPath = [ "nixpkgs=${nixpkgs}" ]; + }; + }; + + testScript = { nodes }: '' + start_all() + + host.wait_for_unit("multi-user.target") + + # Start build in background + host.execute("NIX_REMOTE=daemon nix build --auto-allocate-uids --file ${./hang.nix} >&2 &") + service = "/sys/fs/cgroup/system.slice/nix-daemon.service" + + # Wait for cgroups to be created + host.succeed(f"until [ -e {service}/nix-daemon ]; do sleep 1; done", timeout=30) + host.succeed(f"until [ -e {service}/nix-build-uid-* ]; do sleep 1; done", timeout=30) + + # Check that there aren't processes where there shouldn't be, and that there are where there should be + host.succeed(f'[ -z "$(cat {service}/cgroup.procs)" ]') + host.succeed(f'[ -n "$(cat {service}/nix-daemon/cgroup.procs)" ]') + host.succeed(f'[ -n "$(cat {service}/nix-build-uid-*/cgroup.procs)" ]') + ''; + +} diff --git a/tests/nixos/cgroups/hang.nix b/tests/nixos/cgroups/hang.nix new file mode 100644 index 000000000..cefe2d031 --- /dev/null +++ b/tests/nixos/cgroups/hang.nix @@ -0,0 +1,10 @@ +{ }: + +with import <nixpkgs> {}; + +runCommand "hang" + { requiredSystemFeatures = "uid-range"; + } + '' + sleep infinity + '' diff --git a/tests/nixos/chroot-store.nix b/tests/nixos/chroot-store.nix new file mode 100644 index 000000000..4b167fc38 --- /dev/null +++ b/tests/nixos/chroot-store.nix @@ -0,0 +1,31 @@ +{ lib, config, nixpkgs, ... }: + +let + pkgs = config.nodes.machine.nixpkgs.pkgs; + pkgA = pkgs.hello; + pkgB = pkgs.cowsay; +in { + name = "chroot-store"; + + nodes = + { machine = + { config, lib, pkgs, ... }: + { virtualisation.writableStore = true; + virtualisation.additionalPaths = [ pkgA ]; + environment.systemPackages = [ pkgB ]; + nix.extraOptions = "experimental-features = nix-command"; + }; + }; + + testScript = { nodes }: '' + # fmt: off + start_all() + + machine.succeed("nix copy --no-check-sigs --to /tmp/nix ${pkgA}") + + machine.succeed("nix shell --store /tmp/nix ${pkgA} --command hello >&2") + + # Test that /nix/store is available via an overlayfs mount. + machine.succeed("nix shell --store /tmp/nix ${pkgA} --command cowsay foo >&2") + ''; +} diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index 66174c928..17bfdea38 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -4,20 +4,25 @@ let nixos-lib = import (nixpkgs + "/nixos/lib") { }; + noTests = pkg: pkg.overrideAttrs ( + finalAttrs: prevAttrs: { + doCheck = false; + doInstallCheck = false; + }); + # https://nixos.org/manual/nixos/unstable/index.html#sec-calling-nixos-tests runNixOSTestFor = system: test: (nixos-lib.runTest { imports = [ test - - # Add the quickBuild attribute to the check packages - ./quick-build.nix ]; hostPkgs = nixpkgsFor.${system}.native; defaults = { nixpkgs.pkgs = nixpkgsFor.${system}.native; nix.checkAllErrors = false; + # TODO: decide which packaging stage to use. `nix-cli` is efficient, but not the same as the user-facing `everything.nix` package (`default`). Perhaps a good compromise is `everything.nix` + `noTests` defined above? 
+ nix.package = nixpkgsFor.${system}.native.nixComponents.nix-cli; }; _module.args.nixpkgs = nixpkgs; _module.args.system = system; @@ -29,7 +34,9 @@ let forNix = nixVersion: runNixOSTestFor system { imports = [test]; defaults.nixpkgs.overlays = [(curr: prev: { - nix = (builtins.getFlake "nix/${nixVersion}").packages.${system}.nix; + nix = let + packages = (builtins.getFlake "nix/${nixVersion}").packages.${system}; + in packages.nix-cli or packages.nix; })]; }; }; @@ -148,4 +155,12 @@ in user-sandboxing = runNixOSTestFor "x86_64-linux" ./user-sandboxing; s3-binary-cache-store = runNixOSTestFor "x86_64-linux" ./s3-binary-cache-store.nix; + + fsync = runNixOSTestFor "x86_64-linux" ./fsync.nix; + + cgroups = runNixOSTestFor "x86_64-linux" ./cgroups; + + fetchurl = runNixOSTestFor "x86_64-linux" ./fetchurl.nix; + + chrootStore = runNixOSTestFor "x86_64-linux" ./chroot-store.nix; } diff --git a/tests/nixos/fetchurl.nix b/tests/nixos/fetchurl.nix new file mode 100644 index 000000000..243c0cacc --- /dev/null +++ b/tests/nixos/fetchurl.nix @@ -0,0 +1,84 @@ +# Test whether builtin:fetchurl properly performs TLS certificate +# checks on HTTPS servers. + +{ pkgs, ... }: + +let + + makeTlsCert = name: pkgs.runCommand name { + nativeBuildInputs = with pkgs; [ openssl ]; + } '' + mkdir -p $out + openssl req -x509 \ + -subj '/CN=${name}/' -days 49710 \ + -addext 'subjectAltName = DNS:${name}' \ + -keyout "$out/key.pem" -newkey ed25519 \ + -out "$out/cert.pem" -noenc + ''; + + goodCert = makeTlsCert "good"; + badCert = makeTlsCert "bad"; + +in + +{ + name = "nss-preload"; + + nodes = { + machine = { pkgs, ... }: { + services.nginx = { + enable = true; + + virtualHosts."good" = { + addSSL = true; + sslCertificate = "${goodCert}/cert.pem"; + sslCertificateKey = "${goodCert}/key.pem"; + root = pkgs.runCommand "nginx-root" {} '' + mkdir "$out" + echo 'hello world' > "$out/index.html" + ''; + }; + + virtualHosts."bad" = { + addSSL = true; + sslCertificate = "${badCert}/cert.pem"; + sslCertificateKey = "${badCert}/key.pem"; + root = pkgs.runCommand "nginx-root" {} '' + mkdir "$out" + echo 'foobar' > "$out/index.html" + ''; + }; + }; + + security.pki.certificateFiles = [ "${goodCert}/cert.pem" ]; + + networking.hosts."127.0.0.1" = [ "good" "bad" ]; + + virtualisation.writableStore = true; + + nix.settings.experimental-features = "nix-command"; + }; + }; + + testScript = '' + machine.wait_for_unit("nginx") + machine.wait_for_open_port(443) + + out = machine.succeed("curl https://good/index.html") + assert out == "hello world\n" + + out = machine.succeed("cat ${badCert}/cert.pem > /tmp/cafile.pem; curl --cacert /tmp/cafile.pem https://bad/index.html") + assert out == "foobar\n" + + # Fetching from a server with a trusted cert should work. + machine.succeed("nix build --no-substitute --expr 'import <nix/fetchurl.nix> { url = \"https://good/index.html\"; hash = \"sha256-qUiQTy8PR5uPgZdpSzAYSw0u0cHNKh7A+4XSmaGSpEc=\"; }'") + + # Fetching from a server with an untrusted cert should fail. + err = machine.fail("nix build --no-substitute --expr 'import <nix/fetchurl.nix> { url = \"https://bad/index.html\"; hash = \"sha256-rsBwZF/lPuOzdjBZN2E08FjMM3JHyXit0Xi2zN+wAZ8=\"; }' 2>&1") + print(err) + assert "SSL certificate problem: self-signed certificate" in err + + # Fetching from a server with a trusted cert should work via environment variable override. 
+ machine.succeed("NIX_SSL_CERT_FILE=/tmp/cafile.pem nix build --no-substitute --expr 'import <nix/fetchurl.nix> { url = \"https://bad/index.html\"; hash = \"sha256-rsBwZF/lPuOzdjBZN2E08FjMM3JHyXit0Xi2zN+wAZ8=\"; }'") + ''; +} diff --git a/tests/nixos/fsync.nix b/tests/nixos/fsync.nix new file mode 100644 index 000000000..99ac2b25d --- /dev/null +++ b/tests/nixos/fsync.nix @@ -0,0 +1,39 @@ +{ lib, config, nixpkgs, pkgs, ... }: + +let + pkg1 = pkgs.go; +in + +{ + name = "fsync"; + + nodes.machine = + { config, lib, pkgs, ... }: + { virtualisation.emptyDiskImages = [ 1024 ]; + environment.systemPackages = [ pkg1 ]; + nix.settings.experimental-features = [ "nix-command" ]; + nix.settings.fsync-store-paths = true; + nix.settings.require-sigs = false; + boot.supportedFilesystems = [ "ext4" "btrfs" "xfs" ]; + }; + + testScript = { nodes }: '' + # fmt: off + for fs in ("ext4", "btrfs", "xfs"): + machine.succeed("mkfs.{} {} /dev/vdb".format(fs, "-F" if fs == "ext4" else "-f")) + machine.succeed("mkdir -p /mnt") + machine.succeed("mount /dev/vdb /mnt") + machine.succeed("sync") + machine.succeed("nix copy --offline ${pkg1} --to /mnt") + machine.crash() + + machine.start() + machine.wait_for_unit("multi-user.target") + machine.succeed("mkdir -p /mnt") + machine.succeed("mount /dev/vdb /mnt") + machine.succeed("nix path-info --offline --store /mnt ${pkg1}") + machine.succeed("nix store verify --all --store /mnt --no-trust") + + machine.succeed("umount /dev/vdb") + ''; +} diff --git a/tests/nixos/functional/common.nix b/tests/nixos/functional/common.nix index 51fd76884..561271ba0 100644 --- a/tests/nixos/functional/common.nix +++ b/tests/nixos/functional/common.nix @@ -24,43 +24,42 @@ in environment.systemPackages = let run-test-suite = pkgs.writeShellApplication { name = "run-test-suite"; - runtimeInputs = [ pkgs.gnumake pkgs.jq pkgs.git ]; + runtimeInputs = [ + pkgs.meson + pkgs.ninja + pkgs.jq + pkgs.git + + # Want to avoid `/run/current-system/sw/bin/bash` because we + # want a store path. Likewise for coreutils. + pkgs.bash + pkgs.coreutils + ]; text = '' set -x + cat /proc/sys/fs/file-max ulimit -Hn ulimit -Sn - cd ~ - cp -r ${pkgs.nix.overrideAttrs (o: { - name = "nix-configured-source"; - outputs = [ "out" ]; - separateDebugInfo = false; - disallowedReferences = [ ]; - buildPhase = ":"; - checkPhase = ":"; - installPhase = '' - cp -r . $out - ''; - installCheckPhase = ":"; - fixupPhase = ":"; - doInstallCheck = true; - })} nix - chmod -R +w nix - cd nix - # Tests we don't need - echo >tests/functional/plugins/local.mk - sed -i tests/functional/local.mk \ - -e 's!nix_tests += plugins\.sh!!' \ - -e 's!nix_tests += test-libstoreconsumer\.sh!!' 
\ - ; + cd ~ + + cp -r ${pkgs.nixComponents.nix-functional-tests.src} nix + chmod -R +w nix + + chmod u+w nix/.version + echo ${pkgs.nixComponents.version} > nix/.version export isTestOnNixOS=1 - export version=${config.nix.package.version} + export NIX_REMOTE_=daemon export NIX_REMOTE=daemon + export NIX_STORE=${builtins.storeDir} - make -j1 installcheck --keep-going + + meson setup nix/tests/functional build + cd build + meson test -j1 --print-errorlogs ''; }; in [ diff --git a/tests/nixos/github-flakes.nix b/tests/nixos/github-flakes.nix index 221045009..8e646f6dd 100644 --- a/tests/nixos/github-flakes.nix +++ b/tests/nixos/github-flakes.nix @@ -181,8 +181,14 @@ in print(out) info = json.loads(out) assert info["revision"] == "${private-flake-rev}", f"revision mismatch: {info['revision']} != ${private-flake-rev}" + assert info["fingerprint"] cat_log() + # Fetching with the resolved URL should produce the same result. + info2 = json.loads(client.succeed(f"nix flake metadata {info['url']} --json --access-tokens github.com=ghp_000000000000000000000000000000000000 --tarball-ttl 0")) + print(info["fingerprint"], info2["fingerprint"]) + assert info["fingerprint"] == info2["fingerprint"], "fingerprint mismatch" + client.succeed("nix registry pin nixpkgs") client.succeed("nix flake metadata nixpkgs --tarball-ttl 0 >&2") diff --git a/tests/nixos/nix-copy.nix b/tests/nixos/nix-copy.nix index 7db5197aa..8691d0138 100644 --- a/tests/nixos/nix-copy.nix +++ b/tests/nixos/nix-copy.nix @@ -37,7 +37,8 @@ in { { config, pkgs, ... }: { services.openssh.enable = true; services.openssh.settings.PermitRootLogin = "yes"; - users.users.root.password = "foobar"; + users.users.root.hashedPasswordFile = null; + users.users.root.password = "foobar"; virtualisation.writableStore = true; virtualisation.additionalPaths = [ pkgB pkgC ]; }; @@ -64,7 +65,7 @@ in { # Copy the closure of package A from the client to the server using password authentication, # and check that all prompts are visible server.fail("nix-store --check-validity ${pkgA}") - client.send_chars("nix copy --to ssh://server ${pkgA} >&2; echo done\n") + client.send_chars("nix copy --to ssh://server ${pkgA} >&2; echo -n do; echo ne\n") client.wait_for_text("continue connecting") client.send_chars("yes\n") client.wait_for_text("Password:") diff --git a/tests/nixos/quick-build.nix b/tests/nixos/quick-build.nix deleted file mode 100644 index 57e3b9cdb..000000000 --- a/tests/nixos/quick-build.nix +++ /dev/null @@ -1,47 +0,0 @@ -test@{ lib, extendModules, ... }: -let - inherit (lib) mkOption types; -in -{ - options = { - quickBuild = mkOption { - description = '' - Whether to perform a "quick" build of the Nix package to test. - - When iterating on the functional tests, it's recommended to "set" this - to `true`, so that changes to the functional tests don't require any - recompilation of the package. - You can do so by building the `.quickBuild` attribute on the check package, - e.g: - ```console - nix build .#hydraJobs.functional_user.quickBuild - ``` - - We don't enable this by default to avoid the mostly unnecessary work of - performing an additional build of the package in cases where we build - the package normally anyway, such as in our pre-merge CI. - ''; - type = types.bool; - default = false; - }; - }; - - config = { - passthru.quickBuild = - let withQuickBuild = extendModules { modules = [{ quickBuild = true; }]; }; - in withQuickBuild.config.test; - - defaults = { pkgs, ... 
}: { - config = lib.mkIf test.config.quickBuild { - nix.package = pkgs.nixComponents.nix-cli; - - system.forbiddenDependenciesRegexes = [ - # This would indicate that the quickBuild feature is broken. - # It could happen if NixOS has a dependency on pkgs.nix instead of - # config.nix.package somewhere. - (builtins.unsafeDiscardStringContext pkgs.nix.outPath) - ]; - }; - }; - }; -} diff --git a/tests/nixos/remote-builds.nix b/tests/nixos/remote-builds.nix index 8ddf6ad02..84e5176b7 100644 --- a/tests/nixos/remote-builds.nix +++ b/tests/nixos/remote-builds.nix @@ -34,6 +34,8 @@ let } ''; + supportsBadShell = lib.versionAtLeast config.nodes.client.nix.package.version "2.25pre"; + in { @@ -81,6 +83,17 @@ in virtualisation.additionalPaths = [ config.system.build.extraUtils ]; nix.settings.substituters = lib.mkForce [ ]; programs.ssh.extraConfig = "ConnectTimeout 30"; + environment.systemPackages = [ + # `bad-shell` is used to make sure Nix works in an environment with a misbehaving shell. + # + # More realistically, a bad shell would still run the command ("echo started") + # but considering that our solution is to avoid this shell (set via $SHELL), we + # don't need to bother with a more functional mock shell. + (pkgs.writeScriptBin "bad-shell" '' + #!${pkgs.runtimeShell} + echo "Hello, I am a broken shell" + '') + ]; }; }; @@ -114,9 +127,15 @@ in 'echo hello world on $(hostname)' >&2 """) + ${lib.optionalString supportsBadShell '' + # Check that SSH uses SHELL for LocalCommand, as expected, and check that + # our test setup here is working. The next test will use this bad SHELL. + client.succeed(f"SHELL=$(which bad-shell) ssh -oLocalCommand='true' -oPermitLocalCommand=yes {builder1.name} 'echo hello world' | grep -F 'Hello, I am a broken shell'") + ''} + # Perform a build and check that it was performed on the builder. out = client.succeed( - "nix-build ${expr nodes.client 1} 2> build-output", + "${lib.optionalString supportsBadShell "SHELL=$(which bad-shell)"} nix-build ${expr nodes.client 1} 2> build-output", "grep -q Hello build-output" ) builder1.succeed(f"test -e {out}") diff --git a/tests/nixos/s3-binary-cache-store.nix b/tests/nixos/s3-binary-cache-store.nix index 015457968..6c51fcba5 100644 --- a/tests/nixos/s3-binary-cache-store.nix +++ b/tests/nixos/s3-binary-cache-store.nix @@ -12,7 +12,7 @@ let storeUrl = "s3://my-cache?endpoint=http://server:9000&region=eu-west-1"; in { - name = "nix-copy-closure"; + name = "s3-binary-cache-store"; nodes = { server = @@ -51,6 +51,9 @@ in { server.succeed("${env} nix copy --to '${storeUrl}' ${pkgA}") + # Test fetchurl on s3:// URLs while we're at it. + client.succeed("${env} nix eval --impure --expr 'builtins.fetchurl { name = \"foo\"; url = \"s3://my-cache/nix-cache-info?endpoint=http://server:9000&region=eu-west-1\"; }'") + + # Copy a package from the binary cache. 
client.fail("nix path-info ${pkgA}") diff --git a/tests/unit/libexpr-support/.version b/tests/unit/libexpr-support/.version deleted file mode 120000 index 0df9915bf..000000000 --- a/tests/unit/libexpr-support/.version +++ /dev/null @@ -1 +0,0 @@ -../../../.version \ No newline at end of file diff --git a/tests/unit/libexpr-support/build-utils-meson b/tests/unit/libexpr-support/build-utils-meson deleted file mode 120000 index f2d8e8a50..000000000 --- a/tests/unit/libexpr-support/build-utils-meson +++ /dev/null @@ -1 +0,0 @@ -../../../build-utils-meson/ \ No newline at end of file diff --git a/tests/unit/libexpr/.version b/tests/unit/libexpr/.version deleted file mode 120000 index 0df9915bf..000000000 --- a/tests/unit/libexpr/.version +++ /dev/null @@ -1 +0,0 @@ -../../../.version \ No newline at end of file diff --git a/tests/unit/libexpr/build-utils-meson b/tests/unit/libexpr/build-utils-meson deleted file mode 120000 index f2d8e8a50..000000000 --- a/tests/unit/libexpr/build-utils-meson +++ /dev/null @@ -1 +0,0 @@ -../../../build-utils-meson/ \ No newline at end of file diff --git a/tests/unit/libfetchers/.version b/tests/unit/libfetchers/.version deleted file mode 120000 index 0df9915bf..000000000 --- a/tests/unit/libfetchers/.version +++ /dev/null @@ -1 +0,0 @@ -../../../.version \ No newline at end of file diff --git a/tests/unit/libfetchers/build-utils-meson b/tests/unit/libfetchers/build-utils-meson deleted file mode 120000 index f2d8e8a50..000000000 --- a/tests/unit/libfetchers/build-utils-meson +++ /dev/null @@ -1 +0,0 @@ -../../../build-utils-meson/ \ No newline at end of file diff --git a/tests/unit/libflake/.version b/tests/unit/libflake/.version deleted file mode 120000 index 0df9915bf..000000000 --- a/tests/unit/libflake/.version +++ /dev/null @@ -1 +0,0 @@ -../../../.version \ No newline at end of file diff --git a/tests/unit/libflake/build-utils-meson b/tests/unit/libflake/build-utils-meson deleted file mode 120000 index f2d8e8a50..000000000 --- a/tests/unit/libflake/build-utils-meson +++ /dev/null @@ -1 +0,0 @@ -../../../build-utils-meson/ \ No newline at end of file diff --git a/tests/unit/libstore-support/.version b/tests/unit/libstore-support/.version deleted file mode 120000 index 0df9915bf..000000000 --- a/tests/unit/libstore-support/.version +++ /dev/null @@ -1 +0,0 @@ -../../../.version \ No newline at end of file diff --git a/tests/unit/libstore-support/build-utils-meson b/tests/unit/libstore-support/build-utils-meson deleted file mode 120000 index f2d8e8a50..000000000 --- a/tests/unit/libstore-support/build-utils-meson +++ /dev/null @@ -1 +0,0 @@ -../../../build-utils-meson/ \ No newline at end of file diff --git a/tests/unit/libstore/.version b/tests/unit/libstore/.version deleted file mode 120000 index 0df9915bf..000000000 --- a/tests/unit/libstore/.version +++ /dev/null @@ -1 +0,0 @@ -../../../.version \ No newline at end of file diff --git a/tests/unit/libstore/build-utils-meson b/tests/unit/libstore/build-utils-meson deleted file mode 120000 index f2d8e8a50..000000000 --- a/tests/unit/libstore/build-utils-meson +++ /dev/null @@ -1 +0,0 @@ -../../../build-utils-meson/ \ No newline at end of file diff --git a/tests/unit/libstore/data/derivation/advanced-attributes-defaults.drv b/tests/unit/libstore/data/derivation/advanced-attributes-defaults.drv deleted file mode 120000 index 353090ad8..000000000 --- a/tests/unit/libstore/data/derivation/advanced-attributes-defaults.drv +++ /dev/null @@ -1 +0,0 @@ 
-../../../../functional/derivation/advanced-attributes-defaults.drv \ No newline at end of file diff --git a/tests/unit/libstore/data/derivation/advanced-attributes-structured-attrs-defaults.drv b/tests/unit/libstore/data/derivation/advanced-attributes-structured-attrs-defaults.drv deleted file mode 120000 index 11713da12..000000000 --- a/tests/unit/libstore/data/derivation/advanced-attributes-structured-attrs-defaults.drv +++ /dev/null @@ -1 +0,0 @@ -../../../../functional/derivation/advanced-attributes-structured-attrs-defaults.drv \ No newline at end of file diff --git a/tests/unit/libstore/data/derivation/advanced-attributes-structured-attrs.drv b/tests/unit/libstore/data/derivation/advanced-attributes-structured-attrs.drv deleted file mode 120000 index 962f8ea3f..000000000 --- a/tests/unit/libstore/data/derivation/advanced-attributes-structured-attrs.drv +++ /dev/null @@ -1 +0,0 @@ -../../../../functional/derivation/advanced-attributes-structured-attrs.drv \ No newline at end of file diff --git a/tests/unit/libstore/data/derivation/advanced-attributes.drv b/tests/unit/libstore/data/derivation/advanced-attributes.drv deleted file mode 120000 index 2a53a05ca..000000000 --- a/tests/unit/libstore/data/derivation/advanced-attributes.drv +++ /dev/null @@ -1 +0,0 @@ -../../../../functional/derivation/advanced-attributes.drv \ No newline at end of file diff --git a/tests/unit/libutil-support/.version b/tests/unit/libutil-support/.version deleted file mode 120000 index 0df9915bf..000000000 --- a/tests/unit/libutil-support/.version +++ /dev/null @@ -1 +0,0 @@ -../../../.version \ No newline at end of file diff --git a/tests/unit/libutil-support/build-utils-meson b/tests/unit/libutil-support/build-utils-meson deleted file mode 120000 index f2d8e8a50..000000000 --- a/tests/unit/libutil-support/build-utils-meson +++ /dev/null @@ -1 +0,0 @@ -../../../build-utils-meson/ \ No newline at end of file diff --git a/tests/unit/libutil/.version b/tests/unit/libutil/.version deleted file mode 120000 index 0df9915bf..000000000 --- a/tests/unit/libutil/.version +++ /dev/null @@ -1 +0,0 @@ -../../../.version \ No newline at end of file diff --git a/tests/unit/libutil/build-utils-meson b/tests/unit/libutil/build-utils-meson deleted file mode 120000 index f2d8e8a50..000000000 --- a/tests/unit/libutil/build-utils-meson +++ /dev/null @@ -1 +0,0 @@ -../../../build-utils-meson/ \ No newline at end of file