diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 4488c7b7d..217b19108 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -10,24 +10,6 @@ -# Checklist for maintainers - - - -Maintainers: tick if completed or explain if not relevant - - - [ ] agreed on idea - - [ ] agreed on implementation strategy - - [ ] tests, as appropriate - - functional tests - `tests/**.sh` - - unit tests - `src/*/tests` - - integration tests - `tests/nixos/*` - - [ ] documentation in the manual - - [ ] documentation in the internal API docs - - [ ] code and comments are self-explanatory - - [ ] commit message explains why the change was made - - [ ] new feature or incompatible change: updated release notes - # Priorities Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc). diff --git a/.github/labeler.yml b/.github/labeler.yml index 12120bdb3..7544f07a6 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -20,4 +20,4 @@ # Unit tests - src/*/tests/**/* # Functional and integration tests - - tests/**/* + - tests/functional/**/* diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 816474ed5..12c60c649 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -14,14 +14,14 @@ jobs: if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name)) runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha }} # required to find all branches fetch-depth: 0 - name: Create backport PRs # should be kept in sync with `version` - uses: zeebe-io/backport-action@v1.3.1 + uses: zeebe-io/backport-action@v1.4.0 with: # Config README: https://github.com/zeebe-io/backport-action#backport-action github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c3a17d106..afe4dc2e3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,10 +17,10 @@ jobs: runs-on: ${{ matrix.os }} timeout-minutes: 60 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v22 + - uses: cachix/install-nix-action@v23 with: # The sandbox would otherwise be disabled by default on Darwin extra_nix_config: "sandbox = true" @@ -58,11 +58,11 @@ jobs: outputs: installerURL: ${{ steps.prepare-installer.outputs.installerURL }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@v22 + - uses: cachix/install-nix-action@v23 with: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - uses: cachix/cachix-action@v12 @@ -82,9 +82,9 @@ jobs: os: [ubuntu-latest, macos-latest] runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@v22 + - uses: cachix/install-nix-action@v23 with: install_url: '${{needs.installer.outputs.installerURL}}' install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve" @@ -101,6 +101,9 @@ jobs: docker_push_image: needs: [check_secrets, tests] 
+ permissions: + contents: read + packages: write if: >- github.event_name == 'push' && github.ref_name == 'master' && @@ -108,10 +111,10 @@ jobs: needs.check_secrets.outputs.docker == 'true' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v22 + - uses: cachix/install-nix-action@v23 with: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV @@ -126,10 +129,30 @@ jobs: - run: docker load -i ./result/image.tar.gz - run: docker tag nix:$NIX_VERSION nixos/nix:$NIX_VERSION - run: docker tag nix:$NIX_VERSION nixos/nix:master + # We'll deploy the newly built image to both Docker Hub and Github Container Registry. + # + # Push to Docker Hub first - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - run: docker push nixos/nix:$NIX_VERSION - run: docker push nixos/nix:master + # Push to GitHub Container Registry as well + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Push image + run: | + IMAGE_ID=ghcr.io/${{ github.repository_owner }}/nix + # Change all uppercase to lowercase + IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]') + + docker tag nix:$NIX_VERSION $IMAGE_ID:$NIX_VERSION + docker tag nix:$NIX_VERSION $IMAGE_ID:master + docker push $IMAGE_ID:$NIX_VERSION + docker push $IMAGE_ID:master diff --git a/.github/workflows/hydra_status.yml b/.github/workflows/hydra_status.yml index 38a9c0877..2a7574747 100644 --- a/.github/workflows/hydra_status.yml +++ b/.github/workflows/hydra_status.yml @@ -13,7 +13,7 @@ jobs: if: github.repository_owner == 'NixOS' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - run: bash scripts/check-hydra-status.sh diff --git a/.gitignore b/.gitignore index 93a9ff9ae..04d96ca2c 100644 --- a/.gitignore +++ b/.gitignore @@ -79,24 +79,24 @@ perl/Makefile.config /src/build-remote/build-remote -# /tests/ -/tests/test-tmp -/tests/common/vars-and-functions.sh -/tests/result* -/tests/restricted-innocent -/tests/shell -/tests/shell.drv -/tests/config.nix -/tests/ca/config.nix -/tests/dyn-drv/config.nix -/tests/repl-result-out -/tests/test-libstoreconsumer/test-libstoreconsumer +# /tests/functional/ +/tests/functional/test-tmp +/tests/functional/common/vars-and-functions.sh +/tests/functional/result* +/tests/functional/restricted-innocent +/tests/functional/shell +/tests/functional/shell.drv +/tests/functional/config.nix +/tests/functional/ca/config.nix +/tests/functional/dyn-drv/config.nix +/tests/functional/repl-result-out +/tests/functional/test-libstoreconsumer/test-libstoreconsumer -# /tests/lang/ -/tests/lang/*.out -/tests/lang/*.out.xml -/tests/lang/*.err -/tests/lang/*.ast +# /tests/functional/lang/ +/tests/functional/lang/*.out +/tests/functional/lang/*.out.xml +/tests/functional/lang/*.err +/tests/functional/lang/*.ast /perl/lib/Nix/Config.pm /perl/lib/Nix/Store.cc @@ -138,4 +138,9 @@ nix-rust/target result +# IDE .vscode/ +.idea/ + +# clangd and possibly more +.cache/ diff --git a/.version b/.version index cf8690732..ef0f38abe 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.18.0 +2.19.0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md 
index 4a72a8eac..ffcc0268f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -24,30 +24,51 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy). ## Making changes to Nix -1. Check for [pull requests](https://github.com/NixOS/nix/pulls) that might already cover the contribution you are about to make. - There are many open pull requests that might already do what you intent to work on. +1. Search for related issues that cover what you're going to work on. + It could help to mention there that you will work on the issue. + + Issues labeled [good first issue](https://github.com/NixOS/nix/labels/good%20first%20issue) should be relatively easy to fix and are likely to get merged quickly. + Pull requests addressing issues labeled [idea approved](https://github.com/NixOS/nix/labels/idea%20approved) or [RFC](https://github.com/NixOS/nix/labels/RFC) are especially welcomed by maintainers and will receive prioritised review. + + If you are proficient with C++, addressing one of the [popular issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc) will be highly appreciated by maintainers and Nix users all over the world. + For far-reaching changes, please investigate possible blockers and design implications, and coordinate with maintainers before investing too much time in writing code that may not end up getting merged. + + If there is no relevant issue yet and you're not sure whether your change is likely to be accepted, [open an issue](https://github.com/NixOS/nix/issues/new/choose) yourself. + +2. Check for [pull requests](https://github.com/NixOS/nix/pulls) that might already cover the contribution you are about to make. + There are many open pull requests that might already do what you intend to work on. You can use [labels](https://github.com/NixOS/nix/labels) to filter for relevant topics. -2. Search for related issues that cover what you're going to work on. It could help to mention there that you will work on the issue. - - Issues labeled ["good first issue"](https://github.com/NixOS/nix/labels/good-first-issue) should be relatively easy to fix and are likely to get merged quickly. - Pull requests addressing issues labeled ["idea approved"](https://github.com/NixOS/nix/labels/idea%20approved) are especially welcomed by maintainers and will receive prioritised review. - 3. Check the [Nix reference manual](https://nixos.org/manual/nix/unstable/contributing/hacking.html) for information on building Nix and running its tests. For contributions to the command line interface, please check the [CLI guidelines](https://nixos.org/manual/nix/unstable/contributing/cli-guideline.html). -4. Make your changes! +4. Make your change! 5. [Create a pull request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request) for your changes. - * [Mark the pull request as draft](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request) if you're not done with the changes. + * Clearly explain the problem that you're solving. + + Link related issues to inform interested parties and future contributors about your change. + If your pull request closes one or multiple issues, mention that in the description using `Closes: #`, as it will then happen automatically when your change is merged. 
* Make sure to have [a clean history of commits on your branch by using rebase](https://www.digitalocean.com/community/tutorials/how-to-rebase-and-update-a-pull-request). - * Link related issues in your pull request to inform interested parties and future contributors about your change. - If your pull request closes one or multiple issues, note that in the description using `Closes: #`, as it will then happen automatically when your change is merged. + * [Mark the pull request as draft](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request) if you're not done with the changes. 6. Do not expect your pull request to be reviewed immediately. Nix maintainers follow a [structured process for reviews and design decisions](https://github.com/NixOS/nix/tree/master/maintainers#project-board-protocol), which may or may not prioritise your work. + Following this checklist will make the process smoother for everyone: + + - [ ] Fixes an [idea approved](https://github.com/NixOS/nix/labels/idea%20approved) issue + - [ ] Tests, as appropriate: + - Functional tests – [`tests/functional/**.sh`](./tests/functional) + - Unit tests – [`src/*/tests`](./src/) + - Integration tests – [`tests/nixos/*`](./tests/nixos) + - [ ] User documentation in the [manual](./doc/manual/src) + - [ ] API documentation in header files + - [ ] Code and comments are self-explanatory + - [ ] Commit message explains **why** the change was made + - [ ] New feature or incompatible change: updated [release notes](./doc/manual/src/release-notes/rl-next.md) + 7. If you need additional feedback or help to getting pull request into shape, ask other contributors using [@mentions](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#mentioning-people-and-teams).
## Making changes to the Nix manual diff --git a/Makefile b/Makefile index ea15b03db..24af7ae2b 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,7 @@ +-include Makefile.config +clean-files += Makefile.config + +ifeq ($(ENABLE_BUILD), yes) makefiles = \ mk/precompiled-headers.mk \ local.mk \ @@ -18,20 +22,24 @@ makefiles = \ misc/upstart/local.mk \ doc/manual/local.mk \ doc/internal-api/local.mk +endif --include Makefile.config - -ifeq ($(tests), yes) +ifeq ($(ENABLE_BUILD)_$(ENABLE_TESTS), yes_yes) +UNIT_TEST_ENV = _NIX_TEST_UNIT_DATA=unit-test-data makefiles += \ src/libutil/tests/local.mk \ src/libstore/tests/local.mk \ - src/libexpr/tests/local.mk \ - tests/local.mk \ - tests/ca/local.mk \ - tests/dyn-drv/local.mk \ - tests/local-overlay-store/local.mk \ - tests/test-libstoreconsumer/local.mk \ - tests/plugins/local.mk + src/libexpr/tests/local.mk +endif + +ifeq ($(ENABLE_TESTS), yes) +makefiles += \ + tests/functional/local.mk \ + tests/functional/ca/local.mk \ + tests/functional/dyn-drv/local.mk \ + tests/functional/local-overlay-store/local.mk \ + tests/functional/test-libstoreconsumer/local.mk \ + tests/functional/plugins/local.mk else makefiles += \ mk/disable-tests.mk diff --git a/Makefile.config.in b/Makefile.config.in index 707cfe0e3..19992fa20 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -46,5 +46,6 @@ sandbox_shell = @sandbox_shell@ storedir = @storedir@ sysconfdir = @sysconfdir@ system = @system@ -tests = @tests@ +ENABLE_BUILD = @ENABLE_BUILD@ +ENABLE_TESTS = @ENABLE_TESTS@ internal_api_docs = @internal_api_docs@ diff --git a/README.md b/README.md index 85b0902b1..623a9722c 100644 --- a/README.md +++ b/README.md @@ -7,29 +7,27 @@ Nix is a powerful package manager for Linux and other Unix systems that makes pa management reliable and reproducible. Please refer to the [Nix manual](https://nixos.org/nix/manual) for more details. -## Installation +## Installation and first steps -On Linux and macOS the easiest way to install Nix is to run the following shell command -(as a user other than root): +Visit [nix.dev](https://nix.dev) for [installation instructions](https://nix.dev/tutorials/install-nix) and [beginner tutorials](https://nix.dev/tutorials/first-steps). -```console -$ curl -L https://nixos.org/nix/install | sh -``` - -Information on additional installation methods is available on the [Nix download page](https://nixos.org/download.html). +Full reference documentation can be found in the [Nix manual](https://nixos.org/nix/manual). ## Building And Developing See our [Hacking guide](https://nixos.org/manual/nix/unstable/contributing/hacking.html) in our manual for instruction on how to to set up a development environment and build Nix from source. +## Contributing + +Check the [contributing guide](./CONTRIBUTING.md) if you want to get involved with developing Nix. 
+ ## Additional Resources - [Nix manual](https://nixos.org/nix/manual) - [Nix jobsets on hydra.nixos.org](https://hydra.nixos.org/project/nix) - [NixOS Discourse](https://discourse.nixos.org/) - [Matrix - #nix:nixos.org](https://matrix.to/#/#nix:nixos.org) -- [IRC - #nixos on libera.chat](irc://irc.libera.chat/#nixos) ## License diff --git a/boehmgc-coroutine-sp-fallback.diff b/boehmgc-coroutine-sp-fallback.diff index 5066d8278..2afbe9671 100644 --- a/boehmgc-coroutine-sp-fallback.diff +++ b/boehmgc-coroutine-sp-fallback.diff @@ -59,12 +59,18 @@ index b5d71e62..aed7b0bf 100644 GC_bool found_me = FALSE; size_t nthreads = 0; int i; -@@ -851,6 +853,31 @@ GC_INNER void GC_push_all_stacks(void) +@@ -851,6 +853,37 @@ GC_INNER void GC_push_all_stacks(void) hi = p->altstack + p->altstack_size; /* FIXME: Need to scan the normal stack too, but how ? */ /* FIXME: Assume stack grows down */ + } else { -+ if (pthread_getattr_np(p->id, &pattr)) { ++#ifdef HAVE_PTHREAD_ATTR_GET_NP ++ if (!pthread_attr_init(&pattr) ++ || !pthread_attr_get_np(p->id, &pattr)) ++#else /* HAVE_PTHREAD_GETATTR_NP */ ++ if (pthread_getattr_np(p->id, &pattr)) ++#endif ++ { + ABORT("GC_push_all_stacks: pthread_getattr_np failed!"); + } + if (pthread_attr_getstacksize(&pattr, &stack_limit)) { diff --git a/bootstrap.sh b/bootstrap.sh deleted file mode 100755 index e3e259351..000000000 --- a/bootstrap.sh +++ /dev/null @@ -1,4 +0,0 @@ -#! /bin/sh -e -rm -f aclocal.m4 -mkdir -p config -exec autoreconf -vfi diff --git a/configure.ac b/configure.ac index 6d78237f0..225baf6b5 100644 --- a/configure.ac +++ b/configure.ac @@ -152,12 +152,17 @@ if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then LDFLAGS="-latomic $LDFLAGS" fi +# Running the functional tests without building Nix is useful for testing +# different pre-built versions of Nix against each other. +AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]), + ENABLE_BUILD=$enableval, ENABLE_BUILD=yes) +AC_SUBST(ENABLE_BUILD) # Building without tests is useful for bootstrapping with a smaller footprint # or running the tests in a separate derivation. Otherwise, we do compile and # run them. AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]), - tests=$enableval, tests=yes) -AC_SUBST(tests) + ENABLE_TESTS=$enableval, ENABLE_TESTS=yes) +AC_SUBST(ENABLE_TESTS) # Building without API docs is the default as Nix' C++ interfaces are internal and unstable. AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]), @@ -289,7 +294,7 @@ if test "$gc" = yes; then fi -if test "$tests" = yes; then +if test "$ENABLE_TESTS" = yes; then # Look for gtest. PKG_CHECK_MODULES([GTEST], [gtest_main]) diff --git a/doc/internal-api/doxygen.cfg.in b/doc/internal-api/doxygen.cfg.in index 8f526536d..599be2470 100644 --- a/doc/internal-api/doxygen.cfg.in +++ b/doc/internal-api/doxygen.cfg.in @@ -54,6 +54,23 @@ INPUT = \ src/nix-env \ src/nix-store +# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names +# in the source code. If set to NO, only conditional compilation will be +# performed. Macro expansion can be done in a controlled way by setting +# EXPAND_ONLY_PREDEF to YES. +# The default value is: NO. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. 
+ +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then +# the macro expansion is limited to the macros specified with the PREDEFINED and +# EXPAND_AS_DEFINED tags. +# The default value is: NO. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +EXPAND_ONLY_PREDEF = YES + # The INCLUDE_PATH tag can be used to specify one or more directories that # contain include files that are not input files but should be processed by the # preprocessor. Note that the INCLUDE_PATH is not recursive, so the setting of @@ -61,3 +78,16 @@ INPUT = \ # This tag requires that the tag SEARCH_INCLUDES is set to YES. INCLUDE_PATH = @RAPIDCHECK_HEADERS@ + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this +# tag can be used to specify a list of macro names that should be expanded. The +# macro definition that is found in the sources will be used. Use the PREDEFINED +# tag if you want to use a different macro definition that overrules the +# definition found in the source code. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +EXPAND_AS_DEFINED = \ + DECLARE_COMMON_SERIALISER \ + DECLARE_WORKER_SERIALISER \ + DECLARE_SERVE_SERIALISER \ + LENGTH_PREFIXED_PROTO_HELPER diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix index 65eec42d0..14136016d 100644 --- a/doc/manual/generate-manpage.nix +++ b/doc/manual/generate-manpage.nix @@ -1,11 +1,12 @@ let inherit (builtins) - attrNames attrValues fromJSON listToAttrs mapAttrs + attrNames attrValues fromJSON listToAttrs mapAttrs groupBy concatStringsSep concatMap length lessThan replaceStrings sort; - inherit (import ./utils.nix) concatStrings optionalString filterAttrs trim squash unique showSettings; + inherit (import ) attrsToList concatStrings optionalString filterAttrs trim squash unique; + showStoreDocs = import ./generate-store-info.nix; in -commandDump: +inlineHTML: commandDump: let @@ -30,7 +31,7 @@ let ${maybeSubcommands} - ${maybeDocumentation} + ${maybeStoreDocs} ${maybeOptions} ''; @@ -40,15 +41,15 @@ let showArgument = arg: "*${arg.label}*" + optionalString (! arg ? arity) "..."; arguments = concatStringsSep " " (map showArgument args); in '' - `${command}` [*option*...] ${arguments} + `${command}` [*option*...] ${arguments} ''; maybeSubcommands = optionalString (details ? commands && details.commands != {}) - '' - where *subcommand* is one of the following: + '' + where *subcommand* is one of the following: - ${subcommands} - ''; + ${subcommands} + ''; subcommands = if length categories > 1 then listCategories @@ -70,40 +71,57 @@ let * [`${command} ${name}`](./${appendName filename name}.md) - ${subcmd.description} ''; - maybeDocumentation = optionalString - (details ? doc) - (replaceStrings ["@stores@"] [storeDocs] details.doc); + # FIXME: this is a hack. + # store parameters should not be part of command documentation to begin + # with, but instead be rendered on separate pages. + maybeStoreDocs = optionalString (details ? doc) + (replaceStrings [ "@stores@" ] [ (showStoreDocs inlineHTML commandInfo.stores) ] details.doc); - maybeOptions = optionalString (details.flags != {}) '' + maybeOptions = let + allVisibleOptions = filterAttrs + (_: o: ! 
o.hiddenCategory) + (details.flags // toplevel.flags); + in optionalString (allVisibleOptions != {}) '' # Options - ${showOptions details.flags toplevel.flags} + ${showOptions inlineHTML allVisibleOptions} + + > **Note** + > + > See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags. ''; - showOptions = options: commonOptions: + showOptions = inlineHTML: allOptions: let - allOptions = options // commonOptions; - showCategory = cat: '' - ${optionalString (cat != "") "**${cat}:**"} + showCategory = cat: opts: '' + ${optionalString (cat != "") "## ${cat}"} - ${listOptions (filterAttrs (n: v: v.category == cat) allOptions)} + ${concatStringsSep "\n" (attrValues (mapAttrs showOption opts))} ''; - listOptions = opts: concatStringsSep "\n" (attrValues (mapAttrs showOption opts)); showOption = name: option: let + result = trim '' + - ${item} + + ${option.description} + ''; + item = if inlineHTML + then ''[`--${name}`](#opt-${name}) ${shortName} ${labels}'' + else "`--${name}` ${shortName} ${labels}"; shortName = optionalString (option ? shortName) ("/ `-${option.shortName}`"); labels = optionalString (option ? labels) (concatStringsSep " " (map (s: "*${s}*") option.labels)); - in trim '' - - `--${name}` ${shortName} ${labels} - - ${option.description} - ''; - categories = sort lessThan (unique (map (cmd: cmd.category) (attrValues allOptions))); - in concatStrings (map showCategory categories); + in result; + categories = mapAttrs + # Convert each group from a list of key-value pairs back to an attrset + (_: listToAttrs) + (groupBy + (cmd: cmd.value.category) + (attrsToList allOptions)); + in concatStrings (attrValues (mapAttrs showCategory categories)); in squash result; appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name; @@ -135,35 +153,4 @@ let " - [${page.command}](command-ref/new-cli/${page.name})"; in concatStringsSep "\n" (map showEntry manpages) + "\n"; - storeDocs = - let - showStore = name: { settings, doc, experimentalFeature }: - let - experimentalFeatureNote = optionalString (experimentalFeature != null) '' - > **Warning** - > This store is part of an - > [experimental feature](@docroot@/contributing/experimental-features.md). - - To use this store, you need to make sure the corresponding experimental feature, - [`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}), - is enabled. - For example, include the following in [`nix.conf`](#): - - ``` - extra-experimental-features = ${experimentalFeature} - ``` - ''; - in '' - ## ${name} - - ${doc} - - ${experimentalFeatureNote} - - **Settings**: - - ${showSettings { useAnchors = false; } settings} - ''; - in concatStrings (attrValues (mapAttrs showStore commandInfo.stores)); - in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; } diff --git a/doc/manual/generate-settings.nix b/doc/manual/generate-settings.nix new file mode 100644 index 000000000..8736bb793 --- /dev/null +++ b/doc/manual/generate-settings.nix @@ -0,0 +1,66 @@ +let + inherit (builtins) attrValues concatStringsSep isAttrs isBool mapAttrs; + inherit (import ./utils.nix) concatStrings indent optionalString squash; +in + +# `inlineHTML` is a hack to accommodate inconsistent output from `lowdown` +{ prefix, inlineHTML ? 
true }: settingsInfo: + +let + + showSetting = prefix: setting: { description, documentDefault, defaultValue, aliases, value, experimentalFeature }: + let + result = squash '' + - ${item} + + ${indent " " body} + ''; + item = if inlineHTML + then ''[`${setting}`](#${prefix}-${setting})'' + else "`${setting}`"; + # separate body to cleanly handle indentation + body = '' + ${description} + + ${experimentalFeatureNote} + + **Default:** ${showDefault documentDefault defaultValue} + + ${showAliases aliases} + ''; + + experimentalFeatureNote = optionalString (experimentalFeature != null) '' + > **Warning** + > This setting is part of an + > [experimental feature](@docroot@/contributing/experimental-features.md). + + To change this setting, you need to make sure the corresponding experimental feature, + [`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}), + is enabled. + For example, include the following in [`nix.conf`](#): + + ``` + extra-experimental-features = ${experimentalFeature} + ${setting} = ... + ``` + ''; + + showDefault = documentDefault: defaultValue: + if documentDefault then + # a StringMap value type is specified as a string, but + # this shows the value type. The empty stringmap is `null` in + # JSON, but that converts to `{ }` here. + if defaultValue == "" || defaultValue == [] || isAttrs defaultValue + then "*empty*" + else if isBool defaultValue then + if defaultValue then "`true`" else "`false`" + else "`${toString defaultValue}`" + else "*machine-specific*"; + + showAliases = aliases: + optionalString (aliases != []) + "**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}"; + + in result; + +in concatStrings (attrValues (mapAttrs (showSetting prefix) settingsInfo)) diff --git a/doc/manual/generate-store-info.nix b/doc/manual/generate-store-info.nix new file mode 100644 index 000000000..36215aadf --- /dev/null +++ b/doc/manual/generate-store-info.nix @@ -0,0 +1,45 @@ +let + inherit (builtins) attrValues mapAttrs; + inherit (import ./utils.nix) concatStrings optionalString; + showSettings = import ./generate-settings.nix; +in + +inlineHTML: storesInfo: + +let + + showStore = name: { settings, doc, experimentalFeature }: + let + + result = '' + ## ${name} + + ${doc} + + ${experimentalFeatureNote} + + ### Settings + + ${showSettings { prefix = "store-${slug}"; inherit inlineHTML; } settings} + ''; + + # markdown doesn't like spaces in URLs + slug = builtins.replaceStrings [ " " ] [ "-" ] name; + + experimentalFeatureNote = optionalString (experimentalFeature != null) '' + > **Warning** + > This store is part of an + > [experimental feature](@docroot@/contributing/experimental-features.md). + + To use this store, you need to make sure the corresponding experimental feature, + [`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}), + is enabled. 
+ For example, include the following in [`nix.conf`](#): + + ``` + extra-experimental-features = ${experimentalFeature} + ``` + ''; + in result; + +in concatStrings (attrValues (mapAttrs showStore storesInfo)) diff --git a/doc/manual/local.mk b/doc/manual/local.mk index abdfd6a62..8bf16e9dd 100644 --- a/doc/manual/local.mk +++ b/doc/manual/local.mk @@ -32,7 +32,7 @@ dummy-env = env -i \ NIX_STATE_DIR=/dummy \ NIX_CONFIG='cores = 0' -nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -I nix/corepkgs=corepkgs --store dummy:// --impure --raw +nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -I nix=doc/manual --store dummy:// --impure --raw # re-implement mdBook's include directive to make it usable for terminal output and for proper @docroot@ substitution define process-includes @@ -96,14 +96,14 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli $(d)/sr @cp $< $@ @$(call process-includes,$@,$@) -$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(bindir)/nix +$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(d)/generate-settings.nix $(d)/generate-store-info.nix $(bindir)/nix @rm -rf $@ $@.tmp - $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix (builtins.readFile $<)' + $(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix true (builtins.readFile $<)' @mv $@.tmp $@ -$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(bindir)/nix +$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/generate-settings.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(bindir)/nix @cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp - $(trace-gen) $(nix-eval) --expr '(import doc/manual/utils.nix).showSettings { useAnchors = true; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp; + $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-settings.nix { prefix = "opt-"; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp; @mv $@.tmp $@ $(d)/nix.json: $(bindir)/nix @@ -125,7 +125,7 @@ $(d)/src/command-ref/experimental-features-shortlist.md: $(d)/xp-features.json $ @mv $@.tmp $@ $(d)/xp-features.json: $(bindir)/nix - $(trace-gen) $(dummy-env) NIX_PATH=nix/corepkgs=corepkgs $(bindir)/nix __dump-xp-features > $@.tmp + $(trace-gen) $(dummy-env) $(bindir)/nix __dump-xp-features > $@.tmp @mv $@.tmp $@ $(d)/src/language/builtins.md: $(d)/language.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix @@ -141,7 +141,7 @@ $(d)/src/language/builtin-constants.md: $(d)/language.json $(d)/generate-builtin @mv $@.tmp $@ $(d)/language.json: $(bindir)/nix - $(trace-gen) $(dummy-env) NIX_PATH=nix/corepkgs=corepkgs $(bindir)/nix __dump-language > $@.tmp + $(trace-gen) $(dummy-env) $(bindir)/nix __dump-language > $@.tmp @mv $@.tmp $@ # Generate the HTML manual. @@ -173,6 +173,10 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli done @touch $@ +# the `! 
-name 'contributing.md'` filter excludes the one place where +# `@docroot@` is to be preserved for documenting the mechanism +# FIXME: maybe contributing guides should live right next to the code +# instead of in the manual $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md $(d)/src/language/builtin-constants.md $(trace-gen) \ tmp="$$(mktemp -d)"; \ @@ -180,7 +184,7 @@ $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/ find "$$tmp" -name '*.md' | while read -r file; do \ $(call process-includes,$$file,$$file); \ done; \ - find "$$tmp" -name '*.md' | while read -r file; do \ + find "$$tmp" -name '*.md' ! -name 'documentation.md' | while read -r file; do \ docroot="$$(realpath --relative-to="$$(dirname "$$file")" $$tmp/manual/src)"; \ sed -i "s,@docroot@,$$docroot,g" "$$file"; \ done; \ diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js index b43622ed6..d1b10109d 100644 --- a/doc/manual/redirects.js +++ b/doc/manual/redirects.js @@ -336,14 +336,13 @@ const redirects = { "simple-values": "#primitives", "lists": "#list", "strings": "#string", - "lists": "#list", "attribute-sets": "#attribute-set", }, "installation/installing-binary.html": { "linux": "uninstall.html#linux", "macos": "uninstall.html#macos", "uninstalling": "uninstall.html", - } + }, "contributing/hacking.html": { "nix-with-flakes": "#building-nix-with-flakes", "classic-nix": "#building-nix", @@ -355,6 +354,7 @@ const redirects = { "installer-tests": "testing.html#installer-tests", "one-time-setup": "testing.html#one-time-setup", "using-the-ci-generated-installer-for-manual-testing": "testing.html#using-the-ci-generated-installer-for-manual-testing", + "characterization-testing": "#characterisation-testing-unit", } }; diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index 6c599abcf..60ebeb138 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -17,7 +17,6 @@ - [Upgrading Nix](installation/upgrading.md) - [Uninstalling Nix](installation/uninstall.md) - [Package Management](package-management/package-management.md) - - [Basic Package Management](package-management/basic-package-mgmt.md) - [Profiles](package-management/profiles.md) - [Garbage Collection](package-management/garbage-collection.md) - [Garbage Collector Roots](package-management/garbage-collector-roots.md) @@ -30,9 +29,11 @@ - [Data Types](language/values.md) - [Language Constructs](language/constructs.md) - [String interpolation](language/string-interpolation.md) + - [Lookup path](language/constructs/lookup-path.md) - [Operators](language/operators.md) - [Derivations](language/derivations.md) - [Advanced Attributes](language/advanced-attributes.md) + - [Import From Derivation](language/import-from-derivation.md) - [Built-in Constants](language/builtin-constants.md) - [Built-in Functions](language/builtins.md) - [Advanced Topics](advanced-topics/advanced-topics.md) @@ -100,15 +101,18 @@ - [File System Object](architecture/file-system-object.md) - [Protocols](protocols/protocols.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) + - [Derivation "ATerm" file format](protocols/derivation-aterm.md) - [Glossary](glossary.md) - [Contributing](contributing/contributing.md) - [Hacking](contributing/hacking.md) - [Testing](contributing/testing.md) + - 
[Documentation](contributing/documentation.md) - [Experimental Features](contributing/experimental-features.md) - [CLI guideline](contributing/cli-guideline.md) - [C++ style guide](contributing/cxx.md) - [Release Notes](release-notes/release-notes.md) - [Release X.Y (202?-??-??)](release-notes/rl-next.md) + - [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md) - [Release 2.17 (2023-07-24)](release-notes/rl-2.17.md) - [Release 2.16 (2023-05-31)](release-notes/rl-2.16.md) - [Release 2.15 (2023-04-11)](release-notes/rl-2.15.md) diff --git a/doc/manual/src/advanced-topics/advanced-topics.md b/doc/manual/src/advanced-topics/advanced-topics.md index 8b1378917..9a4d12a33 100644 --- a/doc/manual/src/advanced-topics/advanced-topics.md +++ b/doc/manual/src/advanced-topics/advanced-topics.md @@ -1 +1 @@ - +This section lists advanced topics related to builds and builds performance diff --git a/doc/manual/src/advanced-topics/distributed-builds.md b/doc/manual/src/advanced-topics/distributed-builds.md index 73a113d35..507c5ecb7 100644 --- a/doc/manual/src/advanced-topics/distributed-builds.md +++ b/doc/manual/src/advanced-topics/distributed-builds.md @@ -12,14 +12,14 @@ machine is accessible via SSH and that it has Nix installed. You can test whether connecting to the remote Nix instance works, e.g. ```console -$ nix store ping --store ssh://mac +$ nix store info --store ssh://mac ``` will try to connect to the machine named `mac`. It is possible to specify an SSH identity file as part of the remote store URI, e.g. ```console -$ nix store ping --store ssh://mac?ssh-key=/home/alice/my-key +$ nix store info --store ssh://mac?ssh-key=/home/alice/my-key ``` Since builds should be non-interactive, the key should not have a diff --git a/doc/manual/src/advanced-topics/post-build-hook.md b/doc/manual/src/advanced-topics/post-build-hook.md index a251dec48..e4475bd9b 100644 --- a/doc/manual/src/advanced-topics/post-build-hook.md +++ b/doc/manual/src/advanced-topics/post-build-hook.md @@ -69,6 +69,8 @@ exec nix copy --to "s3://example-nix-cache" $OUT_PATHS > store sign`. Nix guarantees the paths will not contain any spaces, > however a store path might contain glob characters. The `set -f` > disables globbing in the shell. +> If you want to upload the `.drv` file too, the `$DRV_PATH` variable +> is also defined for the script and works just like `$OUT_PATHS`. Then make sure the hook program is executable by the `root` user: diff --git a/doc/manual/src/command-ref/env-common.md b/doc/manual/src/command-ref/env-common.md index b4a9bb2a9..34e0dbfbd 100644 --- a/doc/manual/src/command-ref/env-common.md +++ b/doc/manual/src/command-ref/env-common.md @@ -2,109 +2,124 @@ Most Nix commands interpret the following environment variables: - - [`IN_NIX_SHELL`](#env-IN_NIX_SHELL)\ - Indicator that tells if the current environment was set up by - `nix-shell`. It can have the values `pure` or `impure`. +- [`IN_NIX_SHELL`](#env-IN_NIX_SHELL) - - [`NIX_PATH`](#env-NIX_PATH)\ - A colon-separated list of directories used to look up the location of Nix - expressions using [paths](@docroot@/language/values.md#type-path) - enclosed in angle brackets (i.e., ``), - e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the - [`-I` option](@docroot@/command-ref/opt-common.md#opt-I). + Indicator that tells if the current environment was set up by + `nix-shell`. It can have the values `pure` or `impure`. 
- If `NIX_PATH` is not set at all, Nix will fall back to the following list in [impure](@docroot@/command-ref/conf-file.md#conf-pure-eval) and [unrestricted](@docroot@/command-ref/conf-file.md#conf-restrict-eval) evaluation mode: +- [`NIX_PATH`](#env-NIX_PATH) - 1. `$HOME/.nix-defexpr/channels` - 2. `nixpkgs=/nix/var/nix/profiles/per-user/root/channels/nixpkgs` - 3. `/nix/var/nix/profiles/per-user/root/channels` + A colon-separated list of directories used to look up the location of Nix + expressions using [paths](@docroot@/language/values.md#type-path) + enclosed in angle brackets (i.e., ``), + e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the + [`-I` option](@docroot@/command-ref/opt-common.md#opt-I). - If `NIX_PATH` is set to an empty string, resolving search paths will always fail. - For example, attempting to use `` will produce: + If `NIX_PATH` is not set at all, Nix will fall back to the following list in [impure](@docroot@/command-ref/conf-file.md#conf-pure-eval) and [unrestricted](@docroot@/command-ref/conf-file.md#conf-restrict-eval) evaluation mode: - error: file 'nixpkgs' was not found in the Nix search path + 1. `$HOME/.nix-defexpr/channels` + 2. `nixpkgs=/nix/var/nix/profiles/per-user/root/channels/nixpkgs` + 3. `/nix/var/nix/profiles/per-user/root/channels` - - [`NIX_IGNORE_SYMLINK_STORE`](#env-NIX_IGNORE_SYMLINK_STORE)\ - Normally, the Nix store directory (typically `/nix/store`) is not - allowed to contain any symlink components. This is to prevent - “impure” builds. Builders sometimes “canonicalise” paths by - resolving all symlink components. Thus, builds on different machines - (with `/nix/store` resolving to different locations) could yield - different results. This is generally not a problem, except when - builds are deployed to machines where `/nix/store` resolves - differently. If you are sure that you’re not going to do that, you - can set `NIX_IGNORE_SYMLINK_STORE` to `1`. + If `NIX_PATH` is set to an empty string, resolving search paths will always fail. + For example, attempting to use `` will produce: - Note that if you’re symlinking the Nix store so that you can put it - on another file system than the root file system, on Linux you’re - better off using `bind` mount points, e.g., + error: file 'nixpkgs' was not found in the Nix search path - ```console - $ mkdir /nix - $ mount -o bind /mnt/otherdisk/nix /nix - ``` +- [`NIX_IGNORE_SYMLINK_STORE`](#env-NIX_IGNORE_SYMLINK_STORE) - Consult the mount 8 manual page for details. + Normally, the Nix store directory (typically `/nix/store`) is not + allowed to contain any symlink components. This is to prevent + “impure” builds. Builders sometimes “canonicalise” paths by + resolving all symlink components. Thus, builds on different machines + (with `/nix/store` resolving to different locations) could yield + different results. This is generally not a problem, except when + builds are deployed to machines where `/nix/store` resolves + differently. If you are sure that you’re not going to do that, you + can set `NIX_IGNORE_SYMLINK_STORE` to `1`. - - [`NIX_STORE_DIR`](#env-NIX_STORE_DIR)\ - Overrides the location of the Nix store (default `prefix/store`). + Note that if you’re symlinking the Nix store so that you can put it + on another file system than the root file system, on Linux you’re + better off using `bind` mount points, e.g., - - [`NIX_DATA_DIR`](#env-NIX_DATA_DIR)\ - Overrides the location of the Nix static data directory (default - `prefix/share`). 
+ ```console + $ mkdir /nix + $ mount -o bind /mnt/otherdisk/nix /nix + ``` - - [`NIX_LOG_DIR`](#env-NIX_LOG_DIR)\ - Overrides the location of the Nix log directory (default - `prefix/var/log/nix`). + Consult the mount 8 manual page for details. - - [`NIX_STATE_DIR`](#env-NIX_STATE_DIR)\ - Overrides the location of the Nix state directory (default - `prefix/var/nix`). +- [`NIX_STORE_DIR`](#env-NIX_STORE_DIR) - - [`NIX_CONF_DIR`](#env-NIX_CONF_DIR)\ - Overrides the location of the system Nix configuration directory - (default `prefix/etc/nix`). + Overrides the location of the Nix store (default `prefix/store`). - - [`NIX_CONFIG`](#env-NIX_CONFIG)\ - Applies settings from Nix configuration from the environment. - The content is treated as if it was read from a Nix configuration file. - Settings are separated by the newline character. +- [`NIX_DATA_DIR`](#env-NIX_DATA_DIR) - - [`NIX_USER_CONF_FILES`](#env-NIX_USER_CONF_FILES)\ - Overrides the location of the Nix user configuration files to load from. + Overrides the location of the Nix static data directory (default + `prefix/share`). - The default are the locations according to the [XDG Base Directory Specification]. - See the [XDG Base Directories](#xdg-base-directories) sub-section for details. +- [`NIX_LOG_DIR`](#env-NIX_LOG_DIR) - The variable is treated as a list separated by the `:` token. + Overrides the location of the Nix log directory (default + `prefix/var/log/nix`). - - [`TMPDIR`](#env-TMPDIR)\ - Use the specified directory to store temporary files. In particular, - this includes temporary build directories; these can take up - substantial amounts of disk space. The default is `/tmp`. +- [`NIX_STATE_DIR`](#env-NIX_STATE_DIR) - - [`NIX_REMOTE`](#env-NIX_REMOTE)\ - This variable should be set to `daemon` if you want to use the Nix - daemon to execute Nix operations. This is necessary in [multi-user - Nix installations](@docroot@/installation/multi-user.md). If the Nix - daemon's Unix socket is at some non-standard path, this variable - should be set to `unix://path/to/socket`. Otherwise, it should be - left unset. + Overrides the location of the Nix state directory (default + `prefix/var/nix`). - - [`NIX_SHOW_STATS`](#env-NIX_SHOW_STATS)\ - If set to `1`, Nix will print some evaluation statistics, such as - the number of values allocated. +- [`NIX_CONF_DIR`](#env-NIX_CONF_DIR) - - [`NIX_COUNT_CALLS`](#env-NIX_COUNT_CALLS)\ - If set to `1`, Nix will print how often functions were called during - Nix expression evaluation. This is useful for profiling your Nix - expressions. + Overrides the location of the system Nix configuration directory + (default `prefix/etc/nix`). - - [`GC_INITIAL_HEAP_SIZE`](#env-GC_INITIAL_HEAP_SIZE)\ - If Nix has been configured to use the Boehm garbage collector, this - variable sets the initial size of the heap in bytes. It defaults to - 384 MiB. Setting it to a low value reduces memory consumption, but - will increase runtime due to the overhead of garbage collection. +- [`NIX_CONFIG`](#env-NIX_CONFIG) + + Applies settings from Nix configuration from the environment. + The content is treated as if it was read from a Nix configuration file. + Settings are separated by the newline character. + +- [`NIX_USER_CONF_FILES`](#env-NIX_USER_CONF_FILES) + + Overrides the location of the Nix user configuration files to load from. + + The default are the locations according to the [XDG Base Directory Specification]. + See the [XDG Base Directories](#xdg-base-directories) sub-section for details. 
+ + The variable is treated as a list separated by the `:` token. + +- [`TMPDIR`](#env-TMPDIR) + + Use the specified directory to store temporary files. In particular, + this includes temporary build directories; these can take up + substantial amounts of disk space. The default is `/tmp`. + +- [`NIX_REMOTE`](#env-NIX_REMOTE) + + This variable should be set to `daemon` if you want to use the Nix + daemon to execute Nix operations. This is necessary in [multi-user + Nix installations](@docroot@/installation/multi-user.md). If the Nix + daemon's Unix socket is at some non-standard path, this variable + should be set to `unix://path/to/socket`. Otherwise, it should be + left unset. + +- [`NIX_SHOW_STATS`](#env-NIX_SHOW_STATS) + + If set to `1`, Nix will print some evaluation statistics, such as + the number of values allocated. + +- [`NIX_COUNT_CALLS`](#env-NIX_COUNT_CALLS) + + If set to `1`, Nix will print how often functions were called during + Nix expression evaluation. This is useful for profiling your Nix + expressions. + +- [`GC_INITIAL_HEAP_SIZE`](#env-GC_INITIAL_HEAP_SIZE) + + If Nix has been configured to use the Boehm garbage collector, this + variable sets the initial size of the heap in bytes. It defaults to + 384 MiB. Setting it to a low value reduces memory consumption, but + will increase runtime due to the overhead of garbage collection. ## XDG Base Directories diff --git a/doc/manual/src/command-ref/nix-env/install.md b/doc/manual/src/command-ref/nix-env/install.md index ad179cbc7..c1fff50e8 100644 --- a/doc/manual/src/command-ref/nix-env/install.md +++ b/doc/manual/src/command-ref/nix-env/install.md @@ -14,23 +14,28 @@ # Description -The install operation creates a new user environment, based on the -current generation of the active profile, to which a set of store paths -described by *args* is added. The arguments *args* map to store paths in -a number of possible ways: +The install operation creates a new user environment. +It is based on the current generation of the active [profile](@docroot@/command-ref/files/profiles.md), to which a set of [store paths] described by *args* is added. - - By default, *args* is a set of derivation names denoting derivations - in the active Nix expression. These are realised, and the resulting - output paths are installed. Currently installed derivations with a - name equal to the name of a derivation being added are removed - unless the option `--preserve-installed` is specified. +[store paths]: @docroot@/glossary.md#gloss-store-path + +The arguments *args* map to store paths in a number of possible ways: + + + - By default, *args* is a set of [derivation] names denoting derivations in the [default Nix expression]. + These are [realised], and the resulting output paths are installed. + Currently installed derivations with a name equal to the name of a derivation being added are removed unless the option `--preserve-installed` is specified. + + [derivation]: @docroot@/glossary.md#gloss-derivation + [default Nix expression]: @docroot@/command-ref/files/default-nix-expression.md + [realised]: @docroot@/glossary.md#gloss-realise If there are multiple derivations matching a name in *args* that have the same name (e.g., `gcc-3.3.6` and `gcc-4.1.1`), then the derivation with the highest *priority* is used. A derivation can define a priority by declaring the `meta.priority` attribute. This attribute should be a number, with a higher value denoting a lower - priority. The default priority is `0`. + priority. The default priority is `5`. 
If there are multiple matching derivations with the same priority, then the derivation with the highest version will be installed. @@ -40,44 +45,90 @@ a number of possible ways: gcc-3.3.6 gcc-4.1.1` will install both version of GCC (and will probably cause a user environment conflict\!). - - If `--attr` (`-A`) is specified, the arguments are *attribute - paths* that select attributes from the top-level Nix - expression. This is faster than using derivation names and - unambiguous. To find out the attribute paths of available - packages, use `nix-env --query --available --attr-path `. + - If [`--attr`](#opt-attr) / `-A` is specified, the arguments are *attribute paths* that select attributes from the [default Nix expression]. + This is faster than using derivation names and unambiguous. + Show the attribute paths of available packages with [`nix-env --query`](./query.md): + + ```console + nix-env --query --available --attr-path + ``` - If `--from-profile` *path* is given, *args* is a set of names - denoting installed store paths in the profile *path*. This is an + denoting installed [store paths] in the profile *path*. This is an easy way to copy user environment elements from one profile to another. - - If `--from-expression` is given, *args* are Nix - [functions](@docroot@/language/constructs.md#functions) - that are called with the active Nix expression as their single - argument. The derivations returned by those function calls are - installed. This allows derivations to be specified in an - unambiguous way, which is necessary if there are multiple - derivations with the same name. + - If `--from-expression` is given, *args* are [Nix language functions](@docroot@/language/constructs.md#functions) that are called with the [default Nix expression] as their single argument. + The derivations returned by those function calls are installed. + This allows derivations to be specified in an unambiguous way, which is necessary if there are multiple derivations with the same name. - - If *args* are [store derivations](@docroot@/glossary.md#gloss-store-derivation), then these are - [realised](@docroot@/command-ref/nix-store/realise.md), and the resulting output paths - are installed. + - If *args* are [store derivations](@docroot@/glossary.md#gloss-store-derivation), then these are [realised], and the resulting output paths are installed. - - If *args* are store paths that are not store derivations, then these - are [realised](@docroot@/command-ref/nix-store/realise.md) and installed. + - If *args* are [store paths] that are not store derivations, then these are [realised] and installed. - - By default all outputs are installed for each derivation. That can - be reduced by setting `meta.outputsToInstall`. + - By default all [outputs](@docroot@/language/derivations.md#attr-outputs) are installed for each [derivation]. + This can be overridden by adding a `meta.outputsToInstall` attribute on the derivation listing a subset of the output names. -# Flags + Example: + + The file `example.nix` defines a derivation with two outputs `foo` and `bar`, each containing a file.
+ + ```nix + # example.nix + let + pkgs = import <nixpkgs> {}; + command = '' + ${pkgs.coreutils}/bin/mkdir -p $foo $bar + echo foo > $foo/foo-file + echo bar > $bar/bar-file + ''; + in + derivation { + name = "example"; + builder = "${pkgs.bash}/bin/bash"; + args = [ "-c" command ]; + outputs = [ "foo" "bar" ]; + system = builtins.currentSystem; + } + ``` + + Installing from this Nix expression will make files from both outputs appear in the current profile. + + ```console + $ nix-env --install --file example.nix + installing 'example' + $ ls ~/.nix-profile + foo-file + bar-file + manifest.nix + ``` + + Adding `meta.outputsToInstall` to that derivation will make `nix-env` only install files from the specified outputs. + + ```nix + # example-outputs.nix + import ./example.nix // { meta.outputsToInstall = [ "bar" ]; } + ``` + + ```console + $ nix-env --install --file example-outputs.nix + installing 'example' + $ ls ~/.nix-profile + bar-file + manifest.nix + ``` + +# Options + + - `--prebuilt-only` / `-b` + - `--prebuilt-only` / `-b`\ Use only derivations for which a substitute is registered, i.e., there is a pre-built binary available that can be downloaded in lieu of building the derivation. Thus, no packages will be built from source. - - `--preserve-installed` / `-P`\ + - `--preserve-installed` / `-P` + Do not remove derivations with a name matching one of the derivations being installed. Usually, trying to have two versions of the same package installed in the same generation of a profile will @@ -85,7 +136,8 @@ a number of possible ways: clashes between the two versions. However, this is not the case for all packages. - - `--remove-all` / `-r`\ + - `--remove-all` / `-r` + Remove all previously installed packages first. This is equivalent to running `nix-env --uninstall '.*'` first, except that everything happens in a single transaction. diff --git a/doc/manual/src/command-ref/nix-prefetch-url.md b/doc/manual/src/command-ref/nix-prefetch-url.md index 3bcd209e2..45ef01e02 100644 --- a/doc/manual/src/command-ref/nix-prefetch-url.md +++ b/doc/manual/src/command-ref/nix-prefetch-url.md @@ -31,15 +31,18 @@ store already contains a file with the same hash and base name. Otherwise, the file is downloaded, and an error is signaled if the actual hash of the file does not match the specified hash. -This command prints the hash on standard output. Additionally, if the -option `--print-path` is used, the path of the downloaded file in the -Nix store is also printed. +This command prints the hash on standard output. +The hash is printed using base-32 unless `--type md5` is specified, +in which case it's printed using base-16. +Additionally, if the option `--print-path` is used, +the path of the downloaded file in the Nix store is also printed. # Options - `--type` *hashAlgo*\ - Use the specified cryptographic hash algorithm, which can be one of - `md5`, `sha1`, `sha256`, and `sha512`. + Use the specified cryptographic hash algorithm, + which can be one of `md5`, `sha1`, `sha256`, and `sha512`. + The default is `sha256`. - `--print-path`\ Print the store path of the downloaded file on standard output. diff --git a/doc/manual/src/command-ref/nix-shell.md b/doc/manual/src/command-ref/nix-shell.md index 195b72be5..1eaf3c36a 100644 --- a/doc/manual/src/command-ref/nix-shell.md +++ b/doc/manual/src/command-ref/nix-shell.md @@ -235,14 +235,14 @@ package like Terraform: ```bash #! /usr/bin/env nix-shell -#! nix-shell -i bash --packages "terraform.withPlugins (plugins: [ plugins.openstack ])" +#!
nix-shell -i bash --packages 'terraform.withPlugins (plugins: [ plugins.openstack ])' terraform apply ``` > **Note** > -> You must use double quotes (`"`) when passing a simple Nix expression +> You must use single or double quotes (`'`, `"`) when passing a simple Nix expression > in a nix-shell shebang. Finally, using the merging of multiple nix-shell shebangs the following @@ -251,7 +251,7 @@ branch): ```haskell #! /usr/bin/env nix-shell -#! nix-shell -i runghc --packages "haskellPackages.ghcWithPackages (ps: [ps.download-curl ps.tagsoup])" +#! nix-shell -i runghc --packages 'haskellPackages.ghcWithPackages (ps: [ps.download-curl ps.tagsoup])' #! nix-shell -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/nixos-20.03.tar.gz import Network.Curl.Download diff --git a/doc/manual/src/command-ref/nix-store/query.md b/doc/manual/src/command-ref/nix-store/query.md index cd45a4932..a158c76aa 100644 --- a/doc/manual/src/command-ref/nix-store/query.md +++ b/doc/manual/src/command-ref/nix-store/query.md @@ -5,8 +5,8 @@ # Synopsis `nix-store` {`--query` | `-q`} - {`--outputs` | `--requisites` | `-R` | `--references` | - `--referrers` | `--referrers-closure` | `--deriver` | `-d` | + {`--outputs` | `--requisites` | `-R` | `--references` | `--referrers` | + `--referrers-closure` | `--deriver` | `-d` | `--valid-derivers` | `--graph` | `--tree` | `--binding` *name* | `-b` *name* | `--hash` | `--size` | `--roots`} [`--use-output`] [`-u`] [`--force-realise`] [`-f`] @@ -82,13 +82,21 @@ symlink. in the Nix store that are dependent on *paths*. - `--deriver`; `-d`\ - Prints the [deriver] of the store paths *paths*. If + Prints the [deriver] that was used to build the store paths *paths*. If the path has no deriver (e.g., if it is a source file), or if the deriver is not known (e.g., in the case of a binary-only deployment), the string `unknown-deriver` is printed. + The returned deriver is not guaranteed to exist in the local store, for + example when *paths* were substituted from a binary cache. + Use `--valid-derivers` instead to obtain valid paths only. [deriver]: ../../glossary.md#gloss-deriver + - `--valid-derivers`\ + Prints a set of derivation files (`.drv`) which are supposed produce + said paths when realized. Might print nothing, for example for source paths + or paths subsituted from a binary cache. + - `--graph`\ Prints the references graph of the store paths *paths* in the format of the `dot` tool of AT\&T's [Graphviz diff --git a/doc/manual/src/command-ref/nix-store/realise.md b/doc/manual/src/command-ref/nix-store/realise.md index c19aea75e..5428d57fa 100644 --- a/doc/manual/src/command-ref/nix-store/realise.md +++ b/doc/manual/src/command-ref/nix-store/realise.md @@ -1,6 +1,6 @@ # Name -`nix-store --realise` - realise specified store paths +`nix-store --realise` - build or fetch store objects # Synopsis @@ -8,33 +8,39 @@ # Description -The operation `--realise` essentially “builds” the specified store -paths. Realisation is a somewhat overloaded term: - - If the store path is a *derivation*, realisation ensures that the - output paths of the derivation are [valid] (i.e., - the output path and its closure exist in the file system). This - can be done in several ways. First, it is possible that the - outputs are already valid, in which case we are done - immediately. Otherwise, there may be [substitutes] - that produce the outputs (e.g., by downloading them). Finally, the - outputs can be produced by running the build task described - by the derivation. 
+Each of *paths* is processed as follows: - - If the store path is not a derivation, realisation ensures that the - specified path is valid (i.e., it and its closure exist in the file - system). If the path is already valid, we are done immediately. - Otherwise, the path and any missing paths in its closure may be - produced through substitutes. If there are no (successful) - substitutes, realisation fails. +- If the path leads to a [store derivation]: + 1. If it is not [valid], substitute the store derivation file itself. + 2. Realise its [output paths]: + - Try to fetch from [substituters] the [store objects] associated with the output paths in the store derivation's [closure]. + - With [content-addressed derivations] (experimental): + Determine the output paths to realise by querying content-addressed realisation entries in the [Nix database]. + - For any store paths that cannot be substituted, produce the required store objects: + 1. Realise all outputs of the derivation's dependencies + 2. Run the derivation's [`builder`](@docroot@/language/derivations.md#attr-builder) executable + +- Otherwise, and if the path is not already valid: Try to fetch the associated [store objects] in the path's [closure] from [substituters]. +If no substitutes are available and no store derivation is given, realisation fails. + +[store paths]: @docroot@/glossary.md#gloss-store-path [valid]: @docroot@/glossary.md#gloss-validity -[substitutes]: @docroot@/glossary.md#gloss-substitute +[store derivation]: @docroot@/glossary.md#gloss-store-derivation +[output paths]: @docroot@/glossary.md#gloss-output-path +[store objects]: @docroot@/glossary.md#gloss-store-object +[closure]: @docroot@/glossary.md#gloss-closure +[substituters]: @docroot@/command-ref/conf-file.md#conf-substituters +[content-addressed derivations]: @docroot@/contributing/experimental-features.md#xp-feature-ca-derivations +[Nix database]: @docroot@/glossary.md#gloss-nix-database -The output path of each derivation is printed on standard output. (For -non-derivations argument, the argument itself is printed.) +The resulting paths are printed on standard output. +For non-derivation arguments, the argument itself is printed. 
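For a rough illustration of the procedure described above, the following session instantiates a derivation and then realises it. This is a sketch only: the `hello` attribute and the printed store path are placeholders, and whether the output is built locally or fetched from a substituter depends on your configuration.

```console
$ drv=$(nix-instantiate '<nixpkgs>' --attr hello)
$ nix-store --realise --dry-run "$drv"   # describe what would be built or substituted
$ nix-store --realise "$drv"             # realise the outputs and print their store paths
/nix/store/…-hello-2.12
```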
-The following flags are available: +{{#include ../status-build-failure.md}} + +# Options - `--dry-run`\ Print on standard error a description of what packages would be @@ -54,8 +60,6 @@ The following flags are available: previous build, the new output path is left in `/nix/store/name.check.` -{{#include ../status-build-failure.md}} - {{#include ./opt-common.md}} {{#include ../opt-common.md}} @@ -67,8 +71,6 @@ The following flags are available: This operation is typically used to build [store derivation]s produced by [`nix-instantiate`](@docroot@/command-ref/nix-instantiate.md): -[store derivation]: @docroot@/glossary.md#gloss-store-derivation - ```console $ nix-store --realise $(nix-instantiate ./test.nix) /nix/store/31axcgrlbfsxzmfff1gyj1bf62hvkby2-aterm-2.3.1 diff --git a/doc/manual/src/command-ref/opt-common-syn.md b/doc/manual/src/command-ref/opt-common-syn.md deleted file mode 100644 index b66d318c2..000000000 --- a/doc/manual/src/command-ref/opt-common-syn.md +++ /dev/null @@ -1,57 +0,0 @@ -\--help - -\--version - -\--verbose - -\-v - -\--quiet - -\--log-format - -format - -\--no-build-output - -\-Q - -\--max-jobs - -\-j - -number - -\--cores - -number - -\--max-silent-time - -number - -\--timeout - -number - -\--keep-going - -\-k - -\--keep-failed - -\-K - -\--fallback - -\--readonly-mode - -\-I - -path - -\--option - -name - -value diff --git a/doc/manual/src/command-ref/opt-common.md b/doc/manual/src/command-ref/opt-common.md index 54c0a1d0d..114b292f9 100644 --- a/doc/manual/src/command-ref/opt-common.md +++ b/doc/manual/src/command-ref/opt-common.md @@ -2,217 +2,208 @@ Most Nix commands accept the following command-line options: - - [`--help`](#opt-help)\ - Prints out a summary of the command syntax and exits. +- [`--help`](#opt-help) - - [`--version`](#opt-version)\ - Prints out the Nix version number on standard output and exits. + Prints out a summary of the command syntax and exits. - - [`--verbose`](#opt-verbose) / `-v`\ - Increases the level of verbosity of diagnostic messages printed on - standard error. For each Nix operation, the information printed on - standard output is well-defined; any diagnostic information is - printed on standard error, never on standard output. +- [`--version`](#opt-version) - This option may be specified repeatedly. Currently, the following - verbosity levels exist: + Prints out the Nix version number on standard output and exits. - - 0\ - “Errors only”: only print messages explaining why the Nix - invocation failed. +- [`--verbose`](#opt-verbose) / `-v` - - 1\ - “Informational”: print *useful* messages about what Nix is - doing. This is the default. + Increases the level of verbosity of diagnostic messages printed on standard error. + For each Nix operation, the information printed on standard output is well-defined; + any diagnostic information is printed on standard error, never on standard output. - - 2\ - “Talkative”: print more informational messages. + This option may be specified repeatedly. + Currently, the following verbosity levels exist: - - 3\ - “Chatty”: print even more informational messages. + - `0` “Errors only” - - 4\ - “Debug”: print debug information. + Only print messages explaining why the Nix invocation failed. - - 5\ - “Vomit”: print vast amounts of debug information. + - `1` “Informational” - - [`--quiet`](#opt-quiet)\ - Decreases the level of verbosity of diagnostic messages printed on - standard error. This is the inverse option to `-v` / `--verbose`. + Print *useful* messages about what Nix is doing. 
+ This is the default. - This option may be specified repeatedly. See the previous verbosity - levels list. + - `2` “Talkative” - - [`--log-format`](#opt-log-format) *format*\ - This option can be used to change the output of the log format, with - *format* being one of: + Print more informational messages. - - raw\ - This is the raw format, as outputted by nix-build. + - `3` “Chatty” - - internal-json\ - Outputs the logs in a structured manner. + Print even more informational messages. - > **Warning** - > - > While the schema itself is relatively stable, the format of - > the error-messages (namely of the `msg`-field) can change - > between releases. + - `4` “Debug” + + Print debug information. - - bar\ - Only display a progress bar during the builds. + - `5` “Vomit” - - bar-with-logs\ - Display the raw logs, with the progress bar at the bottom. + Print vast amounts of debug information. - - [`--no-build-output`](#opt-no-build-output) / `-Q`\ - By default, output written by builders to standard output and - standard error is echoed to the Nix command's standard error. This - option suppresses this behaviour. Note that the builder's standard - output and error are always written to a log file in - `prefix/nix/var/log/nix`. +- [`--quiet`](#opt-quiet) - - [`--max-jobs`](#opt-max-jobs) / `-j` *number*\ - Sets the maximum number of build jobs that Nix will perform in - parallel to the specified number. Specify `auto` to use the number - of CPUs in the system. The default is specified by the `max-jobs` - configuration setting, which itself defaults to `1`. A higher - value is useful on SMP systems or to exploit I/O latency. + Decreases the level of verbosity of diagnostic messages printed on standard error. + This is the inverse option to `-v` / `--verbose`. - Setting it to `0` disallows building on the local machine, which is - useful when you want builds to happen only on remote builders. + This option may be specified repeatedly. + See the previous verbosity levels list. - - [`--cores`](#opt-cores)\ - Sets the value of the `NIX_BUILD_CORES` environment variable in - the invocation of builders. Builders can use this variable at - their discretion to control the maximum amount of parallelism. For - instance, in Nixpkgs, if the derivation attribute - `enableParallelBuilding` is set to `true`, the builder passes the - `-jN` flag to GNU Make. It defaults to the value of the `cores` - configuration setting, if set, or `1` otherwise. The value `0` - means that the builder should use all available CPU cores in the - system. +- [`--log-format`](#opt-log-format) *format* - - [`--max-silent-time`](#opt-max-silent-time)\ - Sets the maximum number of seconds that a builder can go without - producing any data on standard output or standard error. The - default is specified by the `max-silent-time` configuration - setting. `0` means no time-out. + This option can be used to change the output of the log format, with *format* being one of: - - [`--timeout`](#opt-timeout)\ - Sets the maximum number of seconds that a builder can run. The - default is specified by the `timeout` configuration setting. `0` - means no timeout. + - `raw` - - [`--keep-going`](#opt-keep-going) / `-k`\ - Keep going in case of failed builds, to the greatest extent - possible. That is, if building an input of some derivation fails, - Nix will still build the other inputs, but not the derivation - itself. 
Without this option, Nix stops if any build fails (except - for builds of substitutes), possibly killing builds in progress (in - case of parallel or distributed builds). + This is the raw format, as outputted by nix-build. - - [`--keep-failed`](#opt-keep-failed) / `-K`\ - Specifies that in case of a build failure, the temporary directory - (usually in `/tmp`) in which the build takes place should not be - deleted. The path of the build directory is printed as an - informational message. + - `internal-json` - - [`--fallback`](#opt-fallback)\ - Whenever Nix attempts to build a derivation for which substitutes - are known for each output path, but realising the output paths - through the substitutes fails, fall back on building the derivation. + Outputs the logs in a structured manner. - The most common scenario in which this is useful is when we have - registered substitutes in order to perform binary distribution from, - say, a network repository. If the repository is down, the - realisation of the derivation will fail. When this option is - specified, Nix will build the derivation instead. Thus, installation - from binaries falls back on installation from source. This option is - not the default since it is generally not desirable for a transient - failure in obtaining the substitutes to lead to a full build from - source (with the related consumption of resources). + > **Warning** + > + > While the schema itself is relatively stable, the format of + > the error-messages (namely of the `msg`-field) can change + > between releases. - - [`--readonly-mode`](#opt-readonly-mode)\ - When this option is used, no attempt is made to open the Nix - database. Most Nix operations do need database access, so those - operations will fail. + - `bar` - - [`--arg`](#opt-arg) *name* *value*\ - This option is accepted by `nix-env`, `nix-instantiate`, - `nix-shell` and `nix-build`. When evaluating Nix expressions, the - expression evaluator will automatically try to call functions that - it encounters. It can automatically call functions for which every - argument has a [default - value](@docroot@/language/constructs.md#functions) (e.g., - `{ argName ? defaultValue }: ...`). With `--arg`, you can also - call functions that have arguments without a default value (or - override a default value). That is, if the evaluator encounters a - function with an argument named *name*, it will call it with value - *value*. + Only display a progress bar during the builds. - For instance, the top-level `default.nix` in Nixpkgs is actually a - function: + - `bar-with-logs` - ```nix - { # The system (e.g., `i686-linux') for which to build the packages. - system ? builtins.currentSystem - ... - }: ... - ``` + Display the raw logs, with the progress bar at the bottom. - So if you call this Nix expression (e.g., when you do `nix-env --install --attr - pkgname`), the function will be called automatically using the - value [`builtins.currentSystem`](@docroot@/language/builtins.md) for - the `system` argument. You can override this using `--arg`, e.g., - `nix-env --install --attr pkgname --arg system \"i686-freebsd\"`. (Note that - since the argument is a Nix string literal, you have to escape the - quotes.) +- [`--no-build-output`](#opt-no-build-output) / `-Q` - - [`--argstr`](#opt-argstr) *name* *value*\ - This option is like `--arg`, only the value is not a Nix - expression but a string. So instead of `--arg system - \"i686-linux\"` (the outer quotes are to keep the shell happy) you - can say `--argstr system i686-linux`. 
+ By default, output written by builders to standard output and standard error is echoed to the Nix command's standard error. + This option suppresses this behaviour. + Note that the builder's standard output and error are always written to a log file in `prefix/nix/var/log/nix`. - - [`--attr`](#opt-attr) / `-A` *attrPath*\ - Select an attribute from the top-level Nix expression being - evaluated. (`nix-env`, `nix-instantiate`, `nix-build` and - `nix-shell` only.) The *attribute path* *attrPath* is a sequence - of attribute names separated by dots. For instance, given a - top-level Nix expression *e*, the attribute path `xorg.xorgserver` - would cause the expression `e.xorg.xorgserver` to be used. See - [`nix-env --install`](@docroot@/command-ref/nix-env/install.md) for some - concrete examples. +- [`--max-jobs`](#opt-max-jobs) / `-j` *number* - In addition to attribute names, you can also specify array indices. - For instance, the attribute path `foo.3.bar` selects the `bar` - attribute of the fourth element of the array in the `foo` attribute - of the top-level expression. + Sets the maximum number of build jobs that Nix will perform in parallel to the specified number. + Specify `auto` to use the number of CPUs in the system. + The default is specified by the `max-jobs` configuration setting, which itself defaults to `1`. + A higher value is useful on SMP systems or to exploit I/O latency. - - [`--expr`](#opt-expr) / `-E`\ - Interpret the command line arguments as a list of Nix expressions to - be parsed and evaluated, rather than as a list of file names of Nix - expressions. (`nix-instantiate`, `nix-build` and `nix-shell` only.) + Setting it to `0` disallows building on the local machine, which is useful when you want builds to happen only on remote builders. - For `nix-shell`, this option is commonly used to give you a shell in - which you can build the packages returned by the expression. If you - want to get a shell which contain the *built* packages ready for - use, give your expression to the `nix-shell --packages ` convenience flag - instead. +- [`--cores`](#opt-cores) - - [`-I`](#opt-I) *path*\ - Add an entry to the [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path). - This option may be given multiple times. - Paths added through `-I` take precedence over [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH). + Sets the value of the `NIX_BUILD_CORES` environment variable in the invocation of builders. + Builders can use this variable at their discretion to control the maximum amount of parallelism. + For instance, in Nixpkgs, if the derivation attribute `enableParallelBuilding` is set to `true`, the builder passes the `-jN` flag to GNU Make. + It defaults to the value of the `cores` configuration setting, if set, or `1` otherwise. + The value `0` means that the builder should use all available CPU cores in the system. - - [`--option`](#opt-option) *name* *value*\ - Set the Nix configuration option *name* to *value*. This overrides - settings in the Nix configuration file (see nix.conf5). +- [`--max-silent-time`](#opt-max-silent-time) - - [`--repair`](#opt-repair)\ - Fix corrupted or missing store paths by redownloading or rebuilding - them. Note that this is slow because it requires computing a - cryptographic hash of the contents of every path in the closure of - the build. Also note the warning under `nix-store --repair-path`. 
+ Sets the maximum number of seconds that a builder can go without producing any data on standard output or standard error. + The default is specified by the `max-silent-time` configuration setting. + `0` means no time-out. + +- [`--timeout`](#opt-timeout) + + Sets the maximum number of seconds that a builder can run. + The default is specified by the `timeout` configuration setting. + `0` means no timeout. + +- [`--keep-going`](#opt-keep-going) / `-k` + + Keep going in case of failed builds, to the greatest extent possible. + That is, if building an input of some derivation fails, Nix will still build the other inputs, but not the derivation itself. + Without this option, Nix stops if any build fails (except for builds of substitutes), possibly killing builds in progress (in case of parallel or distributed builds). + +- [`--keep-failed`](#opt-keep-failed) / `-K` + + Specifies that in case of a build failure, the temporary directory (usually in `/tmp`) in which the build takes place should not be deleted. + The path of the build directory is printed as an informational message. + +- [`--fallback`](#opt-fallback) + + Whenever Nix attempts to build a derivation for which substitutes are known for each output path, but realising the output paths through the substitutes fails, fall back on building the derivation. + + The most common scenario in which this is useful is when we have registered substitutes in order to perform binary distribution from, say, a network repository. + If the repository is down, the realisation of the derivation will fail. + When this option is specified, Nix will build the derivation instead. + Thus, installation from binaries falls back on installation from source. + This option is not the default since it is generally not desirable for a transient failure in obtaining the substitutes to lead to a full build from source (with the related consumption of resources). + +- [`--readonly-mode`](#opt-readonly-mode) + + When this option is used, no attempt is made to open the Nix database. + Most Nix operations do need database access, so those operations will fail. + +- [`--arg`](#opt-arg) *name* *value* + + This option is accepted by `nix-env`, `nix-instantiate`, `nix-shell` and `nix-build`. + When evaluating Nix expressions, the expression evaluator will automatically try to call functions that it encounters. + It can automatically call functions for which every argument has a [default value](@docroot@/language/constructs.md#functions) (e.g., `{ argName ? defaultValue }: ...`). + + With `--arg`, you can also call functions that have arguments without a default value (or override a default value). + That is, if the evaluator encounters a function with an argument named *name*, it will call it with value *value*. + + For instance, the top-level `default.nix` in Nixpkgs is actually a function: + + ```nix + { # The system (e.g., `i686-linux') for which to build the packages. + system ? builtins.currentSystem + ... + }: ... + ``` + + So if you call this Nix expression (e.g., when you do `nix-env --install --attr pkgname`), the function will be called automatically using the value [`builtins.currentSystem`](@docroot@/language/builtins.md) for the `system` argument. + You can override this using `--arg`, e.g., `nix-env --install --attr pkgname --arg system \"i686-freebsd\"`. + (Note that since the argument is a Nix string literal, you have to escape the quotes.) 
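To make the difference concrete, here is a small sketch using `nix-instantiate --eval` with a made-up function: `--arg` passes a Nix expression (here a number), while `--argstr`, described next, passes a plain string without any shell-level quote escaping. The outputs shown are what one would expect and are given for illustration only.

```console
$ nix-instantiate --eval --expr '{ system ? builtins.currentSystem }: system' --argstr system i686-freebsd
"i686-freebsd"
$ nix-instantiate --eval --expr '{ count ? 1 }: count * 2' --arg count 21
42
```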
+ +- [`--argstr`](#opt-argstr) *name* *value* + + This option is like `--arg`, only the value is not a Nix expression but a string. + So instead of `--arg system \"i686-linux\"` (the outer quotes are to keep the shell happy) you can say `--argstr system i686-linux`. + +- [`--attr`](#opt-attr) / `-A` *attrPath* + + Select an attribute from the top-level Nix expression being evaluated. + (`nix-env`, `nix-instantiate`, `nix-build` and `nix-shell` only.) + The *attribute path* *attrPath* is a sequence of attribute names separated by dots. + For instance, given a top-level Nix expression *e*, the attribute path `xorg.xorgserver` would cause the expression `e.xorg.xorgserver` to be used. + See [`nix-env --install`](@docroot@/command-ref/nix-env/install.md) for some concrete examples. + + In addition to attribute names, you can also specify array indices. + For instance, the attribute path `foo.3.bar` selects the `bar` + attribute of the fourth element of the array in the `foo` attribute + of the top-level expression. + +- [`--expr`](#opt-expr) / `-E` + + Interpret the command line arguments as a list of Nix expressions to be parsed and evaluated, rather than as a list of file names of Nix expressions. + (`nix-instantiate`, `nix-build` and `nix-shell` only.) + + For `nix-shell`, this option is commonly used to give you a shell in which you can build the packages returned by the expression. + If you want to get a shell which contain the *built* packages ready for use, give your expression to the `nix-shell --packages ` convenience flag instead. + +- [`-I`](#opt-I) *path* + + Add an entry to the [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path). + This option may be given multiple times. + Paths added through `-I` take precedence over [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH). + +- [`--option`](#opt-option) *name* *value* + + Set the Nix configuration option *name* to *value*. + This overrides settings in the Nix configuration file (see nix.conf5). + +- [`--repair`](#opt-repair) + + Fix corrupted or missing store paths by redownloading or rebuilding them. + Note that this is slow because it requires computing a cryptographic hash of the contents of every path in the closure of the build. + Also note the warning under `nix-store --repair-path`. + +> **Note** +> +> See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags. diff --git a/doc/manual/src/command-ref/opt-inst-syn.md b/doc/manual/src/command-ref/opt-inst-syn.md deleted file mode 100644 index 1703c40e3..000000000 --- a/doc/manual/src/command-ref/opt-inst-syn.md +++ /dev/null @@ -1,15 +0,0 @@ -\--prebuilt-only - -\-b - -\--attr - -\-A - -\--from-expression - -\-E - -\--from-profile - -path diff --git a/doc/manual/src/contributing/contributing.md b/doc/manual/src/contributing/contributing.md index 854139a31..4d55c17a4 100644 --- a/doc/manual/src/contributing/contributing.md +++ b/doc/manual/src/contributing/contributing.md @@ -1 +1,8 @@ -# Contributing +# Development + +Nix is developed on GitHub. +Check the [contributing guide](https://github.com/NixOS/nix/blob/master/CONTRIBUTING.md) if you want to get involved. + +This chapter is a collection of guides for making changes to the code and documentation. + +If you're not sure where to start, try to [compile Nix from source](./hacking.md) and consider [making improvements to documentation](./documentation.md). 
diff --git a/doc/manual/src/contributing/documentation.md b/doc/manual/src/contributing/documentation.md new file mode 100644 index 000000000..f73ab2149 --- /dev/null +++ b/doc/manual/src/contributing/documentation.md @@ -0,0 +1,181 @@ +# Contributing documentation + +Improvements to documentation are very much appreciated, and a good way to start out with contributing to Nix. + +This is how you can help: +- Address [open issues with documentation](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+label%3Adocumentation) +- Review [pull requests concerning documentation](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+label%3Adocumentation) + +Incremental refactorings of the documentation build setup to make it faster or easier to understand and maintain are also welcome. + +## Building the manual + +Build the manual from scratch: + +```console +nix-build $(nix-instantiate)'!doc' +``` + +or + +```console +nix build .#^doc +``` + +and open `./result-doc/share/doc/nix/manual/index.html`. + +To build the manual incrementally, [enter the development shell](./hacking.md) and run: + +```console +make manual-html -j $NIX_BUILD_CORES +``` + +and open `./outputs/out/share/doc/nix/manual/language/index.html`. + +In order to reflect changes to the [Makefile for the manual], clear all generated files before re-building: + +[Makefile for the manual]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk + +```console +rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/src/command-ref/new-cli && make manual-html -j $NIX_BUILD_CORES +``` + +## Style guide + +The goal of this style guide is to make it such that +- The manual is easy to search and skim for relevant information +- Documentation sources are easy to edit +- Changes to documentation are easy to review + +You will notice that this is not implemented consistently yet. +Please follow the guide when making additions or changes to existing documentation. +Do not make sweeping changes, unless they are programmatic and can be validated easily. + +### Language + +This manual is [reference documentation](https://diataxis.fr/reference/). +The typical usage pattern is to look up isolated pieces of information. +It should therefore aim to be correct, consistent, complete, and easy to navigate at a glance. + +- Aim for clarity and brevity. + + Please take the time to read the [plain language guidelines](https://www.plainlanguage.gov/guidelines/) for details. + +- Describe the subject factually. + + In particular, do not make value judgements or recommendations. + Check the code or add tests if in doubt. + +- Provide complete, minimal examples, and explain them. + + Readers should be able to try examples verbatim and get the same results as shown in the manual. + Always describe in words what a given example does. + + Non-trivial examples may need additional explanation, especially if they use concepts from outside the given context. + +- Use British English. + + This is a somewhat arbitrary choice to force consistency, and accounts for the fact that a majority of Nix users and developers are from Europe. + +### Links and anchors + +Reference documentation must be readable in arbitrary order. +Readers cannot be expected to have any particular prerequisite knowledge about Nix. +While the table of contents can provide guidance and full-text search can help, they are most likely to find what they need by following sensible cross-references. 
+
+- Link to technical terms
+
+  When mentioning Nix-specific concepts, commands, options, settings, etc., link to appropriate documentation.
+  Also link to external tools or concepts, especially if their meaning may be ambiguous.
+  You may also want to link to definitions of less common technical terms.
+
+  Then readers won't have to actively search for definitions and are more likely to discover relevant information on their own.
+
+  > **Note**
+  >
+  > `man` and `--help` pages don't display links.
+  > Use appropriate link texts such that readers of terminal output can infer search terms.
+
+- Do not break existing URLs between releases.
+
+  There are countless links in the wild pointing to old versions of the manual.
+  We want people to find up-to-date documentation when following popular advice.
+
+  - When moving files, update [redirects on nixos.org](https://github.com/NixOS/nixos-homepage/blob/master/netlify.toml).
+
+    This is especially important when moving information out of the Nix manual to other resources.
+
+  - When changing anchors, update [client-side redirects](https://github.com/NixOS/nix/blob/master/doc/manual/redirects.js)
+
+    The current setup is cumbersome, and help making better automation is appreciated.
+
+The build checks for broken internal links with [`mdbook-linkcheck`].
+This happens late in the process, so [building the whole manual](#building-the-manual) is not suitable for iterating quickly.
+[`mdbook-linkcheck`] does not implement checking [URI fragments] yet.
+
+[`mdbook-linkcheck`]: https://github.com/Michael-F-Bryan/mdbook-linkcheck
+[URI fragments]: https://en.wikipedia.org/wiki/URI_fragment
+
+### Markdown conventions
+
+The manual is written in markdown, and rendered with [mdBook](https://github.com/rust-lang/mdBook) for the web and with [lowdown](https://github.com/kristapsdz/lowdown) for `man` pages and `--help` output.
+
+For supported markdown features, refer to:
+- [mdBook documentation](https://rust-lang.github.io/mdBook/format/markdown.html)
+- [lowdown documentation](https://kristaps.bsd.lv/lowdown/)
+
+Please observe these guidelines to ease reviews:
+
+- Write one sentence per line.
+
+  This makes long sentences immediately visible, and makes it easier to review changes and make direct suggestions.
+
+- Use reference links – sparingly – to ease source readability.
+  Put definitions close to their first use.
+
+  Example:
+
+  ```
+  A [store object] contains a [file system object] and [references] to other store objects.
+
+  [store object]: @docroot@/glossary.md#gloss-store-object
+  [file system object]: @docroot@/architecture/file-system-object.md
+  [references]: @docroot@/glossary.md#gloss-reference
+  ```
+
+- Use admonitions of the following form:
+
+  ```
+  > **Note**
+  >
+  > This is a note.
+  ```
+
+### The `@docroot@` variable
+
+`@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own.
+
+If a broken link occurs in a snippet that was inserted into multiple generated files in different directories, use `@docroot@` to reference the `doc/manual/src` directory.
+
+If the `@docroot@` literal appears in an error message from the [`mdbook-linkcheck`] tool, the `@docroot@` replacement needs to be applied to the generated source file that mentions it.
+See existing `@docroot@` logic in the [Makefile for the manual].
+Regular markdown files used for the manual have a base path of their own and they can use relative paths instead of `@docroot@`.
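When in doubt about whether `@docroot@` is needed in the file you are editing, it can help to look at how existing sources use it, for example with a plain `grep` (an illustrative command, not part of the build setup; output abridged):

```console
$ grep -R --include='*.md' -n '@docroot@' doc/manual/src | head -n 3
```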
+ +## API documentation + +[Doxygen API documentation] is available online. +You can also build and view it yourself: + +[Doxygen API documentation]: https://hydra.nixos.org/job/nix/master/internal-api-docs/latest/download-by-type/doc/internal-api-docs + +```console +# nix build .#hydraJobs.internal-api-docs +# xdg-open ./result/share/doc/nix/internal-api/html/index.html +``` + +or inside `nix-shell` or `nix develop`: + +``` +# make internal-api-html +# xdg-open ./outputs/doc/share/doc/nix/internal-api/html/index.html +``` diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 4b0a3a3e5..38c144fcc 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -42,8 +42,8 @@ $ nix develop .#native-clang11StdenvPackages To build Nix itself in this shell: ```console -[nix-shell]$ ./bootstrap.sh -[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out +[nix-shell]$ autoreconfPhase +[nix-shell]$ configurePhase [nix-shell]$ make -j $NIX_BUILD_CORES ``` @@ -86,7 +86,7 @@ $ nix-shell --attr devShells.x86_64-linux.native-clang11StdenvPackages To build Nix itself in this shell: ```console -[nix-shell]$ ./bootstrap.sh +[nix-shell]$ autoreconfPhase [nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out [nix-shell]$ make -j $NIX_BUILD_CORES ``` @@ -220,68 +220,3 @@ Configure your editor to use the `clangd` from the shell, either by running it i > For some editors (e.g. Visual Studio Code), you may need to install a [special extension](https://open-vsx.org/extension/llvm-vs-code-extensions/vscode-clangd) for the editor to interact with `clangd`. > Some other editors (e.g. Emacs, Vim) need a plugin to support LSP servers in general (e.g. [lsp-mode](https://github.com/emacs-lsp/lsp-mode) for Emacs and [vim-lsp](https://github.com/prabirshrestha/vim-lsp) for vim). > Editor-specific setup is typically opinionated, so we will not cover it here in more detail. - -### Checking links in the manual - -The build checks for broken internal links. -This happens late in the process, so `nix build` is not suitable for iterating. -To build the manual incrementally, run: - -```console -make html -j $NIX_BUILD_CORES -``` - -In order to reflect changes to the [Makefile], clear all generated files before re-building: - -[Makefile]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk - -```console -rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/src/command-ref/new-cli && make html -j $NIX_BUILD_CORES -``` - -[`mdbook-linkcheck`] does not implement checking [URI fragments] yet. - -[`mdbook-linkcheck`]: https://github.com/Michael-F-Bryan/mdbook-linkcheck -[URI fragments]: https://en.wikipedia.org/wiki/URI_fragment - -#### `@docroot@` variable - -`@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own. - -If a broken link occurs in a snippet that was inserted into multiple generated files in different directories, use `@docroot@` to reference the `doc/manual/src` directory. - -If the `@docroot@` literal appears in an error message from the `mdbook-linkcheck` tool, the `@docroot@` replacement needs to be applied to the generated source file that mentions it. -See existing `@docroot@` logic in the [Makefile]. -Regular markdown files used for the manual have a base path of their own and they can use relative paths instead of `@docroot@`. 
- -## API documentation - -Doxygen API documentation is [available -online](https://hydra.nixos.org/job/nix/master/internal-api-docs/latest/download-by-type/doc/internal-api-docs). You -can also build and view it yourself: - -```console -# nix build .#hydraJobs.internal-api-docs -# xdg-open ./result/share/doc/nix/internal-api/html/index.html -``` - -or inside a `nix develop` shell by running: - -``` -# make internal-api-html -# xdg-open ./outputs/doc/share/doc/nix/internal-api/html/index.html -``` - -## Coverage analysis - -A coverage analysis report is [available -online](https://hydra.nixos.org/job/nix/master/coverage/latest/download-by-type/report/coverage). You -can build it yourself: - -``` -# nix build .#hydraJobs.coverage -# xdg-open ./result/coverage/index.html -``` - -Metrics about the change in line/function coverage over time are also -[available](https://hydra.nixos.org/job/nix/master/coverage#tabs-charts). diff --git a/doc/manual/src/contributing/testing.md b/doc/manual/src/contributing/testing.md index c3c82e3c0..3d75ebe7b 100644 --- a/doc/manual/src/contributing/testing.md +++ b/doc/manual/src/contributing/testing.md @@ -1,17 +1,88 @@ # Running tests +## Coverage analysis + +A [coverage analysis report] is available online +You can build it yourself: + +[coverage analysis report]: https://hydra.nixos.org/job/nix/master/coverage/latest/download-by-type/report/coverage + +``` +# nix build .#hydraJobs.coverage +# xdg-open ./result/coverage/index.html +``` + +[Extensive records of build metrics](https://hydra.nixos.org/job/nix/master/coverage#tabs-charts), such as test coverage over time, are also available online. + ## Unit-tests -The unit-tests for each Nix library (`libexpr`, `libstore`, etc..) are defined -under `src/{library_name}/tests` using the -[googletest](https://google.github.io/googletest/) and -[rapidcheck](https://github.com/emil-e/rapidcheck) frameworks. +The unit tests are defined using the [googletest] and [rapidcheck] frameworks. -You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option. +[googletest]: https://google.github.io/googletest/ +[rapidcheck]: https://github.com/emil-e/rapidcheck + +### Source and header layout + +> An example of some files, demonstrating much of what is described below +> +> ``` +> src +> ├── libexpr +> │ ├── value/context.hh +> │ ├── value/context.cc +> │ │ +> │ … +> └── tests +> │ ├── value/context.hh +> │ ├── value/context.cc +> │ │ +> │ … +> │ +> ├── unit-test-data +> │ ├── libstore +> │ │ ├── worker-protocol/content-address.bin +> │ │ … +> │ … +> … +> ``` + +The unit tests for each Nix library (`libnixexpr`, `libnixstore`, etc..) live inside a directory `src/${library_shortname}/tests` within the directory for the library (`src/${library_shortname}`). + +The data is in `unit-test-data`, with one subdir per library, with the same name as where the code goes. +For example, `libnixstore` code is in `src/libstore`, and its test data is in `unit-test-data/libstore`. +The path to the `unit-test-data` directory is passed to the unit test executable with the environment variable `_NIX_TEST_UNIT_DATA`. + +> **Note** +> Due to the way googletest works, downstream unit test executables will actually include and re-run upstream library tests. 
+> Therefore it is important that the same value for `_NIX_TEST_UNIT_DATA` be used with the tests for each library. +> That is why we have the test data nested within a single `unit-test-data` directory. + +### Running tests + +You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. +Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option, or the `GTEST_FILTER` environment variable. + +### Characterisation testing { #characaterisation-testing-unit } + +See [functional characterisation testing](#characterisation-testing-functional) for a broader discussion of characterisation testing. + +Like with the functional characterisation, `_NIX_TEST_ACCEPT=1` is also used. +For example: +```shell-session +$ _NIX_TEST_ACCEPT=1 make libstore-tests-exe_RUN +... +[ SKIPPED ] WorkerProtoTest.string_read +[ SKIPPED ] WorkerProtoTest.string_write +[ SKIPPED ] WorkerProtoTest.storePath_read +[ SKIPPED ] WorkerProtoTest.storePath_write +... +``` +will regenerate the "golden master" expected result for the `libnixstore` characterisation tests. +The characterisation tests will mark themselves "skipped" since they regenerated the expected result instead of actually testing anything. ## Functional tests -The functional tests reside under the `tests` directory and are listed in `tests/local.mk`. +The functional tests reside under the `tests/functional` directory and are listed in `tests/functional/local.mk`. Each test is a bash script. ### Running the whole test suite @@ -20,8 +91,8 @@ The whole test suite can be run with: ```shell-session $ make install && make installcheck -ran test tests/foo.sh... [PASS] -ran test tests/bar.sh... [PASS] +ran test tests/functional/foo.sh... [PASS] +ran test tests/functional/bar.sh... [PASS] ... ``` @@ -29,14 +100,14 @@ ran test tests/bar.sh... [PASS] Sometimes it is useful to group related tests so they can be easily run together without running the entire test suite. Each test group is in a subdirectory of `tests`. -For example, `tests/ca/local.mk` defines a `ca` test group for content-addressed derivation outputs. +For example, `tests/functional/ca/local.mk` defines a `ca` test group for content-addressed derivation outputs. That test group can be run like this: ```shell-session $ make ca.test-group -j50 -ran test tests/ca/nix-run.sh... [PASS] -ran test tests/ca/import-derivation.sh... [PASS] +ran test tests/functional/ca/nix-run.sh... [PASS] +ran test tests/functional/ca/import-derivation.sh... [PASS] ... ``` @@ -55,21 +126,21 @@ install-tests-groups += $(test-group-name) Individual tests can be run with `make`: ```shell-session -$ make tests/${testName}.sh.test -ran test tests/${testName}.sh... [PASS] +$ make tests/functional/${testName}.sh.test +ran test tests/functional/${testName}.sh... [PASS] ``` or without `make`: ```shell-session -$ ./mk/run-test.sh tests/${testName}.sh -ran test tests/${testName}.sh... [PASS] +$ ./mk/run-test.sh tests/functional/${testName}.sh +ran test tests/functional/${testName}.sh... 
[PASS] ``` To see the complete output, one can also run: ```shell-session -$ ./mk/debug-test.sh tests/${testName}.sh +$ ./mk/debug-test.sh tests/functional/${testName}.sh + foo output from foo + bar @@ -104,7 +175,7 @@ edit it like so: Then, running the test with `./mk/debug-test.sh` will drop you into GDB once the script reaches that point: ```shell-session -$ ./mk/debug-test.sh tests/${testName}.sh +$ ./mk/debug-test.sh tests/functional/${testName}.sh ... + gdb blash blub GNU gdb (GDB) 12.1 @@ -115,17 +186,29 @@ GNU gdb (GDB) 12.1 One can debug the Nix invocation in all the usual ways. For example, enter `run` to start the Nix invocation. -### Characterization testing +### Troubleshooting -Occasionally, Nix utilizes a technique called [Characterization Testing](https://en.wikipedia.org/wiki/Characterization_test) as part of the functional tests. +Sometimes running tests in the development shell may leave artefacts in the local repository. +To remove any traces of that: + +```console +git clean -x --force tests +``` + +### Characterisation testing { #characterisation-testing-functional } + +Occasionally, Nix utilizes a technique called [Characterisation Testing](https://en.wikipedia.org/wiki/Characterization_test) as part of the functional tests. This technique is to include the exact output/behavior of a former version of Nix in a test in order to check that Nix continues to produce the same behavior going forward. For example, this technique is used for the language tests, to check both the printed final value if evaluation was successful, and any errors and warnings encountered. It is frequently useful to regenerate the expected output. -To do that, rerun the failed test with `_NIX_TEST_ACCEPT=1`. -(At least, this is the convention we've used for `tests/lang.sh`. -If we add more characterization testing we should always strive to be consistent.) +To do that, rerun the failed test(s) with `_NIX_TEST_ACCEPT=1`. +For example: +```bash +_NIX_TEST_ACCEPT=1 make tests/functional/lang.sh.test +``` +This convention is shared with the [characterisation unit tests](#characterisation-testing-unit) too. An interesting situation to document is the case when these tests are "overfitted". The language tests are, again, an example of this. @@ -138,7 +221,7 @@ Diagnostic outputs are indeed not a stable interface, but they still are importa By recording the expected output, the test suite guards against accidental changes, and ensure the *result* (not just the code that implements it) of the diagnostic code paths are under code review. Regressions are caught, and improvements always show up in code review. -To ensure that characterization testing doesn't make it harder to intentionally change these interfaces, there always must be an easy way to regenerate the expected output, as we do with `_NIX_TEST_ACCEPT=1`. +To ensure that characterisation testing doesn't make it harder to intentionally change these interfaces, there always must be an easy way to regenerate the expected output, as we do with `_NIX_TEST_ACCEPT=1`. ## Integration tests @@ -152,7 +235,7 @@ You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix- After a one-time setup, the Nix repository's GitHub Actions continuous integration (CI) workflow can test the installer each time you push to a branch. 
-Creating a Cachix cache for your installer tests and adding its authorization token to GitHub enables [two installer-specific jobs in the CI workflow](https://github.com/NixOS/nix/blob/88a45d6149c0e304f6eb2efcc2d7a4d0d569f8af/.github/workflows/ci.yml#L50-L91): +Creating a Cachix cache for your installer tests and adding its authorisation token to GitHub enables [two installer-specific jobs in the CI workflow](https://github.com/NixOS/nix/blob/88a45d6149c0e304f6eb2efcc2d7a4d0d569f8af/.github/workflows/ci.yml#L50-L91): - The `installer` job generates installers for the platforms below and uploads them to your Cachix cache: - `x86_64-linux` diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index ac0bb3c2f..d49d5e52e 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -1,236 +1,283 @@ # Glossary - - [derivation]{#gloss-derivation}\ - A description of a build task. The result of a derivation is a - store object. Derivations are typically specified in Nix expressions - using the [`derivation` primitive](./language/derivations.md). These are - translated into low-level *store derivations* (implicitly by - `nix-env` and `nix-build`, or explicitly by `nix-instantiate`). +- [derivation]{#gloss-derivation} - [derivation]: #gloss-derivation + A description of a build task. The result of a derivation is a + store object. Derivations are typically specified in Nix expressions + using the [`derivation` primitive](./language/derivations.md). These are + translated into low-level *store derivations* (implicitly by + `nix-env` and `nix-build`, or explicitly by `nix-instantiate`). - - [store derivation]{#gloss-store-derivation}\ - A [derivation] represented as a `.drv` file in the [store]. - It has a [store path], like any [store object]. + [derivation]: #gloss-derivation - Example: `/nix/store/g946hcz4c8mdvq2g8vxx42z51qb71rvp-git-2.38.1.drv` +- [store derivation]{#gloss-store-derivation} - See [`nix derivation show`](./command-ref/new-cli/nix3-derivation-show.md) (experimental) for displaying the contents of store derivations. + A [derivation] represented as a `.drv` file in the [store]. + It has a [store path], like any [store object]. - [store derivation]: #gloss-store-derivation + Example: `/nix/store/g946hcz4c8mdvq2g8vxx42z51qb71rvp-git-2.38.1.drv` - - [instantiate]{#gloss-instantiate}, instantiation\ - Translate a [derivation] into a [store derivation]. + See [`nix derivation show`](./command-ref/new-cli/nix3-derivation-show.md) (experimental) for displaying the contents of store derivations. - See [`nix-instantiate`](./command-ref/nix-instantiate.md). + [store derivation]: #gloss-store-derivation - [instantiate]: #gloss-instantiate +- [instantiate]{#gloss-instantiate}, instantiation - - [realise]{#gloss-realise}, realisation\ - Ensure a [store path] is [valid][validity]. + Translate a [derivation] into a [store derivation]. - This means either running the `builder` executable as specified in the corresponding [derivation] or fetching a pre-built [store object] from a [substituter]. + See [`nix-instantiate`](./command-ref/nix-instantiate.md). - See [`nix-build`](./command-ref/nix-build.md) and [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md). + [instantiate]: #gloss-instantiate - See [`nix build`](./command-ref/new-cli/nix3-build.md) (experimental). +- [realise]{#gloss-realise}, realisation - [realise]: #gloss-realise + Ensure a [store path] is [valid][validity]. 
- - [content-addressed derivation]{#gloss-content-addressed-derivation}\ - A derivation which has the - [`__contentAddressed`](./language/advanced-attributes.md#adv-attr-__contentAddressed) - attribute set to `true`. + This can be achieved by: + - Fetching a pre-built [store object] from a [substituter] + - Running the [`builder`](@docroot@/language/derivations.md#attr-builder) executable as specified in the corresponding [derivation] + - Delegating to a [remote builder](@docroot@/advanced-topics/distributed-builds.html) and retrieving the outputs + - - [fixed-output derivation]{#gloss-fixed-output-derivation}\ - A derivation which includes the - [`outputHash`](./language/advanced-attributes.md#adv-attr-outputHash) attribute. + See [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md) for a detailed description of the algorithm. - - [store]{#gloss-store}\ - The location in the file system where store objects live. Typically - `/nix/store`. + See also [`nix-build`](./command-ref/nix-build.md) and [`nix build`](./command-ref/new-cli/nix3-build.md) (experimental). - From the perspective of the location where Nix is - invoked, the Nix store can be referred to - as a "_local_" or a "_remote_" one: + [realise]: #gloss-realise - + A [local store]{#gloss-local-store} exists on the filesystem of - the machine where Nix is invoked. You can use other - local stores by passing the `--store` flag to the - `nix` command. Local stores can be used for building derivations. +- [content-addressed derivation]{#gloss-content-addressed-derivation} - + A *remote store* exists anywhere other than the - local filesystem. One example is the `/nix/store` - directory on another machine, accessed via `ssh` or - served by the `nix-serve` Perl script. + A derivation which has the + [`__contentAddressed`](./language/advanced-attributes.md#adv-attr-__contentAddressed) + attribute set to `true`. - [store]: #gloss-store - [local store]: #gloss-local-store +- [fixed-output derivation]{#gloss-fixed-output-derivation} - - [chroot store]{#gloss-chroot-store}\ - A [local store] whose canonical path is anything other than `/nix/store`. + A derivation which includes the + [`outputHash`](./language/advanced-attributes.md#adv-attr-outputHash) attribute. - - [binary cache]{#gloss-binary-cache}\ - A *binary cache* is a Nix store which uses a different format: its - metadata and signatures are kept in `.narinfo` files rather than in a - [Nix database]. This different format simplifies serving store objects - over the network, but cannot host builds. Examples of binary caches - include S3 buckets and the [NixOS binary cache](https://cache.nixos.org). +- [store]{#gloss-store} - - [store path]{#gloss-store-path}\ - The location of a [store object] in the file system, i.e., an - immediate child of the Nix store directory. + The location in the file system where store objects live. Typically + `/nix/store`. - Example: `/nix/store/a040m110amc4h71lds2jmr8qrkj2jhxd-git-2.38.1` + From the perspective of the location where Nix is + invoked, the Nix store can be referred to + as a "_local_" or a "_remote_" one: - [store path]: #gloss-store-path + + A [local store]{#gloss-local-store} exists on the filesystem of + the machine where Nix is invoked. You can use other + local stores by passing the `--store` flag to the + `nix` command. Local stores can be used for building derivations. - - [file system object]{#gloss-store-object}\ - The Nix data model for representing simplified file system data. 
+ + A *remote store* exists anywhere other than the + local filesystem. One example is the `/nix/store` + directory on another machine, accessed via `ssh` or + served by the `nix-serve` Perl script. - See [File System Object](@docroot@/architecture/file-system-object.md) for details. + [store]: #gloss-store + [local store]: #gloss-local-store - [file system object]: #gloss-file-system-object +- [chroot store]{#gloss-chroot-store} - - [store object]{#gloss-store-object}\ + A [local store] whose canonical path is anything other than `/nix/store`. - A store object consists of a [file system object], [reference]s to other store objects, and other metadata. - It can be referred to by a [store path]. +- [binary cache]{#gloss-binary-cache} - [store object]: #gloss-store-object + A *binary cache* is a Nix store which uses a different format: its + metadata and signatures are kept in `.narinfo` files rather than in a + [Nix database]. This different format simplifies serving store objects + over the network, but cannot host builds. Examples of binary caches + include S3 buckets and the [NixOS binary cache](https://cache.nixos.org). - - [input-addressed store object]{#gloss-input-addressed-store-object}\ - A store object produced by building a - non-[content-addressed](#gloss-content-addressed-derivation), - non-[fixed-output](#gloss-fixed-output-derivation) - derivation. +- [store path]{#gloss-store-path} - - [output-addressed store object]{#gloss-output-addressed-store-object}\ - A [store object] whose [store path] is determined by its contents. - This includes derivations, the outputs of [content-addressed derivations](#gloss-content-addressed-derivation), and the outputs of [fixed-output derivations](#gloss-fixed-output-derivation). + The location of a [store object] in the file system, i.e., an + immediate child of the Nix store directory. - - [substitute]{#gloss-substitute}\ - A substitute is a command invocation stored in the [Nix database] that - describes how to build a store object, bypassing the normal build - mechanism (i.e., derivations). Typically, the substitute builds the - store object by downloading a pre-built version of the store object - from some server. + Example: `/nix/store/a040m110amc4h71lds2jmr8qrkj2jhxd-git-2.38.1` - - [substituter]{#gloss-substituter}\ - An additional [store]{#gloss-store} from which Nix can obtain store objects instead of building them. - Often the substituter is a [binary cache](#gloss-binary-cache), but any store can serve as substituter. + [store path]: #gloss-store-path - See the [`substituters` configuration option](./command-ref/conf-file.md#conf-substituters) for details. +- [file system object]{#gloss-store-object} - [substituter]: #gloss-substituter + The Nix data model for representing simplified file system data. - - [purity]{#gloss-purity}\ - The assumption that equal Nix derivations when run always produce - the same output. This cannot be guaranteed in general (e.g., a - builder can rely on external inputs such as the network or the - system time) but the Nix model assumes it. + See [File System Object](@docroot@/architecture/file-system-object.md) for details. - - [Nix database]{#gloss-nix-database}\ - An SQlite database to track [reference]s between [store object]s. - This is an implementation detail of the [local store]. + [file system object]: #gloss-file-system-object - Default location: `/nix/var/nix/db`. 
+- [store object]{#gloss-store-object} - [Nix database]: #gloss-nix-database + A store object consists of a [file system object], [reference]s to other store objects, and other metadata. + It can be referred to by a [store path]. - - [Nix expression]{#gloss-nix-expression}\ - A high-level description of software packages and compositions - thereof. Deploying software using Nix entails writing Nix - expressions for your packages. Nix expressions are translated to - derivations that are stored in the Nix store. These derivations can - then be built. + [store object]: #gloss-store-object - - [reference]{#gloss-reference}\ - A [store object] `O` is said to have a *reference* to a store object `P` if a [store path] to `P` appears in the contents of `O`. +- [IFD]{#gloss-ifd} - Store objects can refer to both other store objects and themselves. - References from a store object to itself are called *self-references*. - References other than a self-reference must not form a cycle. + [Import From Derivation](./language/import-from-derivation.md) - [reference]: #gloss-reference +- [input-addressed store object]{#gloss-input-addressed-store-object} - - [reachable]{#gloss-reachable}\ - A store path `Q` is reachable from another store path `P` if `Q` - is in the *closure* of the *references* relation. + A store object produced by building a + non-[content-addressed](#gloss-content-addressed-derivation), + non-[fixed-output](#gloss-fixed-output-derivation) + derivation. - - [closure]{#gloss-closure}\ - The closure of a store path is the set of store paths that are - directly or indirectly “reachable” from that store path; that is, - it’s the closure of the path under the *references* relation. For - a package, the closure of its derivation is equivalent to the - build-time dependencies, while the closure of its output path is - equivalent to its runtime dependencies. For correct deployment it - is necessary to deploy whole closures, since otherwise at runtime - files could be missing. The command `nix-store --query --requisites ` prints out - closures of store paths. +- [output-addressed store object]{#gloss-output-addressed-store-object} - As an example, if the [store object] at path `P` contains a [reference] - to a store object at path `Q`, then `Q` is in the closure of `P`. Further, if `Q` - references `R` then `R` is also in the closure of `P`. + A [store object] whose [store path] is determined by its contents. + This includes derivations, the outputs of [content-addressed derivations](#gloss-content-addressed-derivation), and the outputs of [fixed-output derivations](#gloss-fixed-output-derivation). - [closure]: #gloss-closure +- [substitute]{#gloss-substitute} - - [output path]{#gloss-output-path}\ - A [store path] produced by a [derivation]. + A substitute is a command invocation stored in the [Nix database] that + describes how to build a store object, bypassing the normal build + mechanism (i.e., derivations). Typically, the substitute builds the + store object by downloading a pre-built version of the store object + from some server. - [output path]: #gloss-output-path +- [substituter]{#gloss-substituter} - - [deriver]{#gloss-deriver}\ - The [store derivation] that produced an [output path]. + An additional [store]{#gloss-store} from which Nix can obtain store objects instead of building them. + Often the substituter is a [binary cache](#gloss-binary-cache), but any store can serve as substituter. 
- - [validity]{#gloss-validity}\ - A store path is valid if all [store object]s in its [closure] can be read from the [store]. + See the [`substituters` configuration option](./command-ref/conf-file.md#conf-substituters) for details. - For a [local store], this means: - - The store path leads to an existing [store object] in that [store]. - - The store path is listed in the [Nix database] as being valid. - - All paths in the store path's [closure] are valid. + [substituter]: #gloss-substituter - [validity]: #gloss-validity +- [purity]{#gloss-purity} - - [user environment]{#gloss-user-env}\ - An automatically generated store object that consists of a set of - symlinks to “active” applications, i.e., other store paths. These - are generated automatically by - [`nix-env`](./command-ref/nix-env.md). See *profiles*. + The assumption that equal Nix derivations when run always produce + the same output. This cannot be guaranteed in general (e.g., a + builder can rely on external inputs such as the network or the + system time) but the Nix model assumes it. - - [profile]{#gloss-profile}\ - A symlink to the current *user environment* of a user, e.g., - `/nix/var/nix/profiles/default`. +- [Nix database]{#gloss-nix-database} - - [installable]{#gloss-installable}\ - Something that can be realised in the Nix store. + An SQlite database to track [reference]s between [store object]s. + This is an implementation detail of the [local store]. - See [installables](./command-ref/new-cli/nix.md#installables) for [`nix` commands](./command-ref/new-cli/nix.md) (experimental) for details. + Default location: `/nix/var/nix/db`. - - [NAR]{#gloss-nar}\ - A *N*ix *AR*chive. This is a serialisation of a path in the Nix - store. It can contain regular files, directories and symbolic - links. NARs are generated and unpacked using `nix-store --dump` - and `nix-store --restore`. + [Nix database]: #gloss-nix-database - - [`∅`]{#gloss-emtpy-set}\ - The empty set symbol. In the context of profile history, this denotes a package is not present in a particular version of the profile. +- [Nix expression]{#gloss-nix-expression} - - [`ε`]{#gloss-epsilon}\ - The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute. + A high-level description of software packages and compositions + thereof. Deploying software using Nix entails writing Nix + expressions for your packages. Nix expressions are translated to + derivations that are stored in the Nix store. These derivations can + then be built. - - [string interpolation]{#gloss-string-interpolation}\ - Expanding expressions enclosed in `${ }` within a [string], [path], or [attribute name]. +- [reference]{#gloss-reference} - See [String interpolation](./language/string-interpolation.md) for details. + A [store object] `O` is said to have a *reference* to a store object `P` if a [store path] to `P` appears in the contents of `O`. - [string]: ./language/values.md#type-string - [path]: ./language/values.md#type-path - [attribute name]: ./language/values.md#attribute-set + Store objects can refer to both other store objects and themselves. + References from a store object to itself are called *self-references*. + References other than a self-reference must not form a cycle. - - [experimental feature]{#gloss-experimental-feature}\ - Not yet stabilized functionality guarded by named experimental feature flags. 
- These flags are enabled or disabled with the [`experimental-features`](./command-ref/conf-file.html#conf-experimental-features) setting. + [reference]: #gloss-reference - See the contribution guide on the [purpose and lifecycle of experimental feaures](@docroot@/contributing/experimental-features.md). +- [reachable]{#gloss-reachable} + + A store path `Q` is reachable from another store path `P` if `Q` + is in the *closure* of the *references* relation. + +- [closure]{#gloss-closure} + + The closure of a store path is the set of store paths that are + directly or indirectly “reachable” from that store path; that is, + it’s the closure of the path under the *references* relation. For + a package, the closure of its derivation is equivalent to the + build-time dependencies, while the closure of its output path is + equivalent to its runtime dependencies. For correct deployment it + is necessary to deploy whole closures, since otherwise at runtime + files could be missing. The command `nix-store --query --requisites ` prints out + closures of store paths. + + As an example, if the [store object] at path `P` contains a [reference] + to a store object at path `Q`, then `Q` is in the closure of `P`. Further, if `Q` + references `R` then `R` is also in the closure of `P`. + + [closure]: #gloss-closure + +- [output]{#gloss-output} + + A [store object] produced by a [derivation]. + See [the `outputs` argument to the `derivation` function](@docroot@/language/derivations.md#attr-outputs) for details. + + [output]: #gloss-output + +- [output path]{#gloss-output-path} + + The [store path] to the [output] of a [derivation]. + + [output path]: #gloss-output-path + +- [deriver]{#gloss-deriver} + + The [store derivation] that produced an [output path]. + +- [validity]{#gloss-validity} + + A store path is valid if all [store object]s in its [closure] can be read from the [store]. + + For a [local store], this means: + - The store path leads to an existing [store object] in that [store]. + - The store path is listed in the [Nix database] as being valid. + - All paths in the store path's [closure] are valid. + + [validity]: #gloss-validity + +- [user environment]{#gloss-user-env} + + An automatically generated store object that consists of a set of + symlinks to “active” applications, i.e., other store paths. These + are generated automatically by + [`nix-env`](./command-ref/nix-env.md). See *profiles*. + +- [profile]{#gloss-profile} + + A symlink to the current *user environment* of a user, e.g., + `/nix/var/nix/profiles/default`. + +- [installable]{#gloss-installable} + + Something that can be realised in the Nix store. + + See [installables](./command-ref/new-cli/nix.md#installables) for [`nix` commands](./command-ref/new-cli/nix.md) (experimental) for details. + +- [NAR]{#gloss-nar} + + A *N*ix *AR*chive. This is a serialisation of a path in the Nix + store. It can contain regular files, directories and symbolic + links. NARs are generated and unpacked using `nix-store --dump` + and `nix-store --restore`. + +- [`∅`]{#gloss-emtpy-set} + + The empty set symbol. In the context of profile history, this denotes a package is not present in a particular version of the profile. + +- [`ε`]{#gloss-epsilon} + + The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute. + +- [string interpolation]{#gloss-string-interpolation} + + Expanding expressions enclosed in `${ }` within a [string], [path], or [attribute name]. 
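+
+  For example, the string `"Hello ${ "World" }"` evaluates to `"Hello World"`.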
+
+  See [String interpolation](./language/string-interpolation.md) for details.
+
+  [string]: ./language/values.md#type-string
+  [path]: ./language/values.md#type-path
+  [attribute name]: ./language/values.md#attribute-set
+
+- [experimental feature]{#gloss-experimental-feature}
+
+  Not yet stabilized functionality guarded by named experimental feature flags.
+  These flags are enabled or disabled with the [`experimental-features`](./command-ref/conf-file.html#conf-experimental-features) setting.
+
+  See the contribution guide on the [purpose and lifecycle of experimental features](@docroot@/contributing/experimental-features.md).
diff --git a/doc/manual/src/installation/building-source.md b/doc/manual/src/installation/building-source.md
index ed1efffd8..7dad9805a 100644
--- a/doc/manual/src/installation/building-source.md
+++ b/doc/manual/src/installation/building-source.md
@@ -3,7 +3,7 @@ After cloning Nix's Git repository, issue the following commands:
 ```console
-$ ./bootstrap.sh
+$ autoreconf -vfi
 $ ./configure options...
 $ make
 $ make install
diff --git a/doc/manual/src/installation/installing-docker.md b/doc/manual/src/installation/installing-docker.md
index 9d6d8f2d9..6f77d6a57 100644
--- a/doc/manual/src/installation/installing-docker.md
+++ b/doc/manual/src/installation/installing-docker.md
@@ -3,14 +3,14 @@ To run the latest stable release of Nix with Docker run the following command:
 ```console
-$ docker run -ti nixos/nix
-Unable to find image 'nixos/nix:latest' locally
-latest: Pulling from nixos/nix
+$ docker run -ti ghcr.io/nixos/nix
+Unable to find image 'ghcr.io/nixos/nix:latest' locally
+latest: Pulling from ghcr.io/nixos/nix
 5843afab3874: Pull complete
 b52bf13f109c: Pull complete
 1e2415612aa3: Pull complete
 Digest: sha256:27f6e7f60227e959ee7ece361f75d4844a40e1cc6878b6868fe30140420031ff
-Status: Downloaded newer image for nixos/nix:latest
+Status: Downloaded newer image for ghcr.io/nixos/nix:latest
 35ca4ada6e96:/# nix --version
 nix (Nix) 2.3.12
 35ca4ada6e96:/# exit
diff --git a/doc/manual/src/language/advanced-attributes.md b/doc/manual/src/language/advanced-attributes.md
index 5e8aaeba0..282b75af2 100644
--- a/doc/manual/src/language/advanced-attributes.md
+++ b/doc/manual/src/language/advanced-attributes.md
@@ -112,6 +112,13 @@ Derivations can declare some infrequently used optional attributes.
   > environmental variables come from the environment of the
   > `nix-build`.
+  If the [`configurable-impure-env` experimental
+  feature](@docroot@/contributing/experimental-features.md#xp-feature-configurable-impure-env)
+  is enabled, these environment variables can also be controlled
+  through the
+  [`impure-env`](@docroot@/command-ref/conf-file.md#conf-impure-env)
+  configuration setting.
+
 - [`outputHash`]{#adv-attr-outputHash}; [`outputHashAlgo`]{#adv-attr-outputHashAlgo}; [`outputHashMode`]{#adv-attr-outputHashMode}\
   These attributes declare that the derivation is a so-called
   *fixed-output derivation*, which means that a cryptographic hash of
@@ -229,6 +236,8 @@ Derivations can declare some infrequently used optional attributes.
   [`outputHashAlgo`](#adv-attr-outputHashAlgo) like for *fixed-output derivations* (see above).
+  It also implicitly requires that the machine building the derivation has the `ca-derivations` [system feature](@docroot@/command-ref/conf-file.md#conf-system-features).
+
 - [`passAsFile`]{#adv-attr-passAsFile}\
   A list of names of attributes that should be passed via files rather than
   environment variables. For example, if you have
@@ -261,6 +270,9 @@ Derivations can declare some infrequently used optional attributes.
   useful for very trivial derivations (such as `writeText` in Nixpkgs)
   that are cheaper to build than to substitute from a binary cache.
+
+  You may disable the effects of this attribute by enabling the
+  `always-allow-substitutes` configuration option in Nix.
+
   > **Note**
   >
   > You need to have a builder configured which satisfies the
@@ -271,18 +283,21 @@ Derivations can declare some infrequently used optional attributes.
 - [`__structuredAttrs`]{#adv-attr-structuredAttrs}\
   If the special attribute `__structuredAttrs` is set to `true`, the other derivation
-  attributes are serialised in JSON format and made available to the
-  builder via the file `.attrs.json` in the builder’s temporary
-  directory. This obviates the need for [`passAsFile`](#adv-attr-passAsFile) since JSON files
-  have no size restrictions, unlike process environments.
+  attributes are serialised into a file in JSON format. The environment variable
+  `NIX_ATTRS_JSON_FILE` points to the exact location of that file both in a build
+  and a [`nix-shell`](../command-ref/nix-shell.md). This obviates the need for
+  [`passAsFile`](#adv-attr-passAsFile) since JSON files have no size restrictions,
+  unlike process environments.
   It also makes it possible to tweak derivation settings in a structured
   way; see [`outputChecks`](#adv-attr-outputChecks) for example.
   As a convenience to Bash builders,
-  Nix writes a script named `.attrs.sh` to the builder’s directory
-  that initialises shell variables corresponding to all attributes
-  that are representable in Bash. This includes non-nested
+  Nix writes a script that initialises shell variables
+  corresponding to all attributes that are representable in Bash. The
+  environment variable `NIX_ATTRS_SH_FILE` points to the exact
+  location of the script, both in a build and a
+  [`nix-shell`](../command-ref/nix-shell.md). This includes non-nested
   (associative) arrays. For example, the attribute `hardening.format = true`
   ends up as the Bash associative array element `${hardening[format]}`.
@@ -335,3 +350,15 @@ Derivations can declare some infrequently used optional attributes.
   This is useful, for example, when generating self-contained filesystem images with
   their own embedded Nix store: hashes found inside such an image refer
   to the embedded store and not to the host's Nix store.
+
+- [`requiredSystemFeatures`]{#adv-attr-requiredSystemFeatures}\
+
+  If a derivation has the `requiredSystemFeatures` attribute, then Nix will only build it on a machine that has the corresponding features set in its [`system-features` configuration](@docroot@/command-ref/conf-file.md#conf-system-features).
+
+  For example, setting
+
+  ```nix
+  requiredSystemFeatures = [ "kvm" ];
+  ```
+
+  ensures that the derivation can only be built on a machine with the `kvm` feature.
diff --git a/doc/manual/src/language/constructs/lookup-path.md b/doc/manual/src/language/constructs/lookup-path.md
new file mode 100644
index 000000000..e87d2922b
--- /dev/null
+++ b/doc/manual/src/language/constructs/lookup-path.md
@@ -0,0 +1,27 @@
+# Lookup path
+
+> **Syntax**
+>
+> *lookup-path* = `<` *identifier* [ `/` *identifier* ]... `>`
+
+A lookup path is an identifier with an optional path suffix that resolves to a [path value](@docroot@/language/values.md#type-path) if the identifier matches a search path entry.
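+
+For example, `<nixpkgs>` is a lookup path with the single identifier `nixpkgs`, and `<nixpkgs/nixos>` adds the path suffix `nixos`.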
+
+The value of a lookup path is determined by [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath).
+
+See [`builtins.findFile`](@docroot@/language/builtins.md#builtins-findFile) for details on lookup path resolution.
+
+> **Example**
+>
+> ```nix
+> <nixpkgs>
+>```
+>
+>     /nix/var/nix/profiles/per-user/root/channels/nixpkgs
+
+> **Example**
+>
+> ```nix
+> <nixpkgs/nixos>
+>```
+>
+>     /nix/var/nix/profiles/per-user/root/channels/nixpkgs/nixos
diff --git a/doc/manual/src/language/derivations.md b/doc/manual/src/language/derivations.md
index 043a38191..2aded5527 100644
--- a/doc/manual/src/language/derivations.md
+++ b/doc/manual/src/language/derivations.md
@@ -1,161 +1,315 @@
 # Derivations
-The most important built-in function is `derivation`, which is used to
-describe a single derivation (a build task). It takes as input a set,
-the attributes of which specify the inputs of the build.
+The most important built-in function is `derivation`, which is used to describe a single derivation:
+a specification for running an executable on precisely defined input files to repeatably produce output files at uniquely determined file system paths.
-  - There must be an attribute named [`system`]{#attr-system} whose value must be a
-    string specifying a Nix system type, such as `"i686-linux"` or
-    `"x86_64-darwin"`. (To figure out your system type, run `nix -vv
-    --version`.) The build can only be performed on a machine and
-    operating system matching the system type. (Nix can automatically
-    [forward builds for other
-    platforms](../advanced-topics/distributed-builds.md) by forwarding
-    them to other machines.)
+It takes as input an attribute set, the attributes of which specify the inputs to the process.
+It outputs an attribute set, and produces a [store derivation] as a side effect of evaluation.
-  - There must be an attribute named `name` whose value must be a
-    string. This is used as a symbolic name for the package by
-    `nix-env`, and it is appended to the output paths of the derivation.
+[store derivation]: @docroot@/glossary.md#gloss-store-derivation
-  - There must be an attribute named `builder` that identifies the
-    program that is executed to perform the build. It can be either a
-    derivation or a source (a local file reference, e.g.,
-    `./builder.sh`).
+## Input attributes
-  - Every attribute is passed as an environment variable to the builder.
-    Attribute values are translated to environment variables as follows:
-
-      - Strings and numbers are just passed verbatim.
-
-      - A *path* (e.g., `../foo/sources.tar`) causes the referenced file
-        to be copied to the store; its location in the store is put in
-        the environment variable. The idea is that all sources should
-        reside in the Nix store, since all inputs to a derivation should
-        reside in the Nix store.
-
-      - A *derivation* causes that derivation to be built prior to the
-        present derivation; its default output path is put in the
-        environment variable.
-
-      - Lists of the previous types are also allowed. They are simply
-        concatenated, separated by spaces.
-
-      - `true` is passed as the string `1`, `false` and `null` are
-        passed as an empty string.
+### Required
-  - The optional attribute `args` specifies command-line arguments to be
-    passed to the builder. It should be a list.
+- [`name`]{#attr-name} ([String](@docroot@/language/values.md#type-string))
-  - The optional attribute `outputs` specifies a list of symbolic
-    outputs of the derivation. By default, a derivation produces a
-    single output path, denoted as `out`.
However, derivations can - produce multiple output paths. This is useful because it allows - outputs to be downloaded or garbage-collected separately. For - instance, imagine a library package that provides a dynamic library, - header files, and documentation. A program that links against the - library doesn’t need the header files and documentation at runtime, - and it doesn’t need the documentation at build time. Thus, the - library package could specify: - - ```nix - outputs = [ "lib" "headers" "doc" ]; - ``` - - This will cause Nix to pass environment variables `lib`, `headers` - and `doc` to the builder containing the intended store paths of each - output. The builder would typically do something like - - ```bash - ./configure \ - --libdir=$lib/lib \ - --includedir=$headers/include \ - --docdir=$doc/share/doc - ``` - - for an Autoconf-style package. You can refer to each output of a - derivation by selecting it as an attribute, e.g. - - ```nix - buildInputs = [ pkg.lib pkg.headers ]; - ``` - - The first element of `outputs` determines the *default output*. - Thus, you could also write - - ```nix - buildInputs = [ pkg pkg.headers ]; - ``` - - since `pkg` is equivalent to `pkg.lib`. + A symbolic name for the derivation. + It is added to the [store path] of the corresponding [store derivation] as well as to its [output paths](@docroot@/glossary.md#gloss-output-path). -The function `mkDerivation` in the Nixpkgs standard environment is a -wrapper around `derivation` that adds a default value for `system` and -always uses Bash as the builder, to which the supplied builder is passed -as a command-line argument. See the Nixpkgs manual for details. + [store path]: @docroot@/glossary.md#gloss-store-path -The builder is executed as follows: + > **Example** + > + > ```nix + > derivation { + > name = "hello"; + > # ... + > } + > ``` + > + > The store derivation's path will be `/nix/store/-hello.drv`. + > The [output](#attr-outputs) paths will be of the form `/nix/store/-hello[-]` - - A temporary directory is created under the directory specified by - `TMPDIR` (default `/tmp`) where the build will take place. The - current directory is changed to this directory. +- [`system`]{#attr-system} ([String](@docroot@/language/values.md#type-string)) - - The environment is cleared and set to the derivation attributes, as - specified above. + The system type on which the [`builder`](#attr-builder) executable is meant to be run. - - In addition, the following variables are set: - - - `NIX_BUILD_TOP` contains the path of the temporary directory for - this build. - - - Also, `TMPDIR`, `TEMPDIR`, `TMP`, `TEMP` are set to point to the - temporary directory. This is to prevent the builder from - accidentally writing temporary files anywhere else. Doing so - might cause interference by other processes. - - - `PATH` is set to `/path-not-set` to prevent shells from - initialising it to their built-in default value. - - - `HOME` is set to `/homeless-shelter` to prevent programs from - using `/etc/passwd` or the like to find the user's home - directory, which could cause impurity. Usually, when `HOME` is - set, it is used as the location of the home directory, even if - it points to a non-existent path. - - - `NIX_STORE` is set to the path of the top-level Nix store - directory (typically, `/nix/store`). - - - For each output declared in `outputs`, the corresponding - environment variable is set to point to the intended path in the - Nix store for that output. 
Each output path is a concatenation - of the cryptographic hash of all build inputs, the `name` - attribute and the output name. (The output name is omitted if - it’s `out`.) + A necessary condition for Nix to build derivations locally is that the `system` attribute matches the current [`system` configuration option]. + It can automatically [build on other platforms](../advanced-topics/distributed-builds.md) by forwarding build requests to other machines. - - If an output path already exists, it is removed. Also, locks are - acquired to prevent multiple Nix instances from performing the same - build at the same time. + [`system` configuration option]: @docroot@/command-ref/conf-file.md#conf-system - - A log of the combined standard output and error is written to - `/nix/var/log/nix`. + > **Example** + > + > Declare a derivation to be built on a specific system type: + > + > ```nix + > derivation { + > # ... + > system = "x86_64-linux"; + > # ... + > } + > ``` - - The builder is executed with the arguments specified by the - attribute `args`. If it exits with exit code 0, it is considered to - have succeeded. + > **Example** + > + > Declare a derivation to be built on the system type that evaluates the expression: + > + > ```nix + > derivation { + > # ... + > system = builtins.currentSystem; + > # ... + > } + > ``` + > + > [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) has the value of the [`system` configuration option], and defaults to the system type of the current Nix installation. - - The temporary directory is removed (unless the `-K` option was - specified). +- [`builder`]{#attr-builder} ([Path](@docroot@/language/values.md#type-path) | [String](@docroot@/language/values.md#type-string)) - - If the build was successful, Nix scans each output path for - references to input paths by looking for the hash parts of the input - paths. Since these are potential runtime dependencies, Nix registers - them as dependencies of the output paths. + Path to an executable that will perform the build. - - After the build, Nix sets the last-modified timestamp on all files - in the build result to 1 (00:00:01 1/1/1970 UTC), sets the group to - the default group, and sets the mode of the file to 0444 or 0555 - (i.e., read-only, with execute permission enabled if the file was - originally executable). Note that possible `setuid` and `setgid` - bits are cleared. Setuid and setgid programs are not currently - supported by Nix. This is because the Nix archives used in - deployment have no concept of ownership information, and because it - makes the build result dependent on the user performing the build. + > **Example** + > + > Use the file located at `/bin/bash` as the builder executable: + > + > ```nix + > derivation { + > # ... + > builder = "/bin/bash"; + > # ... + > }; + > ``` + + + + > **Example** + > + > Copy a local file to the Nix store for use as the builder executable: + > + > ```nix + > derivation { + > # ... + > builder = ./builder.sh; + > # ... + > }; + > ``` + + + + > **Example** + > + > Use a file from another derivation as the builder executable: + > + > ```nix + > let pkgs = import {}; in + > derivation { + > # ... + > builder = "${pkgs.python}/bin/python"; + > # ... + > }; + > ``` + +### Optional + +- [`args`]{#attr-args} ([List](@docroot@/language/values.md#list) of [String](@docroot@/language/values.md#type-string)) + + Default: `[ ]` + + Command-line arguments to be passed to the [`builder`](#attr-builder) executable. 
+ + > **Example** + > + > Pass arguments to Bash to interpret a shell command: + > + > ```nix + > derivation { + > # ... + > builder = "/bin/bash"; + > args = [ "-c" "echo hello world > $out" ]; + > # ... + > }; + > ``` + +- [`outputs`]{#attr-outputs} ([List](@docroot@/language/values.md#list) of [String](@docroot@/language/values.md#type-string)) + + Default: `[ "out" ]` + + Symbolic outputs of the derivation. + Each output name is passed to the [`builder`](#attr-builder) executable as an environment variable with its value set to the corresponding [store path]. + + By default, a derivation produces a single output called `out`. + However, derivations can produce multiple outputs. + This allows the associated [store objects](@docroot@/glossary.md#gloss-store-object) and their [closures](@docroot@/glossary.md#gloss-closure) to be copied or garbage-collected separately. + + > **Example** + > + > Imagine a library package that provides a dynamic library, header files, and documentation. + > A program that links against such a library doesn’t need the header files and documentation at runtime, and it doesn’t need the documentation at build time. + > Thus, the library package could specify: + > + > ```nix + > derivation { + > # ... + > outputs = [ "lib" "dev" "doc" ]; + > # ... + > } + > ``` + > + > This will cause Nix to pass environment variables `lib`, `dev`, and `doc` to the builder containing the intended store paths of each output. + > The builder would typically do something like + > + > ```bash + > ./configure \ + > --libdir=$lib/lib \ + > --includedir=$dev/include \ + > --docdir=$doc/share/doc + > ``` + > + > for an Autoconf-style package. + + The name of an output is combined with the name of the derivation to create the name part of the output's store path, unless it is `out`, in which case just the name of the derivation is used. + + > **Example** + > + > + > ```nix + > derivation { + > name = "example"; + > outputs = [ "lib" "dev" "doc" "out" ]; + > # ... + > } + > ``` + > + > The store derivation path will be `/nix/store/-example.drv`. + > The output paths will be + > - `/nix/store/-example-lib` + > - `/nix/store/-example-dev` + > - `/nix/store/-example-doc` + > - `/nix/store/-example` + + You can refer to each output of a derivation by selecting it as an attribute. + The first element of `outputs` determines the *default output* and ends up at the top-level. + + > **Example** + > + > Select an output by attribute name: + > + > ```nix + > let + > myPackage = derivation { + > name = "example"; + > outputs = [ "lib" "dev" "doc" "out" ]; + > # ... + > }; + > in myPackage.dev + > ``` + > + > Since `lib` is the first output, `myPackage` is equivalent to `myPackage.lib`. + + + +- See [Advanced Attributes](./advanced-attributes.md) for more, infrequently used, optional attributes. + + + +- Every other attribute is passed as an environment variable to the builder. + Attribute values are translated to environment variables as follows: + + - Strings are passed unchanged. + + - Integral numbers are converted to decimal notation. + + - Floating point numbers are converted to simple decimal or scientific notation with a preset precision. + + - A *path* (e.g., `../foo/sources.tar`) causes the referenced file + to be copied to the store; its location in the store is put in + the environment variable. The idea is that all sources should + reside in the Nix store, since all inputs to a derivation should + reside in the Nix store. 
+
+  - A *derivation* causes that derivation to be built prior to the
+    present derivation. The environment variable is set to the [store path] of the derivation's default [output](#attr-outputs).
+
+  - Lists of the previous types are also allowed. They are simply
+    concatenated, separated by spaces.
+
+  - `true` is passed as the string `1`, `false` and `null` are
+    passed as an empty string.
+
+
+
+## Builder execution
+
+The [`builder`](#attr-builder) is executed as follows:
+
+- A temporary directory is created under the directory specified by
+  `TMPDIR` (default `/tmp`) where the build will take place. The
+  current directory is changed to this directory.
+
+- The environment is cleared and set to the derivation attributes, as
+  specified above.
+
+- In addition, the following variables are set:
+
+  - `NIX_BUILD_TOP` contains the path of the temporary directory for
+    this build.
+
+  - Also, `TMPDIR`, `TEMPDIR`, `TMP`, `TEMP` are set to point to the
+    temporary directory. This is to prevent the builder from
+    accidentally writing temporary files anywhere else. Doing so
+    might cause interference by other processes.
+
+  - `PATH` is set to `/path-not-set` to prevent shells from
+    initialising it to their built-in default value.
+
+  - `HOME` is set to `/homeless-shelter` to prevent programs from
+    using `/etc/passwd` or the like to find the user's home
+    directory, which could cause impurity. Usually, when `HOME` is
+    set, it is used as the location of the home directory, even if
+    it points to a non-existent path.
+
+  - `NIX_STORE` is set to the path of the top-level Nix store
+    directory (typically, `/nix/store`).
+
+  - `NIX_ATTRS_JSON_FILE` and `NIX_ATTRS_SH_FILE` are set to the locations of the
+    corresponding files if `__structuredAttrs` is set to `true` for the derivation.
+    A detailed explanation of this behavior can be found in the
+    [section about structured attrs](./advanced-attributes.md#adv-attr-structuredAttrs).
+
+  - For each output declared in `outputs`, the corresponding
+    environment variable is set to point to the intended path in the
+    Nix store for that output. Each output path is a concatenation
+    of the cryptographic hash of all build inputs, the `name`
+    attribute and the output name. (The output name is omitted if
+    it’s `out`.)
+
+- If an output path already exists, it is removed. Also, locks are
+  acquired to prevent multiple Nix instances from performing the same
+  build at the same time.
+
+- A log of the combined standard output and error is written to
+  `/nix/var/log/nix`.
+
+- The builder is executed with the arguments specified by the
+  attribute `args`. If it exits with exit code 0, it is considered to
+  have succeeded.
+
+- The temporary directory is removed (unless the `-K` option was
+  specified).
+
+- If the build was successful, Nix scans each output path for
+  references to input paths by looking for the hash parts of the input
+  paths. Since these are potential runtime dependencies, Nix registers
+  them as dependencies of the output paths.
+
+- After the build, Nix sets the last-modified timestamp on all files
+  in the build result to 1 (00:00:01 1/1/1970 UTC), sets the group to
+  the default group, and sets the mode of the file to 0444 or 0555
+  (i.e., read-only, with execute permission enabled if the file was
+  originally executable). Note that possible `setuid` and `setgid`
+  bits are cleared. Setuid and setgid programs are not currently
+  supported by Nix.
This is because the Nix archives used in + deployment have no concept of ownership information, and because it + makes the build result dependent on the user performing the build. diff --git a/doc/manual/src/language/import-from-derivation.md b/doc/manual/src/language/import-from-derivation.md new file mode 100644 index 000000000..03b3f9d91 --- /dev/null +++ b/doc/manual/src/language/import-from-derivation.md @@ -0,0 +1,139 @@ +# Import From Derivation + +The value of a Nix expression can depend on the contents of a [store object](@docroot@/glossary.md#gloss-store-object). + +Passing an expression `expr` that evaluates to a [store path](@docroot@/glossary.md#gloss-store-path) to any built-in function which reads from the filesystem constitutes Import From Derivation (IFD): + +- [`import`](./builtins.md#builtins-import)` expr` +- [`builtins.readFile`](./builtins.md#builtins-readFile)` expr` +- [`builtins.readFileType`](./builtins.md#builtins-readFileType)` expr` +- [`builtins.readDir`](./builtins.md#builtins-readDir)` expr` +- [`builtins.pathExists`](./builtins.md#builtins-pathExists)` expr` +- [`builtins.filterSource`](./builtins.md#builtins-filterSource)` f expr` +- [`builtins.path`](./builtins.md#builtins-path)` { path = expr; }` +- [`builtins.hashFile`](./builtins.md#builtins-hashFile)` t expr` +- `builtins.scopedImport x drv` + +When the store path needs to be accessed, evaluation will be paused, the corresponding store object [realised], and then evaluation resumed. + +[realised]: @docroot@/glossary.md#gloss-realise + +This has performance implications: +Evaluation can only finish when all required store objects are realised. +Since the Nix language evaluator is sequential, it only finds store paths to read from one at a time. +While realisation is always parallel, in this case it cannot be done for all required store paths at once, and is therefore much slower than otherwise. + +Realising store objects during evaluation can be disabled by setting [`allow-import-from-derivation`](../command-ref/conf-file.md#conf-allow-import-from-derivation) to `false`. +Without IFD it is ensured that evaluation is complete and Nix can produce a build plan before starting any realisation. + +## Example + +In the following Nix expression, the inner derivation `drv` produces a file with contents `hello`. + +```nix +# IFD.nix +let + drv = derivation { + name = "hello"; + builder = "/bin/sh"; + args = [ "-c" "echo -n hello > $out" ]; + system = builtins.currentSystem; + }; +in "${builtins.readFile drv} world" +``` + +```shellSession +nix-instantiate IFD.nix --eval --read-write-mode +``` + +``` +building '/nix/store/348q1cal6sdgfxs8zqi9v8llrsn4kqkq-hello.drv'... +"hello world" +``` + +The contents of the derivation's output have to be [realised] before they can be read with [`readFile`](./builtins.md#builtins-readFile). +Only then evaluation can continue to produce the final result. + +## Illustration + +As a first approximation, the following data flow graph shows how evaluation and building are interleaved, if the value of a Nix expression depends on realising a [store object]. +Boxes are data structures, arrow labels are transformations. + +``` ++----------------------+ +------------------------+ +| Nix evaluator | | Nix store | +| .----------------. | | | +| | Nix expression | | | | +| '----------------' | | | +| | | | | +| evaluate | | | +| | | | | +| V | | | +| .------------. | | .------------------. 
| +| | derivation |----|-instantiate-|->| store derivation | | +| '------------' | | '------------------' | +| | | | | +| | | realise | +| | | | | +| | | V | +| .----------------. | | .--------------. | +| | Nix expression |<-|----read-----|----| store object | | +| '----------------' | | '--------------' | +| | | | | +| evaluate | | | +| | | | | +| V | | | +| .------------. | | | +| | value | | | | +| '------------' | | | ++----------------------+ +------------------------+ +``` + +In more detail, the following sequence diagram shows how the expression is evaluated step by step, and where evaluation is blocked to wait for the build output to appear. + +``` +.-------. .-------------. .---------. +|Nix CLI| |Nix evaluator| |Nix store| +'-------' '-------------' '---------' + | | | + |evaluate IFD.nix| | + |--------------->| | + | | | + | evaluate `"${readFile drv} world"` | + | | | + | evaluate `readFile drv` | + | | | + | evaluate `drv` as string | + | | | + | |instantiate /nix/store/...-hello.drv| + | |----------------------------------->| + | : | + | : realise /nix/store/...-hello.drv | + | :----------------------------------->| + | : | + | |--------. + | : | | + | (evaluation blocked) | echo hello > $out + | : | | + | |<-------' + | : /nix/store/...-hello | + | |<-----------------------------------| + | | | + | resume `readFile /nix/store/...-hello` | + | | | + | | readFile /nix/store/...-hello | + | |----------------------------------->| + | | | + | | hello | + | |<-----------------------------------| + | | | + | resume `"${"hello"} world"` | + | | | + | resume `"hello world"` | + | | | + | "hello world" | | + |<---------------| | +.-------. .-------------. .---------. +|Nix CLI| |Nix evaluator| |Nix store| +'-------' '-------------' '---------' +``` diff --git a/doc/manual/src/language/index.md b/doc/manual/src/language/index.md index 29950a52d..a26e43a05 100644 --- a/doc/manual/src/language/index.md +++ b/doc/manual/src/language/index.md @@ -83,7 +83,8 @@ This is an incomplete overview of language features, by example. - A multi-line string. Strips common prefixed whitespace. Evaluates to `"multi\n line\n string"`. + + A multi-line string. Strips common prefixed whitespace. Evaluates to `"multi\n line\n  string"`. diff --git a/doc/manual/src/language/operators.md b/doc/manual/src/language/operators.md index f8382ae19..cc825b4cf 100644 --- a/doc/manual/src/language/operators.md +++ b/doc/manual/src/language/operators.md @@ -35,6 +35,8 @@ ## Attribute selection +> **Syntax** +> > *attrset* `.` *attrpath* \[ `or` *expr* \] Select the attribute denoted by attribute path *attrpath* from [attribute set] *attrset*. @@ -42,21 +44,29 @@ If the attribute doesn’t exist, return the *expr* after `or` if provided, othe An attribute path is a dot-separated list of [attribute names](./values.md#attribute-set). +> **Syntax** +> > *attrpath* = *name* [ `.` *name* ]... [Attribute selection]: #attribute-selection ## Has attribute +> **Syntax** +> > *attrset* `?` *attrpath* Test whether [attribute set] *attrset* contains the attribute denoted by *attrpath*. The result is a [Boolean] value. +See also: [`builtins.hasAttr`](@docroot@/language/builtins.md#builtins-hasAttr) + [Boolean]: ./values.md#type-boolean [Has attribute]: #has-attribute +After evaluating *attrset* and *attrpath*, the computational complexity is O(log(*n*)) for *n* attributes in the *attrset* + ## Arithmetic Numbers are type-compatible: @@ -70,6 +80,8 @@ The `+` operator is overloaded to also work on strings and paths. 
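+> **Example**
+>
+> Mixing an integer with a floating point number yields a floating point number:
+>
+> ```nix
+> 1 + 2.0
+> ```
+>
+>     3.0
+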
## String concatenation +> **Syntax** +> > *string* `+` *string* Concatenate two [string]s and merge their string contexts. @@ -78,6 +90,8 @@ Concatenate two [string]s and merge their string contexts. ## Path concatenation +> **Syntax** +> > *path* `+` *path* Concatenate two [path]s. @@ -87,6 +101,8 @@ The result is a path. ## Path and string concatenation +> **Syntax** +> > *path* + *string* Concatenate *[path]* with *[string]*. @@ -100,6 +116,8 @@ The result is a path. ## String and path concatenation +> **Syntax** +> > *string* + *path* Concatenate *[string]* with *[path]*. @@ -117,6 +135,8 @@ The result is a string. ## Update +> **Syntax** +> > *attrset1* // *attrset2* Update [attribute set] *attrset1* with names and values from *attrset2*. diff --git a/doc/manual/src/language/string-interpolation.md b/doc/manual/src/language/string-interpolation.md index ddc6b8230..e999b287b 100644 --- a/doc/manual/src/language/string-interpolation.md +++ b/doc/manual/src/language/string-interpolation.md @@ -1,19 +1,12 @@ # String interpolation -String interpolation is a language feature where a [string], [path], or [attribute name] can contain expressions enclosed in `${ }` (dollar-sign with curly brackets). +String interpolation is a language feature where a [string], [path], or [attribute name][attribute set] can contain expressions enclosed in `${ }` (dollar-sign with curly brackets). -Such a string is an *interpolated string*, and an expression inside is an *interpolated expression*. - -Interpolated expressions must evaluate to one of the following: - -- a [string] -- a [path] -- a [derivation] +Such a construct is called *interpolated string*, and the expression inside is an [interpolated expression](#interpolated-expression). [string]: ./values.md#type-string [path]: ./values.md#type-path -[attribute name]: ./values.md#attribute-set -[derivation]: ../glossary.md#gloss-derivation +[attribute set]: ./values.md#attribute-set ## Examples @@ -70,13 +63,136 @@ you can instead write ### Attribute name -Attribute names can be created dynamically with string interpolation: + -```nix -let name = "foo"; in -{ - ${name} = "bar"; -} -``` +Attribute names can be interpolated strings. - { foo = "bar"; } +> **Example** +> +> ```nix +> let name = "foo"; in +> { ${name} = 123; } +> ``` +> +> { foo = 123; } + +Attributes can be selected with interpolated strings. + +> **Example** +> +> ```nix +> let name = "foo"; in +> { foo = 123; }.${name} +> ``` +> +> 123 + +# Interpolated expression + +An expression that is interpolated must evaluate to one of the following: + +- a [string] +- a [path] +- an [attribute set] that has a `__toString` attribute or an `outPath` attribute + + - `__toString` must be a function that takes the attribute set itself and returns a string + - `outPath` must be a string + + This includes [derivations](./derivations.md) or [flake inputs](@docroot@/command-ref/new-cli/nix3-flake.md#flake-inputs) (experimental). + +A string interpolates to itself. + +A path in an interpolated expression is first copied into the Nix store, and the resulting string is the [store path] of the newly created [store object](../glossary.md#gloss-store-object). 
+
+[store path]: ../glossary.md#gloss-store-path
+
+> **Example**
+>
+> ```console
+> $ mkdir foo
+> ```
+>
+> Reference the empty directory in an interpolated expression:
+>
+> ```nix
+> "${./foo}"
+> ```
+>
+>     "/nix/store/2hhl2nz5v0khbn06ys82nrk99aa1xxdw-foo"
+
+A derivation interpolates to the [store path] of its first [output](./derivations.md#attr-outputs).
+
+> **Example**
+>
+> ```nix
+> let
+>   pkgs = import <nixpkgs> {};
+> in
+> "${pkgs.hello}"
+> ```
+>
+>     "/nix/store/4xpfqf29z4m8vbhrqcz064wfmb46w5r7-hello-2.12.1"
+
+An attribute set interpolates to the return value of the function in its `__toString` attribute, applied to the attribute set itself.
+
+> **Example**
+>
+> ```nix
+> let
+>   a = {
+>     value = 1;
+>     __toString = self: toString (self.value + 1);
+>   };
+> in
+> "${a}"
+> ```
+>
+>     "2"
+
+An attribute set also interpolates to the value of its `outPath` attribute.
+
+> **Example**
+>
+> ```nix
+> let
+>   a = { outPath = "foo"; };
+> in
+> "${a}"
+> ```
+>
+>     "foo"
+
+If both `__toString` and `outPath` are present in an attribute set, `__toString` takes precedence.
+
+> **Example**
+>
+> ```nix
+> let
+>   a = { __toString = _: "yes"; outPath = throw "no"; };
+> in
+> "${a}"
+> ```
+>
+>     "yes"
+
+If neither is present, an error is thrown.
+
+> **Example**
+>
+> ```nix
+> let
+>   a = {};
+> in
+> "${a}"
+> ```
+>
+>     error: cannot coerce a set to a string
+>
+>     at «string»:4:2:
+>
+>         3| in
+>         4| "${a}"
+>          |  ^
diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md
index 2ae3e143a..0bb656746 100644
--- a/doc/manual/src/language/values.md
+++ b/doc/manual/src/language/values.md
@@ -107,29 +107,24 @@ e.g. `~/foo` would be equivalent to `/home/edolstra/foo` for a user whose
   home directory is `/home/edolstra`.
-  Paths can also be specified between angle brackets, e.g.
-  `<nixpkgs>`. This means that the directories listed in the
-  environment variable `NIX_PATH` will be searched for the given file
-  or directory name.
-
-  When an [interpolated string][string interpolation] evaluates to a path, the path is first copied into the Nix store and the resulting string is the [store path] of the newly created [store object].
-
-  [store path]: ../glossary.md#gloss-store-path
-  [store object]: ../glossary.md#gloss-store-object
-
   For instance, evaluating `"${./foo.txt}"` will cause `foo.txt` in the current directory to be copied into the Nix store and result in the string `"/nix/store/<hash>-foo.txt"`.
   Note that the Nix language assumes that all input files will remain _unchanged_ while evaluating a Nix expression.
   For example, assume you used a file path in an interpolated string during a `nix repl` session.
-  Later in the same session, after having changed the file contents, evaluating the interpolated string with the file path again might not return a new store path, since Nix might not re-read the file contents.
+  Later in the same session, after having changed the file contents, evaluating the interpolated string with the file path again might not return a new [store path], since Nix might not re-read the file contents.
-  Paths themselves, except those in angle brackets (`< >`), support [string interpolation].
+  [store path]: ../glossary.md#gloss-store-path
+
+  Paths can include [string interpolation] and can themselves be [interpolated in other expressions].
+  [interpolated in other expressions]: ./string-interpolation.md#interpolated-expressions
   At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path.
`a.${foo}/b.${bar}` is a syntactically valid division operation. `./a.${foo}/b.${bar}` is a path. + [Lookup paths](./constructs/lookup-path.md) such as `` resolve to path values. + - Boolean *Booleans* with values `true` and `false`. @@ -167,13 +162,17 @@ An attribute set is a collection of name-value-pairs (called *attributes*) enclo An attribute name can be an identifier or a [string](#string). An identifier must start with a letter (`a-z`, `A-Z`) or underscore (`_`), and can otherwise contain letters (`a-z`, `A-Z`), numbers (`0-9`), underscores (`_`), apostrophes (`'`), or dashes (`-`). +> **Syntax** +> > *name* = *identifier* | *string* \ > *identifier* ~ `[a-zA-Z_][a-zA-Z0-9_'-]*` Names and values are separated by an equal sign (`=`). Each value is an arbitrary expression terminated by a semicolon (`;`). -> *attrset* = `{` [ *name* `=` *expr* `;` `]`... `}` +> **Syntax** +> +> *attrset* = `{` [ *name* `=` *expr* `;` ]... `}` Attributes can appear in any order. An attribute name may only occur once. diff --git a/doc/manual/src/package-management/basic-package-mgmt.md b/doc/manual/src/package-management/basic-package-mgmt.md deleted file mode 100644 index 07b92fb76..000000000 --- a/doc/manual/src/package-management/basic-package-mgmt.md +++ /dev/null @@ -1,179 +0,0 @@ -# Basic Package Management - -The main command for package management is -[`nix-env`](../command-ref/nix-env.md). You can use it to install, -upgrade, and erase packages, and to query what packages are installed -or are available for installation. - -In Nix, different users can have different “views” on the set of -installed applications. That is, there might be lots of applications -present on the system (possibly in many different versions), but users -can have a specific selection of those active — where “active” just -means that it appears in a directory in the user’s `PATH`. Such a view -on the set of installed applications is called a *user environment*, -which is just a directory tree consisting of symlinks to the files of -the active applications. - -Components are installed from a set of *Nix expressions* that tell Nix -how to build those packages, including, if necessary, their -dependencies. There is a collection of Nix expressions called the -Nixpkgs package collection that contains packages ranging from basic -development stuff such as GCC and Glibc, to end-user applications like -Mozilla Firefox. (Nix is however not tied to the Nixpkgs package -collection; you could write your own Nix expressions based on Nixpkgs, -or completely new ones.) - -You can manually download the latest version of Nixpkgs from -. However, it’s much more -convenient to use the Nixpkgs [*channel*](../command-ref/nix-channel.md), since it makes -it easy to stay up to date with new versions of Nixpkgs. Nixpkgs is -automatically added to your list of “subscribed” channels when you -install Nix. If this is not the case for some reason, you can add it -as follows: - -```console -$ nix-channel --add https://nixos.org/channels/nixpkgs-unstable -$ nix-channel --update -``` - -> **Note** -> -> On NixOS, you’re automatically subscribed to a NixOS channel -> corresponding to your NixOS major release (e.g. -> ). A NixOS channel is identical -> to the Nixpkgs channel, except that it contains only Linux binaries -> and is updated only if a set of regression tests succeed. 
- -You can view the set of available packages in Nixpkgs: - -```console -$ nix-env --query --available --attr-path -nixpkgs.aterm aterm-2.2 -nixpkgs.bash bash-3.0 -nixpkgs.binutils binutils-2.15 -nixpkgs.bison bison-1.875d -nixpkgs.blackdown blackdown-1.4.2 -nixpkgs.bzip2 bzip2-1.0.2 -… -``` - -The flag `-q` specifies a query operation, `-a` means that you want -to show the “available” (i.e., installable) packages, as opposed to the -installed packages, and `-P` prints the attribute paths that can be used -to unambiguously select a package for installation (listed in the first column). -If you downloaded Nixpkgs yourself, or if you checked it out from GitHub, -then you need to pass the path to your Nixpkgs tree using the `-f` flag: - -```console -$ nix-env --query --available --attr-path --file /path/to/nixpkgs -aterm aterm-2.2 -bash bash-3.0 -… -``` - -where */path/to/nixpkgs* is where you’ve unpacked or checked out -Nixpkgs. - -You can filter the packages by name: - -```console -$ nix-env --query --available --attr-path firefox -nixpkgs.firefox-esr firefox-91.3.0esr -nixpkgs.firefox firefox-94.0.1 -``` - -and using regular expressions: - -```console -$ nix-env --query --available --attr-path 'firefox.*' -``` - -It is also possible to see the *status* of available packages, i.e., -whether they are installed into the user environment and/or present in -the system: - -```console -$ nix-env --query --available --attr-path --status -… --PS nixpkgs.bash bash-3.0 ---S nixpkgs.binutils binutils-2.15 -IPS nixpkgs.bison bison-1.875d -… -``` - -The first character (`I`) indicates whether the package is installed in -your current user environment. The second (`P`) indicates whether it is -present on your system (in which case installing it into your user -environment would be a very quick operation). The last one (`S`) -indicates whether there is a so-called *substitute* for the package, -which is Nix’s mechanism for doing binary deployment. It just means that -Nix knows that it can fetch a pre-built package from somewhere -(typically a network server) instead of building it locally. - -You can install a package using `nix-env --install --attr `. For instance, - -```console -$ nix-env --install --attr nixpkgs.subversion -``` - -will install the package called `subversion` from `nixpkgs` channel (which is, of course, the -[Subversion version management system](http://subversion.tigris.org/)). - -> **Note** -> -> When you ask Nix to install a package, it will first try to get it in -> pre-compiled form from a *binary cache*. By default, Nix will use the -> binary cache ; it contains binaries for most -> packages in Nixpkgs. Only if no binary is available in the binary -> cache, Nix will build the package from source. So if `nix-env -> -iA nixpkgs.subversion` results in Nix building stuff from source, then either -> the package is not built for your platform by the Nixpkgs build -> servers, or your version of Nixpkgs is too old or too new. For -> instance, if you have a very recent checkout of Nixpkgs, then the -> Nixpkgs build servers may not have had a chance to build everything -> and upload the resulting binaries to . The -> Nixpkgs channel is only updated after all binaries have been uploaded -> to the cache, so if you stick to the Nixpkgs channel (rather than -> using a Git checkout of the Nixpkgs tree), you will get binaries for -> most packages. - -Naturally, packages can also be uninstalled. 
Unlike when installing, you will -need to use the derivation name (though the version part can be omitted), -instead of the attribute path, as `nix-env` does not record which attribute -was used for installing: - -```console -$ nix-env --uninstall subversion -``` - -Upgrading to a new version is just as easy. If you have a new release of -Nix Packages, you can do: - -```console -$ nix-env --upgrade --attr nixpkgs.subversion -``` - -This will *only* upgrade Subversion if there is a “newer” version in the -new set of Nix expressions, as defined by some pretty arbitrary rules -regarding ordering of version numbers (which generally do what you’d -expect of them). To just unconditionally replace Subversion with -whatever version is in the Nix expressions, use `-i` instead of `-u`; -`-i` will remove whatever version is already installed. - -You can also upgrade all packages for which there are newer versions: - -```console -$ nix-env --upgrade -``` - -Sometimes it’s useful to be able to ask what `nix-env` would do, without -actually doing it. For instance, to find out what packages would be -upgraded by `nix-env --upgrade `, you can do - -```console -$ nix-env --upgrade --dry-run -(dry run; not doing anything) -upgrading `libxslt-1.1.0' to `libxslt-1.1.10' -upgrading `graphviz-1.10' to `graphviz-1.12' -upgrading `coreutils-5.0' to `coreutils-5.2.1' -``` diff --git a/doc/manual/src/protocols/derivation-aterm.md b/doc/manual/src/protocols/derivation-aterm.md new file mode 100644 index 000000000..e58b602a3 --- /dev/null +++ b/doc/manual/src/protocols/derivation-aterm.md @@ -0,0 +1,19 @@ +# Derivation "ATerm" file format + +For historical reasons, [derivations](@docroot@/glossary.md#gloss-store-derivation) are stored on-disk in [ATerm](https://homepages.cwi.nl/~daybuild/daily-books/technology/aterm-guide/aterm-guide.html) format. + +Derivations are serialised in one of the following formats: + +- ``` + Derive(...) + ``` + + For all stable derivations. + +- ``` + DrvWithVersion(, ...) + ``` + + The only `version-string`s that are in use today are for [experimental features](@docroot@/contributing/experimental-features.md): + + - `"xp-dyn-drv"` for the [`dynamic-derivations`](@docroot@/contributing/experimental-features.md#xp-feature-dynamic-derivations) experimental feature. diff --git a/doc/manual/src/protocols/tarball-fetcher.md b/doc/manual/src/protocols/tarball-fetcher.md index 0d3212303..274fa6d63 100644 --- a/doc/manual/src/protocols/tarball-fetcher.md +++ b/doc/manual/src/protocols/tarball-fetcher.md @@ -20,8 +20,8 @@ Link: ; rel="immutable" (Note the required `<` and `>` characters around *flakeref*.) -*flakeref* must be a tarball flakeref. It can contain flake attributes -such as `narHash`, `rev` and `revCount`. If `narHash` is included, its +*flakeref* must be a tarball flakeref. It can contain the tarball flake attributes +`narHash`, `rev`, `revCount` and `lastModified`. If `narHash` is included, its value must be the NAR hash of the unpacked tarball (as computed via `nix hash path`). Nix checks the contents of the returned tarball against the `narHash` attribute. The `rev` and `revCount` attributes diff --git a/doc/manual/src/release-notes/release-notes.md b/doc/manual/src/release-notes/release-notes.md index b05d5ee0a..cc805e631 100644 --- a/doc/manual/src/release-notes/release-notes.md +++ b/doc/manual/src/release-notes/release-notes.md @@ -1 +1,12 @@ # Nix Release Notes + +Nix has a release cycle of roughly 6 weeks. 
+Notable changes and additions are announced in the release notes for each version. + +Bugfixes can be backported on request to previous Nix releases. +We typically backport only as far back as the Nix version used in the latest NixOS release, which is announced in the [NixOS release notes](https://nixos.org/manual/nixos/stable/release-notes.html#ch-release-notes). + +Backports never skip releases. +If a feature is backported to version `x.y`, it must also be available in version `x.(y+1)`. +This ensures that upgrading from an older version with backports is still safe and no backported functionality will go missing. + diff --git a/doc/manual/src/release-notes/rl-2.12.md b/doc/manual/src/release-notes/rl-2.12.md index e2045d7bf..e1e3efe1a 100644 --- a/doc/manual/src/release-notes/rl-2.12.md +++ b/doc/manual/src/release-notes/rl-2.12.md @@ -2,7 +2,6 @@ * On Linux, Nix can now run builds in a user namespace where they run as root (UID 0) and have 65,536 UIDs available. - This is primarily useful for running containers such as `systemd-nspawn` inside a Nix build. For an example, see [`tests/systemd-nspawn/nix`][nspawn]. diff --git a/doc/manual/src/release-notes/rl-2.15.md b/doc/manual/src/release-notes/rl-2.15.md index 133121999..4faf0b143 100644 --- a/doc/manual/src/release-notes/rl-2.15.md +++ b/doc/manual/src/release-notes/rl-2.15.md @@ -44,7 +44,7 @@ (The store always had to check whether it trusts the client, but now the client is informed of the store's decision.) This is useful for scripting interactions with (non-legacy-ssh) remote Nix stores. - `nix store ping` and `nix doctor` now display this information. + `nix store info` and `nix doctor` now display this information. * The new command `nix derivation add` allows adding derivations to the store without involving the Nix language. It exists to round out our collection of basic utility/plumbing commands, and allow for a low barrier-to-entry way of experimenting with alternative front-ends to the Nix Store. diff --git a/doc/manual/src/release-notes/rl-2.18.md b/doc/manual/src/release-notes/rl-2.18.md new file mode 100644 index 000000000..4bbc52b50 --- /dev/null +++ b/doc/manual/src/release-notes/rl-2.18.md @@ -0,0 +1,28 @@ +# Release 2.18 (2023-09-20) + +- Two new builtin functions, + [`builtins.parseFlakeRef`](@docroot@/language/builtins.md#builtins-parseFlakeRef) + and + [`builtins.flakeRefToString`](@docroot@/language/builtins.md#builtins-flakeRefToString), + have been added. + These functions are useful for converting between flake references encoded as attribute sets and URLs. + +- [`builtins.toJSON`](@docroot@/language/builtins.md#builtins-parseFlakeRef) now prints [--show-trace](@docroot@/command-ref/conf-file.html#conf-show-trace) items for the path in which it finds an evaluation error. + +- Error messages regarding malformed input to [`nix derivation add`](@docroot@/command-ref/new-cli/nix3-derivation-add.md) are now clearer and more detailed. + +- The `discard-references` feature has been stabilized. + This means that the + [unsafeDiscardReferences](@docroot@/contributing/experimental-features.md#xp-feature-discard-references) + attribute is no longer guarded by an experimental flag and can be used + freely. + +- The JSON output for derived paths which are store paths is now a string, not an object with a single `path` field. + This only affects `nix-build --json` when "building" non-derivation things like fetched sources, which is a no-op. 
+ +- A new builtin [`outputOf`](@docroot@/language/builtins.md#builtins-outputOf) has been added. + It is part of the [`dynamic-derivations`](@docroot@/contributing/experimental-features.md#xp-feature-dynamic-derivations) experimental feature. + +- Flake follow paths at depths greater than 2 are now handled correctly, preventing "follows a non-existent input" errors. + +- [`nix-store --query`](@docroot@/command-ref/nix-store/query.md) gained a new type of query: `--valid-derivers`. It returns all `.drv` files in the local store that *can be* used to build the output passed in argument. This is in contrast to `--deriver`, which returns the single `.drv` file that *was actually* used to build the output passed in argument. In case the output was substituted from a binary cache, this `.drv` file may only exist on said binary cache and not locally. diff --git a/doc/manual/src/release-notes/rl-2.7.md b/doc/manual/src/release-notes/rl-2.7.md index 2f3879422..dd649e166 100644 --- a/doc/manual/src/release-notes/rl-2.7.md +++ b/doc/manual/src/release-notes/rl-2.7.md @@ -24,7 +24,7 @@ [repository](https://github.com/NixOS/bundlers) has various bundlers implemented. -* `nix store ping` now reports the version of the remote Nix daemon. +* `nix store info` now reports the version of the remote Nix daemon. * `nix flake {init,new}` now display information about which files have been created. diff --git a/doc/manual/src/release-notes/rl-next.md b/doc/manual/src/release-notes/rl-next.md index 6516a2663..276252c37 100644 --- a/doc/manual/src/release-notes/rl-next.md +++ b/doc/manual/src/release-notes/rl-next.md @@ -1,18 +1,17 @@ # Release X.Y (202?-??-??) -- Two new builtin functions, - [`builtins.parseFlakeRef`](@docroot@/language/builtins.md#builtins-parseFlakeRef) - and - [`builtins.flakeRefToString`](@docroot@/language/builtins.md#builtins-flakeRefToString), - have been added. - These functions are useful for converting between flake references encoded as attribute sets and URLs. +- [URL flake references](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references) now support [percent-encoded](https://datatracker.ietf.org/doc/html/rfc3986#section-2.1) characters. -- [`builtins.toJSON`](@docroot@/language/builtins.md#builtins-parseFlakeRef) now prints [--show-trace](@docroot@/command-ref/conf-file.html#conf-show-trace) items for the path in which it finds an evaluation error. +- [Path-like flake references](@docroot@/command-ref/new-cli/nix3-flake.md#path-like-syntax) now accept arbitrary unicode characters (except `#` and `?`). -- Error messages regarding malformed input to [`derivation add`](@docroot@/command-ref/new-cli/nix3-derivation-add.md) are now clearer and more detailed. +- The experimental feature `repl-flake` is no longer needed, as its functionality is now part of the `flakes` experimental feature. To get the previous behavior, use the `--file/--expr` flags accordingly. -- The `discard-references` feature has been stabilized. - This means that the - [unsafeDiscardReferences](@docroot@/contributing/experimental-features.md#xp-feature-discard-references) - attribute is no longer guarded by an experimental flag and can be used - freely. +- Introduce new flake installable syntax `flakeref#.attrPath` where the "." prefix denotes no searching of default attribute prefixes like `packages.` or `legacyPackages.`. + +- Nix adds `apple-virt` to the default system features on macOS systems that support virtualization. This is similar to what's done for the `kvm` system feature on Linux hosts. 
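+
+For illustration, a `follows` declaration nested more than two inputs deep (the case whose handling was fixed above) can look like this; the `foo` and `bar` inputs are placeholders:
+
+```nix
+{
+  inputs = {
+    nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small";
+    foo.url = "github:example/foo";
+    # A follows path three levels deep (foo/bar/nixpkgs); such paths
+    # previously could fail with "follows a non-existent input".
+    foo.inputs.bar.inputs.nixpkgs.follows = "nixpkgs";
+  };
+
+  outputs = { self, nixpkgs, foo }: { };
+}
+```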
+ +- Introduce a new built-in function [`builtins.convertHash`](@docroot@/language/builtins.md#builtins-convertHash). + +- `nix-shell` shebang lines now support single-quoted arguments. + +- `builtins.fetchTree` is now marked as stable. diff --git a/doc/manual/utils.nix b/doc/manual/utils.nix index 9043dd8cd..849832b2c 100644 --- a/doc/manual/utils.nix +++ b/doc/manual/utils.nix @@ -44,63 +44,6 @@ rec { optionalString = cond: string: if cond then string else ""; - showSetting = { useAnchors }: name: { description, documentDefault, defaultValue, aliases, value, experimentalFeature }: - let - result = squash '' - - ${if useAnchors - then ''[`${name}`](#conf-${name})'' - else ''`${name}`''} - - ${indent " " body} - ''; - - experimentalFeatureNote = optionalString (experimentalFeature != null) '' - > **Warning** - > This setting is part of an - > [experimental feature](@docroot@/contributing/experimental-features.md). - - To change this setting, you need to make sure the corresponding experimental feature, - [`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}), - is enabled. - For example, include the following in [`nix.conf`](#): - - ``` - extra-experimental-features = ${experimentalFeature} - ${name} = ... - ``` - ''; - - # separate body to cleanly handle indentation - body = '' - ${description} - - ${experimentalFeatureNote} - - **Default:** ${showDefault documentDefault defaultValue} - - ${showAliases aliases} - ''; - - showDefault = documentDefault: defaultValue: - if documentDefault then - # a StringMap value type is specified as a string, but - # this shows the value type. The empty stringmap is `null` in - # JSON, but that converts to `{ }` here. - if defaultValue == "" || defaultValue == [] || isAttrs defaultValue - then "*empty*" - else if isBool defaultValue then - if defaultValue then "`true`" else "`false`" - else "`${toString defaultValue}`" - else "*machine-specific*"; - - showAliases = aliases: - optionalString (aliases != []) - "**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}"; - - in result; - indent = prefix: s: concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s)); - - showSettings = args: settingsInfo: concatStrings (attrValues (mapAttrs (showSetting args) settingsInfo)); } diff --git a/flake.lock b/flake.lock index 1d2aab5ed..56df9c3fb 100644 --- a/flake.lock +++ b/flake.lock @@ -34,16 +34,16 @@ }, "nixpkgs": { "locked": { - "lastModified": 1670461440, - "narHash": "sha256-jy1LB8HOMKGJEGXgzFRLDU1CBGL0/LlkolgnqIsF0D8=", + "lastModified": 1695283060, + "narHash": "sha256-CJz71xhCLlRkdFUSQEL0pIAAfcnWFXMzd9vXhPrnrEg=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "04a75b2eecc0acf6239acf9dd04485ff8d14f425", + "rev": "31ed632c692e6a36cfc18083b88ece892f863ed4", "type": "github" }, "original": { "owner": "NixOS", - "ref": "nixos-22.11-small", + "ref": "nixos-23.05-small", "repo": "nixpkgs", "type": "github" } diff --git a/flake.nix b/flake.nix index bdbf54169..398ba10a0 100644 --- a/flake.nix +++ b/flake.nix @@ -1,7 +1,7 @@ { description = "The purely functional package manager"; - inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.11-small"; + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; }; inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; @@ 
-19,11 +19,16 @@ then "" else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}"; + linux32BitSystems = [ "i686-linux" ]; linux64BitSystems = [ "x86_64-linux" "aarch64-linux" ]; - linuxSystems = linux64BitSystems ++ [ "i686-linux" ]; - systems = linuxSystems ++ [ "x86_64-darwin" "aarch64-darwin" ]; + linuxSystems = linux32BitSystems ++ linux64BitSystems; + darwinSystems = [ "x86_64-darwin" "aarch64-darwin" ]; + systems = linuxSystems ++ darwinSystems; - crossSystems = [ "armv6l-linux" "armv7l-linux" ]; + crossSystems = [ + "armv6l-linux" "armv7l-linux" + "x86_64-freebsd13" "x86_64-netbsd" + ]; stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ]; @@ -40,12 +45,69 @@ }) stdenvs); + # Experimental fileset library: https://github.com/NixOS/nixpkgs/pull/222981 + # Not an "idiomatic" flake input because: + # - Propagation to dependent locks: https://github.com/NixOS/nix/issues/7730 + # - Subflake would download redundant and huge parent flake + # - No git tree hash support: https://github.com/NixOS/nix/issues/6044 + inherit (import (builtins.fetchTarball { url = "https://github.com/NixOS/nix/archive/1bdcd7fc8a6a40b2e805bad759b36e64e911036b.tar.gz"; sha256 = "sha256:14ljlpdsp4x7h1fkhbmc4bd3vsqnx8zdql4h3037wh09ad6a0893"; })) + fileset; + + baseFiles = + # .gitignore has already been processed, so any changes in it are irrelevant + # at this point. It is not represented verbatim for test purposes because + # that would interfere with repo semantics. + fileset.fileFilter (f: f.name != ".gitignore") ./.; + + configureFiles = fileset.unions [ + ./.version + ./configure.ac + ./m4 + # TODO: do we really need README.md? It doesn't seem used in the build. + ./README.md + ]; + + topLevelBuildFiles = fileset.unions [ + ./local.mk + ./Makefile + ./Makefile.config.in + ./mk + ]; + + functionalTestFiles = fileset.unions [ + ./tests/functional + (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts) + ]; + + nixSrc = fileset.toSource { + root = ./.; + fileset = fileset.intersect baseFiles (fileset.unions [ + configureFiles + topLevelBuildFiles + ./boehmgc-coroutine-sp-fallback.diff + ./doc + ./misc + ./precompiled-headers.h + ./src + ./unit-test-data + ./COPYING + ./scripts/local.mk + functionalTestFiles + ]); + }; # Memoize nixpkgs for different platforms for efficiency. 
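+      # As a rough sketch of how the fileset combinators above compose
+      # (the selection shown is illustrative):
+      #
+      #   fileset.toSource {
+      #     root = ./.;
+      #     fileset = fileset.intersect baseFiles (fileset.unions [ ./doc ./src ]);
+      #   }
+      #
+      # produces a source tree (usable as `src`) containing only the files
+      # selected by the intersection.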
nixpkgsFor = forAllSystems (system: let make-pkgs = crossSystem: stdenv: import nixpkgs { - inherit system crossSystem; + localSystem = { + inherit system; + }; + crossSystem = if crossSystem == null then null else { + system = crossSystem; + } // lib.optionalAttrs (crossSystem == "x86_64-freebsd13") { + useLLVM = true; + }; overlays = [ (overlayFor (p: p.${stdenv})) ]; @@ -131,9 +193,9 @@ libarchive boost lowdown-nix + libsodium ] ++ lib.optionals stdenv.isLinux [libseccomp] - ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid; checkDeps = [ @@ -209,7 +271,14 @@ "-${client.version}-against-${daemon.version}"; inherit version; - src = self; + src = fileset.toSource { + root = ./.; + fileset = fileset.intersect baseFiles (fileset.unions [ + configureFiles + topLevelBuildFiles + functionalTestFiles + ]); + }; VERSION_SUFFIX = versionSuffix; @@ -219,7 +288,9 @@ enableParallelBuilding = true; - configureFlags = testConfigureFlags; # otherwise configure fails + configureFlags = + testConfigureFlags # otherwise configure fails + ++ [ "--disable-build" ]; dontBuild = true; doInstallCheck = true; @@ -227,7 +298,10 @@ mkdir -p $out ''; - installCheckPhase = "make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES"; + installCheckPhase = '' + mkdir -p src/nix-channel + make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES + ''; }; binaryTarball = nix: pkgs: @@ -320,18 +394,11 @@ }; let canRunInstalled = currentStdenv.buildPlatform.canExecute currentStdenv.hostPlatform; - - sourceByRegexInverted = rxs: origSrc: final.lib.cleanSourceWith { - filter = (path: type: - let relPath = final.lib.removePrefix (toString origSrc + "/") (toString path); - in ! lib.any (re: builtins.match re relPath != null) rxs); - src = origSrc; - }; in currentStdenv.mkDerivation (finalAttrs: { name = "nix-${version}"; inherit version; - src = sourceByRegexInverted [ "tests/nixos/.*" "tests/installer/.*" ] self; + src = nixSrc; VERSION_SUFFIX = versionSuffix; outputs = [ "out" "dev" "doc" ]; @@ -407,39 +474,13 @@ hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - passthru.perl-bindings = with final; perl.pkgs.toPerlModule (currentStdenv.mkDerivation { - name = "nix-perl-${version}"; - - src = self; - - nativeBuildInputs = - [ buildPackages.autoconf-archive - buildPackages.autoreconfHook - buildPackages.pkg-config - ]; - - buildInputs = - [ nix - curl - bzip2 - xz - pkgs.perl - boost - ] - ++ lib.optional (currentStdenv.isLinux || currentStdenv.isDarwin) libsodium - ++ lib.optional currentStdenv.isDarwin darwin.apple_sdk.frameworks.Security; - - configureFlags = [ - "--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}" - "--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}" - ]; - - enableParallelBuilding = true; - - postUnpack = "sourceRoot=$sourceRoot/perl"; - }); + passthru.perl-bindings = final.callPackage ./perl { + inherit fileset; + stdenv = currentStdenv; + }; meta.platforms = lib.platforms.unix; + meta.mainProgram = "nix"; }); lowdown-nix = with final; currentStdenv.mkDerivation rec { @@ -460,18 +501,6 @@ }; }; - nixos-lib = import (nixpkgs + "/nixos/lib") { }; - - # https://nixos.org/manual/nixos/unstable/index.html#sec-calling-nixos-tests - runNixOSTestFor = system: test: nixos-lib.runTest { - imports = [ test ]; - hostPkgs = nixpkgsFor.${system}.native; - defaults = { - nixpkgs.pkgs = nixpkgsFor.${system}.native; - }; - _module.args.nixpkgs = nixpkgs; - }; - in { # A Nixpkgs overlay that overrides the 'nix' and # 
'nix.perl-bindings' packages. @@ -529,7 +558,7 @@ releaseTools.coverageAnalysis { name = "nix-coverage-${version}"; - src = self; + src = nixSrc; configureFlags = testConfigureFlags; @@ -546,6 +575,8 @@ lcovFilter = [ "*/boost/*" "*-tab.*" ]; hardeningDisable = ["fortify"]; + + NIX_CFLAGS_COMPILE = "-DCOVERAGE=1"; }; # API docs for Nix's unstable internal C++ interfaces. @@ -557,7 +588,7 @@ pname = "nix-internal-api-docs"; inherit version; - src = self; + src = nixSrc; configureFlags = testConfigureFlags ++ internalApiDocsConfigureFlags; @@ -576,47 +607,29 @@ }; # System tests. - tests.authorization = runNixOSTestFor "x86_64-linux" ./tests/nixos/authorization.nix; + tests = import ./tests/nixos { inherit lib nixpkgs nixpkgsFor; } // { - tests.remoteBuilds = runNixOSTestFor "x86_64-linux" ./tests/nixos/remote-builds.nix; + # Make sure that nix-env still produces the exact same result + # on a particular version of Nixpkgs. + evalNixpkgs = + with nixpkgsFor.x86_64-linux.native; + runCommand "eval-nixos" { buildInputs = [ nix ]; } + '' + type -p nix-env + # Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593. + time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages + [[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]] + mkdir $out + ''; - tests.nix-copy-closure = runNixOSTestFor "x86_64-linux" ./tests/nixos/nix-copy-closure.nix; - - tests.nix-copy = runNixOSTestFor "x86_64-linux" ./tests/nixos/nix-copy.nix; - - tests.nssPreload = runNixOSTestFor "x86_64-linux" ./tests/nixos/nss-preload.nix; - - tests.githubFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/github-flakes.nix; - - tests.sourcehutFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/sourcehut-flakes.nix; - - tests.tarballFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/tarball-flakes.nix; - - tests.containers = runNixOSTestFor "x86_64-linux" ./tests/nixos/containers/containers.nix; - - tests.setuid = lib.genAttrs - ["i686-linux" "x86_64-linux"] - (system: runNixOSTestFor system ./tests/nixos/setuid.nix); - - - # Make sure that nix-env still produces the exact same result - # on a particular version of Nixpkgs. - tests.evalNixpkgs = - with nixpkgsFor.x86_64-linux.native; - runCommand "eval-nixos" { buildInputs = [ nix ]; } - '' - type -p nix-env - # Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593. 
- time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages - [[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]] - mkdir $out - ''; - - tests.nixpkgsLibTests = - forAllSystems (system: - import (nixpkgs + "/lib/tests/release.nix") - { pkgs = nixpkgsFor.${system}.native; } - ); + nixpkgsLibTests = + forAllSystems (system: + import (nixpkgs + "/lib/tests/release.nix") + { pkgs = nixpkgsFor.${system}.native; + nixVersions = [ self.packages.${system}.nix ]; + } + ); + }; metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" { pkgs = nixpkgsFor.x86_64-linux.native; @@ -687,6 +700,9 @@ devShells = let makeShell = pkgs: stdenv: + let + canRunInstalled = stdenv.buildPlatform.canExecute stdenv.hostPlatform; + in with commonDeps { inherit pkgs; }; stdenv.mkDerivation { name = "nix"; @@ -694,13 +710,18 @@ outputs = [ "out" "dev" "doc" ]; nativeBuildInputs = nativeBuildDeps - ++ (lib.optionals stdenv.cc.isClang [ pkgs.bear pkgs.clang-tools ]); + ++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear + ++ lib.optional + (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) + pkgs.buildPackages.clang-tools + ; buildInputs = buildDeps ++ propagatedDeps ++ awsDeps ++ checkDeps ++ internalApiDocsDeps; configureFlags = configureFlags - ++ testConfigureFlags ++ internalApiDocsConfigureFlags; + ++ testConfigureFlags ++ internalApiDocsConfigureFlags + ++ lib.optional (!canRunInstalled) "--disable-doc-gen"; enableParallelBuilding = true; diff --git a/local.mk b/local.mk index 6951c179e..3f3abb9f0 100644 --- a/local.mk +++ b/local.mk @@ -1,5 +1,3 @@ -clean-files += Makefile.config - GLOBAL_CXXFLAGS += -Wno-deprecated-declarations -Werror=switch # Allow switch-enum to be overridden for files that do not support it, usually because of dependency headers. ERROR_SWITCH_ENUM = -Werror=switch-enum diff --git a/maintainers/README.md b/maintainers/README.md index 0d520cb0c..5be4f9d04 100644 --- a/maintainers/README.md +++ b/maintainers/README.md @@ -50,7 +50,9 @@ The team meets twice a week: 1. Code review on pull requests from [In review](#in-review). 2. Other chores and tasks. -Meeting notes are collected on a [collaborative scratchpad](https://pad.lassul.us/Cv7FpYx-Ri-4VjUykQOLAw), and published on Discourse under the [Nix category](https://discourse.nixos.org/c/dev/nix/50). +Meeting notes are collected on a [collaborative scratchpad](https://pad.lassul.us/Cv7FpYx-Ri-4VjUykQOLAw). +Notes on issues and pull requests are posted as comments and linked from the meeting notes, so they are easy to find from both places. +[All meeting notes](https://discourse.nixos.org/search?expanded=true&q=Nix%20team%20meeting%20minutes%20%23%20%23dev%3Anix%20in%3Atitle%20order%3Alatest_topic) are published on Discourse under the [Nix category](https://discourse.nixos.org/c/dev/nix/50). ## Project board protocol @@ -96,8 +98,10 @@ What constitutes a trivial pull request is up to maintainers' judgement. Pull requests and issues that are deemed important and controversial are discussed by the team during discussion meetings. This may be where the merit of the change itself or the implementation strategy is contested by a team member. +Whenever the discussion opens up questions about the process or this team's goals, this may indicate that the change is too large in scope. 
+In that case it is taken off the board to be reconsidered by the author or broken down into smaller pieces that are less far-reaching and can be reviewed independently. -As a general guideline, the order of items is determined as follows: +As a general guideline, the order of items to discuss is determined as follows: - Prioritise pull requests over issues diff --git a/mk/common-test.sh b/mk/common-test.sh index 0a2e4c1c2..7ab25febf 100644 --- a/mk/common-test.sh +++ b/mk/common-test.sh @@ -1,11 +1,15 @@ +test_dir=tests/functional + +test=$(echo -n "$test" | sed -e "s|^$test_dir/||") + TESTS_ENVIRONMENT=("TEST_NAME=${test%.*}" 'NIX_REMOTE=') : ${BASH:=/usr/bin/env bash} init_test () { - cd tests && env "${TESTS_ENVIRONMENT[@]}" $BASH -e init.sh 2>/dev/null > /dev/null + cd "$test_dir" && env "${TESTS_ENVIRONMENT[@]}" $BASH -e init.sh 2>/dev/null > /dev/null } run_test_proper () { - cd $(dirname $test) && env "${TESTS_ENVIRONMENT[@]}" $BASH -e $(basename $test) + cd "$test_dir/$(dirname $test)" && env "${TESTS_ENVIRONMENT[@]}" $BASH -e $(basename $test) } diff --git a/mk/programs.mk b/mk/programs.mk index 1ee1d3fa5..a88d9d949 100644 --- a/mk/programs.mk +++ b/mk/programs.mk @@ -87,6 +87,6 @@ define build-program # Phony target to run this program (typically as a dependency of 'check'). .PHONY: $(1)_RUN $(1)_RUN: $$($(1)_PATH) - $(trace-test) $$($(1)_PATH) + $(trace-test) $$(UNIT_TEST_ENV) $$($(1)_PATH) endef diff --git a/perl/Makefile b/perl/Makefile index c2c95f255..832668dd1 100644 --- a/perl/Makefile +++ b/perl/Makefile @@ -1,6 +1,12 @@ makefiles = local.mk -GLOBAL_CXXFLAGS += -g -Wall -std=c++2a -I ../src +GLOBAL_CXXFLAGS += -g -Wall -std=c++2a + +# A convenience for concurrent development of Nix and its Perl bindings. +# Not needed in a standalone build of the Perl bindings. 
+ifneq ("$(wildcard ../src)", "") + GLOBAL_CXXFLAGS += -I ../src +endif -include Makefile.config diff --git a/perl/default.nix b/perl/default.nix new file mode 100644 index 000000000..4687976a1 --- /dev/null +++ b/perl/default.nix @@ -0,0 +1,51 @@ +{ lib, fileset +, stdenv +, perl, perlPackages +, autoconf-archive, autoreconfHook, pkg-config +, nix, curl, bzip2, xz, boost, libsodium, darwin +}: + +perl.pkgs.toPerlModule (stdenv.mkDerivation { + name = "nix-perl-${nix.version}"; + + src = fileset.toSource { + root = ../.; + fileset = fileset.unions [ + ../.version + ../m4 + ../mk + ./MANIFEST + ./Makefile + ./Makefile.config.in + ./configure.ac + ./lib + ./local.mk + ]; + }; + + nativeBuildInputs = + [ autoconf-archive + autoreconfHook + pkg-config + ]; + + buildInputs = + [ nix + curl + bzip2 + xz + perl + boost + ] + ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium + ++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security; + + configureFlags = [ + "--with-dbi=${perlPackages.DBI}/${perl.libPrefix}" + "--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}" + ]; + + enableParallelBuilding = true; + + postUnpack = "sourceRoot=$sourceRoot/perl"; +}) diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index c38ea2d2b..08f812b31 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -78,7 +78,7 @@ SV * queryReferences(char * path) SV * queryPathHash(char * path) PPCODE: try { - auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true); + auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Base32, true); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -104,7 +104,7 @@ SV * queryPathInfo(char * path, int base32) XPUSHs(&PL_sv_undef); else XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); - auto s = info->narHash.to_string(base32 ? Base32 : Base16, true); + auto s = info->narHash.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, true); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); mXPUSHi(info->registrationTime); mXPUSHi(info->narSize); @@ -206,7 +206,7 @@ SV * hashPath(char * algo, int base32, char * path) PPCODE: try { Hash h = hashPath(parseHashType(algo), path).first; - auto s = h.to_string(base32 ? Base32 : Base16, false); + auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -217,7 +217,7 @@ SV * hashFile(char * algo, int base32, char * path) PPCODE: try { Hash h = hashFile(parseHashType(algo), path); - auto s = h.to_string(base32 ? Base32 : Base16, false); + auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -228,7 +228,7 @@ SV * hashString(char * algo, int base32, char * s) PPCODE: try { Hash h = hashString(parseHashType(algo), s); - auto s = h.to_string(base32 ? Base32 : Base16, false); + auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -239,7 +239,7 @@ SV * convertHash(char * algo, char * s, int toBase32) PPCODE: try { auto h = Hash::parseAny(s, parseHashType(algo)); - auto s = h.to_string(toBase32 ? Base32 : Base16, false); + auto s = h.to_string(toBase32 ? 
HashFormat::Base32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -324,7 +324,7 @@ SV * derivationFromPath(char * drvPath) hv_stores(hash, "outputs", newRV((SV *) outputs)); AV * inputDrvs = newAV(); - for (auto & i : drv.inputDrvs) + for (auto & i : drv.inputDrvs.map) av_push(inputDrvs, newSVpv(store()->printStorePath(i.first).c_str(), 0)); // !!! ignores i->second hv_stores(hash, "inputDrvs", newRV((SV *) inputDrvs)); diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index 656769d84..a08f62333 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -452,6 +452,14 @@ EOF # a row for different files. if [ -e "$profile_target$PROFILE_BACKUP_SUFFIX" ]; then # this backup process first released in Nix 2.1 + + if diff -q "$profile_target$PROFILE_BACKUP_SUFFIX" "$profile_target" > /dev/null; then + # a backup file for the rc-file exist, but they are identical, + # so we can safely ignore it and overwrite it with the same + # content later + continue + fi + failure <parseStorePathSet(inputs); optResult = sshStore->buildDerivation(*drvPath, (const BasicDerivation &) drv); auto & result = *optResult; @@ -322,7 +322,12 @@ connected: throw Error("build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg); } else { copyClosure(*store, *sshStore, StorePathSet {*drvPath}, NoRepair, NoCheckSigs, substitute); - auto res = sshStore->buildPathsWithResults({ DerivedPath::Built { *drvPath, OutputsSpec::All {} } }); + auto res = sshStore->buildPathsWithResults({ + DerivedPath::Built { + .drvPath = makeConstantStorePathRef(*drvPath), + .outputs = OutputsSpec::All {}, + } + }); // One path to build should produce exactly one build result assert(res.size() == 1); optResult = std::move(res[0]); diff --git a/src/libcmd/built-path.cc b/src/libcmd/built-path.cc index db9c440e3..9a2dce806 100644 --- a/src/libcmd/built-path.cc +++ b/src/libcmd/built-path.cc @@ -8,13 +8,39 @@ namespace nix { -nlohmann::json BuiltPath::Built::toJSON(ref store) const { - nlohmann::json res; - res["drvPath"] = store->printStorePath(drvPath); - for (const auto& [output, path] : outputs) { - res["outputs"][output] = store->printStorePath(path); +#define CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, COMPARATOR) \ + bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \ + { \ + const MY_TYPE* me = this; \ + auto fields1 = std::make_tuple(*me->drvPath, me->FIELD); \ + me = &other; \ + auto fields2 = std::make_tuple(*me->drvPath, me->FIELD); \ + return fields1 COMPARATOR fields2; \ } - return res; +#define CMP(CHILD_TYPE, MY_TYPE, FIELD) \ + CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, ==) \ + CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, !=) \ + CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, <) + +#define FIELD_TYPE std::pair +CMP(SingleBuiltPath, SingleBuiltPathBuilt, output) +#undef FIELD_TYPE + +#define FIELD_TYPE std::map +CMP(SingleBuiltPath, BuiltPathBuilt, outputs) +#undef FIELD_TYPE + +#undef CMP +#undef CMP_ONE + +StorePath SingleBuiltPath::outPath() const +{ + return std::visit( + overloaded{ + [](const SingleBuiltPath::Opaque & p) { return p.path; }, + [](const SingleBuiltPath::Built & b) { return b.output.second; }, + }, raw() + ); } StorePathSet BuiltPath::outPaths() const @@ -32,6 +58,62 @@ StorePathSet BuiltPath::outPaths() const ); } +SingleDerivedPath::Built SingleBuiltPath::Built::discardOutputPath() const +{ + return SingleDerivedPath::Built { + .drvPath = 
make_ref(drvPath->discardOutputPath()), + .output = output.first, + }; +} + +SingleDerivedPath SingleBuiltPath::discardOutputPath() const +{ + return std::visit( + overloaded{ + [](const SingleBuiltPath::Opaque & p) -> SingleDerivedPath { + return p; + }, + [](const SingleBuiltPath::Built & b) -> SingleDerivedPath { + return b.discardOutputPath(); + }, + }, raw() + ); +} + +nlohmann::json BuiltPath::Built::toJSON(const Store & store) const +{ + nlohmann::json res; + res["drvPath"] = drvPath->toJSON(store); + for (const auto & [outputName, outputPath] : outputs) { + res["outputs"][outputName] = store.printStorePath(outputPath); + } + return res; +} + +nlohmann::json SingleBuiltPath::Built::toJSON(const Store & store) const +{ + nlohmann::json res; + res["drvPath"] = drvPath->toJSON(store); + auto & [outputName, outputPath] = output; + res["output"] = outputName; + res["outputPath"] = store.printStorePath(outputPath); + return res; +} + +nlohmann::json SingleBuiltPath::toJSON(const Store & store) const +{ + return std::visit([&](const auto & buildable) { + return buildable.toJSON(store); + }, raw()); +} + +nlohmann::json BuiltPath::toJSON(const Store & store) const +{ + return std::visit([&](const auto & buildable) { + return buildable.toJSON(store); + }, raw()); +} + RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const { RealisedPath::Set res; @@ -40,7 +122,7 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const [&](const BuiltPath::Opaque & p) { res.insert(p.path); }, [&](const BuiltPath::Built & p) { auto drvHashes = - staticOutputHashes(store, store.readDerivation(p.drvPath)); + staticOutputHashes(store, store.readDerivation(p.drvPath->outPath())); for (auto& [outputName, outputPath] : p.outputs) { if (experimentalFeatureSettings.isEnabled( Xp::CaDerivations)) { @@ -48,7 +130,7 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const if (!drvOutput) throw Error( "the derivation '%s' has unrealised output '%s' (derived-path.cc/toRealisedPaths)", - store.printStorePath(p.drvPath), outputName); + store.printStorePath(p.drvPath->outPath()), outputName); auto thisRealisation = store.queryRealisation( DrvOutput{*drvOutput, outputName}); assert(thisRealisation); // We’ve built it, so we must diff --git a/src/libcmd/built-path.hh b/src/libcmd/built-path.hh index 744e8090b..e677bc810 100644 --- a/src/libcmd/built-path.hh +++ b/src/libcmd/built-path.hh @@ -3,19 +3,64 @@ namespace nix { +struct SingleBuiltPath; + +struct SingleBuiltPathBuilt { + ref drvPath; + std::pair output; + + SingleDerivedPathBuilt discardOutputPath() const; + + std::string to_string(const Store & store) const; + static SingleBuiltPathBuilt parse(const Store & store, std::string_view, std::string_view); + nlohmann::json toJSON(const Store & store) const; + + DECLARE_CMP(SingleBuiltPathBuilt); +}; + +using _SingleBuiltPathRaw = std::variant< + DerivedPathOpaque, + SingleBuiltPathBuilt +>; + +struct SingleBuiltPath : _SingleBuiltPathRaw { + using Raw = _SingleBuiltPathRaw; + using Raw::Raw; + + using Opaque = DerivedPathOpaque; + using Built = SingleBuiltPathBuilt; + + inline const Raw & raw() const { + return static_cast(*this); + } + + StorePath outPath() const; + + SingleDerivedPath discardOutputPath() const; + + static SingleBuiltPath parse(const Store & store, std::string_view); + nlohmann::json toJSON(const Store & store) const; +}; + +static inline ref staticDrv(StorePath drvPath) +{ + return make_ref(SingleBuiltPath::Opaque { drvPath }); +} + /** * A built derived path with hints 
in the form of optional concrete output paths. * * See 'BuiltPath' for more an explanation. */ struct BuiltPathBuilt { - StorePath drvPath; + ref drvPath; std::map outputs; - nlohmann::json toJSON(ref store) const; - static BuiltPathBuilt parse(const Store & store, std::string_view); + std::string to_string(const Store & store) const; + static BuiltPathBuilt parse(const Store & store, std::string_view, std::string_view); + nlohmann::json toJSON(const Store & store) const; - GENERATE_CMP(BuiltPathBuilt, me->drvPath, me->outputs); + DECLARE_CMP(BuiltPathBuilt); }; using _BuiltPathRaw = std::variant< @@ -41,6 +86,7 @@ struct BuiltPath : _BuiltPathRaw { StorePathSet outPaths() const; RealisedPath::Set toRealisedPaths(Store & store) const; + nlohmann::json toJSON(const Store & store) const; }; typedef std::vector BuiltPaths; diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index 4fc197956..a88ba8134 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -98,7 +98,7 @@ EvalCommand::EvalCommand() EvalCommand::~EvalCommand() { if (evalState) - evalState->printStats(); + evalState->maybePrintStats(); } ref EvalCommand::getEvalStore() diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh index 96236b987..dafc0db3b 100644 --- a/src/libcmd/command.hh +++ b/src/libcmd/command.hh @@ -34,21 +34,28 @@ struct NixMultiCommand : virtual MultiCommand, virtual Command // For the overloaded run methods #pragma GCC diagnostic ignored "-Woverloaded-virtual" -/* A command that requires a Nix store. */ +/** + * A command that requires a \ref Store "Nix store". + */ struct StoreCommand : virtual Command { StoreCommand(); void run() override; ref getStore(); virtual ref createStore(); + /** + * Main entry point, with a `Store` provided + */ virtual void run(ref) = 0; private: std::shared_ptr _store; }; -/* A command that copies something between `--from` and `--to` - stores. */ +/** + * A command that copies something between `--from` and `--to` \ref + * Store stores. + */ struct CopyCommand : virtual StoreCommand { std::string srcUri, dstUri; @@ -60,6 +67,9 @@ struct CopyCommand : virtual StoreCommand ref getDstStore(); }; +/** + * A command that needs to evaluate Nix language expressions. + */ struct EvalCommand : virtual StoreCommand, MixEvalArgs { bool startReplOnEvalErrors = false; @@ -79,20 +89,26 @@ private: std::shared_ptr evalState; }; +/** + * A mixin class for commands that process flakes, adding a few standard + * flake-related options/flags. + */ struct MixFlakeOptions : virtual Args, EvalCommand { flake::LockFlags lockFlags; - std::optional needsFlakeInputCompletion = {}; - MixFlakeOptions(); - virtual std::vector getFlakesForCompletion() + /** + * The completion for some of these flags depends on the flake(s) in + * question. + * + * This method should be implemented to gather all flakerefs the + * command is operating with (presumably specified via some other + * arguments) so that the completions for these flags can use them. + */ + virtual std::vector getFlakeRefsForCompletion() { return {}; } - - void completeFlakeInput(std::string_view prefix); - - void completionHook() override; }; struct SourceExprCommand : virtual Args, MixFlakeOptions @@ -112,15 +128,35 @@ struct SourceExprCommand : virtual Args, MixFlakeOptions virtual Strings getDefaultFlakeAttrPathPrefixes(); - void completeInstallable(std::string_view prefix); + /** + * Complete an installable from the given prefix. 
+ */ + void completeInstallable(AddCompletions & completions, std::string_view prefix); + + /** + * Convenience wrapper around the underlying function to make setting the + * callback easier. + */ + CompleterClosure getCompleteInstallable(); }; +/** + * A mixin class for commands that need a read-only flag. + * + * What exactly is "read-only" is unspecified, but it will usually be + * the \ref Store "Nix store". + */ struct MixReadOnlyOption : virtual Args { MixReadOnlyOption(); }; -/* Like InstallablesCommand but the installables are not loaded */ +/** + * Like InstallablesCommand but the installables are not loaded. + * + * This is needed by `CmdRepl` which wants to load (and reload) the + * installables itself. + */ struct RawInstallablesCommand : virtual Args, SourceExprCommand { RawInstallablesCommand(); @@ -129,19 +165,22 @@ struct RawInstallablesCommand : virtual Args, SourceExprCommand void run(ref store) override; - // FIXME make const after CmdRepl's override is fixed up + // FIXME make const after `CmdRepl`'s override is fixed up virtual void applyDefaultInstallables(std::vector & rawInstallables); bool readFromStdIn = false; - std::vector getFlakesForCompletion() override; + std::vector getFlakeRefsForCompletion() override; private: std::vector rawInstallables; }; -/* A command that operates on a list of "installables", which can be - store paths, attribute paths, Nix expressions, etc. */ + +/** + * A command that operates on a list of "installables", which can be + * store paths, attribute paths, Nix expressions, etc. + */ struct InstallablesCommand : RawInstallablesCommand { virtual void run(ref store, Installables && installables) = 0; @@ -149,7 +188,9 @@ struct InstallablesCommand : RawInstallablesCommand void run(ref store, std::vector && rawInstallables) override; }; -/* A command that operates on exactly one "installable" */ +/** + * A command that operates on exactly one "installable". + */ struct InstallableCommand : virtual Args, SourceExprCommand { InstallableCommand(); @@ -158,10 +199,7 @@ struct InstallableCommand : virtual Args, SourceExprCommand void run(ref store) override; - std::vector getFlakesForCompletion() override - { - return {_installable}; - } + std::vector getFlakeRefsForCompletion() override; private: @@ -175,7 +213,12 @@ struct MixOperateOnOptions : virtual Args MixOperateOnOptions(); }; -/* A command that operates on zero or more store paths. */ +/** + * A command that operates on zero or more extant store paths. + * + * If the argument the user passes is a some sort of recipe for a path + * not yet built, it must be built first. + */ struct BuiltPathsCommand : InstallablesCommand, virtual MixOperateOnOptions { private: @@ -207,7 +250,9 @@ struct StorePathsCommand : public BuiltPathsCommand void run(ref store, BuiltPaths && paths) override; }; -/* A command that operates on exactly one store path. */ +/** + * A command that operates on exactly one store path. + */ struct StorePathCommand : public StorePathsCommand { virtual void run(ref store, const StorePath & storePath) = 0; @@ -215,7 +260,9 @@ struct StorePathCommand : public StorePathsCommand void run(ref store, StorePaths && storePaths) override; }; -/* A helper class for registering commands globally. */ +/** + * A helper class for registering \ref Command commands globally. 
+ */ struct RegisterCommand { typedef std::map, std::function()>> Commands; @@ -271,13 +318,18 @@ struct MixEnvironment : virtual Args { MixEnvironment(); - /* Modify global environ based on ignoreEnvironment, keep, and unset. It's expected that exec will be called before this class goes out of scope, otherwise environ will become invalid. */ + /*** + * Modify global environ based on `ignoreEnvironment`, `keep`, and + * `unset`. It's expected that exec will be called before this class + * goes out of scope, otherwise `environ` will become invalid. + */ void setEnviron(); }; -void completeFlakeRef(ref store, std::string_view prefix); +void completeFlakeRef(AddCompletions & completions, ref store, std::string_view prefix); void completeFlakeRefWithFragment( + AddCompletions & completions, ref evalState, flake::LockFlags lockFlags, Strings attrPathPrefixes, diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index e36bda52f..e53bc4c01 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -9,6 +9,7 @@ #include "flake/flakeref.hh" #include "store-api.hh" #include "command.hh" +#include "tarball.hh" namespace nix { @@ -132,8 +133,8 @@ MixEvalArgs::MixEvalArgs() if (to.subdir != "") extraAttrs["dir"] = to.subdir; fetchers::overrideRegistry(from.input, to.input, extraAttrs); }}, - .completer = {[&](size_t, std::string_view prefix) { - completeFlakeRef(openStore(), prefix); + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { + completeFlakeRef(completions, openStore(), prefix); }} }); @@ -168,14 +169,14 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s) { if (EvalSettings::isPseudoUrl(s)) { auto storePath = fetchers::downloadTarball( - state.store, EvalSettings::resolvePseudoUrl(s), "source", false).tree.storePath; + state.store, EvalSettings::resolvePseudoUrl(s), "source", false).storePath; return state.rootPath(CanonPath(state.store->toRealPath(storePath))); } else if (hasPrefix(s, "flake:")) { experimentalFeatureSettings.require(Xp::Flakes); auto flakeRef = parseFlakeRef(std::string(s.substr(6)), {}, true, false); - auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first.storePath; + auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first; return state.rootPath(CanonPath(state.store->toRealPath(storePath))); } diff --git a/src/libcmd/installable-attr-path.cc b/src/libcmd/installable-attr-path.cc index b35ca2910..06e507872 100644 --- a/src/libcmd/installable-attr-path.cc +++ b/src/libcmd/installable-attr-path.cc @@ -80,7 +80,7 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths() [&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec { return e; }, - }, extendedOutputsSpec.raw()); + }, extendedOutputsSpec.raw); auto [iter, didInsert] = byDrvPath.emplace(*drvPath, newOutputs); @@ -92,10 +92,11 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths() for (auto & [drvPath, outputs] : byDrvPath) res.push_back({ .path = DerivedPath::Built { - .drvPath = drvPath, + .drvPath = makeConstantStorePathRef(drvPath), .outputs = outputs, }, .info = make_ref(ExtraPathInfoValue::Value { + .extendedOutputsSpec = outputs, /* FIXME: reconsider backwards compatibility above so we can fill in this info. */ }), @@ -114,7 +115,7 @@ InstallableAttrPath InstallableAttrPath::parse( return { state, cmd, v, prefix == "." ? 
"" : std::string { prefix }, - extendedOutputsSpec + std::move(extendedOutputsSpec), }; } diff --git a/src/libcmd/installable-derived-path.cc b/src/libcmd/installable-derived-path.cc index 6ecf54b7c..4d1f83a1c 100644 --- a/src/libcmd/installable-derived-path.cc +++ b/src/libcmd/installable-derived-path.cc @@ -18,14 +18,7 @@ DerivedPathsWithInfo InstallableDerivedPath::toDerivedPaths() std::optional InstallableDerivedPath::getStorePath() { - return std::visit(overloaded { - [&](const DerivedPath::Built & bfd) { - return bfd.drvPath; - }, - [&](const DerivedPath::Opaque & bo) { - return bo.path; - }, - }, derivedPath.raw()); + return derivedPath.getBaseStorePath(); } InstallableDerivedPath InstallableDerivedPath::parse( @@ -42,7 +35,7 @@ InstallableDerivedPath InstallableDerivedPath::parse( // Remove this prior to stabilizing the new CLI. if (storePath.isDerivation()) { auto oldDerivedPath = DerivedPath::Built { - .drvPath = storePath, + .drvPath = makeConstantStorePathRef(storePath), .outputs = OutputsSpec::All { }, }; warn( @@ -55,12 +48,14 @@ InstallableDerivedPath InstallableDerivedPath::parse( }, // If the user did use ^, we just do exactly what is written. [&](const ExtendedOutputsSpec::Explicit & outputSpec) -> DerivedPath { + auto drv = make_ref(SingleDerivedPath::parse(*store, prefix)); + drvRequireExperiment(*drv); return DerivedPath::Built { - .drvPath = store->parseStorePath(prefix), + .drvPath = std::move(drv), .outputs = outputSpec, }; }, - }, extendedOutputsSpec.raw()); + }, extendedOutputsSpec.raw); return InstallableDerivedPath { store, std::move(derivedPath), diff --git a/src/libcmd/installable-flake.cc b/src/libcmd/installable-flake.cc index 4da9b131b..2f428cb7e 100644 --- a/src/libcmd/installable-flake.cc +++ b/src/libcmd/installable-flake.cc @@ -28,6 +28,11 @@ namespace nix { std::vector InstallableFlake::getActualAttrPaths() { std::vector res; + if (attrPaths.size() == 1 && attrPaths.front().starts_with(".")){ + attrPaths.front().erase(0,1); + res.push_back(attrPaths.front()); + return res; + } for (auto & prefix : prefixes) res.push_back(prefix + *attrPaths.begin()); @@ -118,7 +123,7 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths() return {{ .path = DerivedPath::Built { - .drvPath = std::move(drvPath), + .drvPath = makeConstantStorePathRef(std::move(drvPath)), .outputs = std::visit(overloaded { [&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec { std::set outputsToInstall; @@ -141,7 +146,7 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths() [&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec { return e; }, - }, extendedOutputsSpec.raw()), + }, extendedOutputsSpec.raw), }, .info = make_ref( ExtraPathInfoValue::Value { diff --git a/src/libcmd/installable-value.cc b/src/libcmd/installable-value.cc index 1eff293cc..08ad35105 100644 --- a/src/libcmd/installable-value.cc +++ b/src/libcmd/installable-value.cc @@ -55,7 +55,8 @@ std::optional InstallableValue::trySinglePathToDerivedPaths else if (v.type() == nString) { return {{ - .path = state->coerceToDerivedPath(pos, v, errorCtx), + .path = DerivedPath::fromSingle( + state->coerceToSingleDerivedPath(pos, v, errorCtx)), .info = make_ref(), }}; } diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 9d593a01f..eff18bbf6 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -28,6 +28,20 @@ namespace nix { +static void completeFlakeInputPath( + AddCompletions & completions, + ref evalState, + const std::vector & flakeRefs, + std::string_view prefix) +{ + 
for (auto & flakeRef : flakeRefs) { + auto flake = flake::getFlake(*evalState, flakeRef, true); + for (auto & input : flake.inputs) + if (hasPrefix(input.first, prefix)) + completions.add(input.first); + } +} + MixFlakeOptions::MixFlakeOptions() { auto category = "Common flake-related options"; @@ -79,8 +93,8 @@ MixFlakeOptions::MixFlakeOptions() .handler = {[&](std::string s) { lockFlags.inputUpdates.insert(flake::parseInputPath(s)); }}, - .completer = {[&](size_t, std::string_view prefix) { - needsFlakeInputCompletion = {std::string(prefix)}; + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { + completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix); }} }); @@ -95,11 +109,12 @@ MixFlakeOptions::MixFlakeOptions() flake::parseInputPath(inputPath), parseFlakeRef(flakeRef, absPath("."), true)); }}, - .completer = {[&](size_t n, std::string_view prefix) { - if (n == 0) - needsFlakeInputCompletion = {std::string(prefix)}; - else if (n == 1) - completeFlakeRef(getEvalState()->store, prefix); + .completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) { + if (n == 0) { + completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix); + } else if (n == 1) { + completeFlakeRef(completions, getEvalState()->store, prefix); + } }} }); @@ -146,30 +161,12 @@ MixFlakeOptions::MixFlakeOptions() } } }}, - .completer = {[&](size_t, std::string_view prefix) { - completeFlakeRef(getEvalState()->store, prefix); + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { + completeFlakeRef(completions, getEvalState()->store, prefix); }} }); } -void MixFlakeOptions::completeFlakeInput(std::string_view prefix) -{ - auto evalState = getEvalState(); - for (auto & flakeRefS : getFlakesForCompletion()) { - auto flakeRef = parseFlakeRefWithFragment(expandTilde(flakeRefS), absPath(".")).first; - auto flake = flake::getFlake(*evalState, flakeRef, true); - for (auto & input : flake.inputs) - if (hasPrefix(input.first, prefix)) - completions->add(input.first); - } -} - -void MixFlakeOptions::completionHook() -{ - if (auto & prefix = needsFlakeInputCompletion) - completeFlakeInput(*prefix); -} - SourceExprCommand::SourceExprCommand() { addFlag({ @@ -226,11 +223,18 @@ Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes() }; } -void SourceExprCommand::completeInstallable(std::string_view prefix) +Args::CompleterClosure SourceExprCommand::getCompleteInstallable() +{ + return [this](AddCompletions & completions, size_t, std::string_view prefix) { + completeInstallable(completions, prefix); + }; +} + +void SourceExprCommand::completeInstallable(AddCompletions & completions, std::string_view prefix) { try { if (file) { - completionType = ctAttrs; + completions.setType(AddCompletions::Type::Attrs); evalSettings.pureEval = false; auto state = getEvalState(); @@ -265,14 +269,15 @@ void SourceExprCommand::completeInstallable(std::string_view prefix) std::string name = state->symbols[i.name]; if (name.find(searchWord) == 0) { if (prefix_ == "") - completions->add(name); + completions.add(name); else - completions->add(prefix_ + "." + name); + completions.add(prefix_ + "." 
+ name); } } } } else { completeFlakeRefWithFragment( + completions, getEvalState(), lockFlags, getDefaultFlakeAttrPathPrefixes(), @@ -285,6 +290,7 @@ void SourceExprCommand::completeInstallable(std::string_view prefix) } void completeFlakeRefWithFragment( + AddCompletions & completions, ref evalState, flake::LockFlags lockFlags, Strings attrPathPrefixes, @@ -296,11 +302,16 @@ void completeFlakeRefWithFragment( try { auto hash = prefix.find('#'); if (hash == std::string::npos) { - completeFlakeRef(evalState->store, prefix); + completeFlakeRef(completions, evalState->store, prefix); } else { - completionType = ctAttrs; + completions.setType(AddCompletions::Type::Attrs); auto fragment = prefix.substr(hash + 1); + std::string prefixRoot = ""; + if (fragment.starts_with(".")){ + fragment = fragment.substr(1); + prefixRoot = "."; + } auto flakeRefS = std::string(prefix.substr(0, hash)); auto flakeRef = parseFlakeRef(expandTilde(flakeRefS), absPath(".")); @@ -309,6 +320,9 @@ void completeFlakeRefWithFragment( auto root = evalCache->getRoot(); + if (prefixRoot == "."){ + attrPathPrefixes.clear(); + } /* Complete 'fragment' relative to all the attrpath prefixes as well as the root of the flake. */ @@ -333,7 +347,7 @@ void completeFlakeRefWithFragment( auto attrPath2 = (*attr)->getAttrPath(attr2); /* Strip the attrpath prefix. */ attrPath2.erase(attrPath2.begin(), attrPath2.begin() + attrPathPrefix.size()); - completions->add(flakeRefS + "#" + concatStringsSep(".", evalState->symbols.resolve(attrPath2))); + completions.add(flakeRefS + "#" + prefixRoot + concatStringsSep(".", evalState->symbols.resolve(attrPath2))); } } } @@ -344,7 +358,7 @@ void completeFlakeRefWithFragment( for (auto & attrPath : defaultFlakeAttrPaths) { auto attr = root->findAlongAttrPath(parseAttrPath(*evalState, attrPath)); if (!attr) continue; - completions->add(flakeRefS + "#"); + completions.add(flakeRefS + "#" + prefixRoot); } } } @@ -353,15 +367,15 @@ void completeFlakeRefWithFragment( } } -void completeFlakeRef(ref store, std::string_view prefix) +void completeFlakeRef(AddCompletions & completions, ref store, std::string_view prefix) { if (!experimentalFeatureSettings.isEnabled(Xp::Flakes)) return; if (prefix == "") - completions->add("."); + completions.add("."); - completeDir(0, prefix); + Args::completeDir(completions, 0, prefix); /* Look for registry entries that match the prefix. */ for (auto & registry : fetchers::getRegistries(store)) { @@ -370,10 +384,10 @@ void completeFlakeRef(ref store, std::string_view prefix) if (!hasPrefix(prefix, "flake:") && hasPrefix(from, "flake:")) { std::string from2(from, 6); if (hasPrefix(from2, prefix)) - completions->add(from2); + completions.add(from2); } else { if (hasPrefix(from, prefix)) - completions->add(from); + completions.add(from); } } } @@ -459,7 +473,7 @@ Installables SourceExprCommand::parseInstallables( result.push_back( make_ref( InstallableAttrPath::parse( - state, *this, vFile, prefix, extendedOutputsSpec))); + state, *this, vFile, std::move(prefix), std::move(extendedOutputsSpec)))); } } else { @@ -475,7 +489,7 @@ Installables SourceExprCommand::parseInstallables( if (prefix.find('/') != std::string::npos) { try { result.push_back(make_ref( - InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec))); + InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec.raw))); continue; } catch (BadStorePath &) { } catch (...) 
{ @@ -491,7 +505,7 @@ Installables SourceExprCommand::parseInstallables( getEvalState(), std::move(flakeRef), fragment, - extendedOutputsSpec, + std::move(extendedOutputsSpec), getDefaultFlakeAttrPaths(), getDefaultFlakeAttrPathPrefixes(), lockFlags)); @@ -515,6 +529,30 @@ ref SourceExprCommand::parseInstallable( return installables.front(); } +static SingleBuiltPath getBuiltPath(ref evalStore, ref store, const SingleDerivedPath & b) +{ + return std::visit( + overloaded{ + [&](const SingleDerivedPath::Opaque & bo) -> SingleBuiltPath { + return SingleBuiltPath::Opaque { bo.path }; + }, + [&](const SingleDerivedPath::Built & bfd) -> SingleBuiltPath { + auto drvPath = getBuiltPath(evalStore, store, *bfd.drvPath); + // Resolving this instead of `bfd` will yield the same result, but avoid duplicative work. + SingleDerivedPath::Built truncatedBfd { + .drvPath = makeConstantStorePathRef(drvPath.outPath()), + .output = bfd.output, + }; + auto outputPath = resolveDerivedPath(*store, truncatedBfd, &*evalStore); + return SingleBuiltPath::Built { + .drvPath = make_ref(std::move(drvPath)), + .output = { bfd.output, outputPath }, + }; + }, + }, + b.raw()); +} + std::vector Installable::build( ref evalStore, ref store, @@ -568,7 +606,10 @@ std::vector, BuiltPathWithResult>> Installable::build [&](const DerivedPath::Built & bfd) { auto outputs = resolveDerivedPath(*store, bfd, &*evalStore); res.push_back({aux.installable, { - .path = BuiltPath::Built { bfd.drvPath, outputs }, + .path = BuiltPath::Built { + .drvPath = make_ref(getBuiltPath(evalStore, store, *bfd.drvPath)), + .outputs = outputs, + }, .info = aux.info}}); }, [&](const DerivedPath::Opaque & bo) { @@ -597,7 +638,10 @@ std::vector, BuiltPathWithResult>> Installable::build for (auto & [outputName, realisation] : buildResult.builtOutputs) outputs.emplace(outputName, realisation.outPath); res.push_back({aux.installable, { - .path = BuiltPath::Built { bfd.drvPath, outputs }, + .path = BuiltPath::Built { + .drvPath = make_ref(getBuiltPath(evalStore, store, *bfd.drvPath)), + .outputs = outputs, + }, .info = aux.info, .result = buildResult}}); }, @@ -691,7 +735,7 @@ StorePathSet Installable::toDerivations( : throw Error("argument '%s' did not evaluate to a derivation", i->what())); }, [&](const DerivedPath::Built & bfd) { - drvPaths.insert(bfd.drvPath); + drvPaths.insert(resolveDerivedPath(*store, *bfd.drvPath)); }, }, b.path.raw()); @@ -709,9 +753,7 @@ RawInstallablesCommand::RawInstallablesCommand() expectArgs({ .label = "installables", .handler = {&rawInstallables}, - .completer = {[&](size_t, std::string_view prefix) { - completeInstallable(prefix); - }} + .completer = getCompleteInstallable(), }); } @@ -724,6 +766,17 @@ void RawInstallablesCommand::applyDefaultInstallables(std::vector & } } +std::vector RawInstallablesCommand::getFlakeRefsForCompletion() +{ + applyDefaultInstallables(rawInstallables); + std::vector res; + for (auto i : rawInstallables) + res.push_back(parseFlakeRefWithFragment( + expandTilde(i), + absPath(".")).first); + return res; +} + void RawInstallablesCommand::run(ref store) { if (readFromStdIn && !isatty(STDIN_FILENO)) { @@ -737,10 +790,13 @@ void RawInstallablesCommand::run(ref store) run(store, std::move(rawInstallables)); } -std::vector RawInstallablesCommand::getFlakesForCompletion() +std::vector InstallableCommand::getFlakeRefsForCompletion() { - applyDefaultInstallables(rawInstallables); - return rawInstallables; + return { + parseFlakeRefWithFragment( + expandTilde(_installable), + absPath(".")).first + }; } void 
InstallablesCommand::run(ref store, std::vector && rawInstallables) @@ -756,9 +812,7 @@ InstallableCommand::InstallableCommand() .label = "installable", .optional = true, .handler = {&_installable}, - .completer = {[&](size_t, std::string_view prefix) { - completeInstallable(prefix); - }} + .completer = getCompleteInstallable(), }); } diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index d15162e76..2e17a29a7 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -488,35 +488,40 @@ bool NixRepl::processLine(std::string line) std::cout << "The following commands are available:\n" << "\n" - << " Evaluate and print expression\n" - << " = Bind expression to variable\n" - << " :a Add attributes from resulting set to scope\n" - << " :b Build a derivation\n" - << " :bl Build a derivation, creating GC roots in the working directory\n" - << " :e Open package or function in $EDITOR\n" - << " :i Build derivation, then install result into current profile\n" - << " :l Load Nix expression and add it to scope\n" - << " :lf Load Nix flake and add it to scope\n" - << " :p Evaluate and print expression recursively\n" - << " :q Exit nix-repl\n" - << " :r Reload all files\n" - << " :sh Build dependencies of derivation, then start nix-shell\n" - << " :t Describe result of evaluation\n" - << " :u Build derivation, then start nix-shell\n" - << " :doc Show documentation of a builtin function\n" - << " :log Show logs for a derivation\n" - << " :te [bool] Enable, disable or toggle showing traces for errors\n" + << " Evaluate and print expression\n" + << " = Bind expression to variable\n" + << " :a, :add Add attributes from resulting set to scope\n" + << " :b Build a derivation\n" + << " :bl Build a derivation, creating GC roots in the\n" + << " working directory\n" + << " :e, :edit Open package or function in $EDITOR\n" + << " :i Build derivation, then install result into\n" + << " current profile\n" + << " :l, :load Load Nix expression and add it to scope\n" + << " :lf, :load-flake Load Nix flake and add it to scope\n" + << " :p, :print Evaluate and print expression recursively\n" + << " :q, :quit Exit nix-repl\n" + << " :r, :reload Reload all files\n" + << " :sh Build dependencies of derivation, then start\n" + << " nix-shell\n" + << " :t Describe result of evaluation\n" + << " :u Build derivation, then start nix-shell\n" + << " :doc Show documentation of a builtin function\n" + << " :log Show logs for a derivation\n" + << " :te, :trace-enable [bool] Enable, disable or toggle showing traces for\n" + << " errors\n" + << " :?, :help Brings up this help menu\n" ; if (state->debugRepl) { std::cout << "\n" << " Debug mode commands\n" - << " :env Show env stack\n" - << " :bt Show trace stack\n" - << " :st Show current trace\n" - << " :st Change to another trace in the stack\n" - << " :c Go until end of program, exception, or builtins.break\n" - << " :s Go one step\n" + << " :env Show env stack\n" + << " :bt, :backtrace Show trace stack\n" + << " :st Show current trace\n" + << " :st Change to another trace in the stack\n" + << " :c, :continue Go until end of program, exception, or builtins.break\n" + << " :s, :step Go one step\n" ; } @@ -648,7 +653,7 @@ bool NixRepl::processLine(std::string line) if (command == ":b" || command == ":bl") { state->store->buildPaths({ DerivedPath::Built { - .drvPath = drvPath, + .drvPath = makeConstantStorePathRef(drvPath), .outputs = OutputsSpec::All { }, }, }); @@ -917,7 +922,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m case nString: str 
<< ANSI_WARNING; - printLiteralString(str, v.string.s); + printLiteralString(str, v.string_view()); str << ANSI_NORMAL; break; diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc index ab654c1b0..d12345710 100644 --- a/src/libexpr/attr-path.cc +++ b/src/libexpr/attr-path.cc @@ -132,7 +132,7 @@ std::pair findPackageFilename(EvalState & state, Value & v if (colon == std::string::npos) fail(); std::string filename(fn, 0, colon); auto lineno = std::stoi(std::string(fn, colon + 1, std::string::npos)); - return {CanonPath(fn.substr(0, colon)), lineno}; + return {SourcePath{path.accessor, CanonPath(fn.substr(0, colon))}, lineno}; } catch (std::invalid_argument & e) { fail(); abort(); diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 9e734e654..10fc799a9 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -50,7 +50,7 @@ struct AttrDb Path cacheDir = getCacheDir() + "/nix/eval-cache-v5"; createDirs(cacheDir); - Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite"; + Path dbPath = cacheDir + "/" + fingerprint.to_string(HashFormat::Base16, false) + ".sqlite"; state->db = SQLite(dbPath); state->db.isCache(); @@ -440,8 +440,8 @@ Value & AttrCursor::forceValue() if (root->db && (!cachedValue || std::get_if(&cachedValue->second))) { if (v.type() == nString) - cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context), - string_t{v.string.s, {}}}; + cachedValue = {root->db->setString(getKey(), v.c_str(), v.context()), + string_t{v.c_str(), {}}}; else if (v.type() == nPath) { auto path = v.path().path; cachedValue = {root->db->setString(getKey(), path.abs()), string_t{path.abs(), {}}}; @@ -582,7 +582,7 @@ std::string AttrCursor::getString() if (v.type() != nString && v.type() != nPath) root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); - return v.type() == nString ? v.string.s : v.path().to_string(); + return v.type() == nString ? v.c_str() : v.path().to_string(); } string_t AttrCursor::getStringWithContext() @@ -599,12 +599,12 @@ string_t AttrCursor::getStringWithContext() return d.drvPath; }, [&](const NixStringContextElem::Built & b) -> const StorePath & { - return b.drvPath; + return b.drvPath->getBaseStorePath(); }, [&](const NixStringContextElem::Opaque & o) -> const StorePath & { return o.path; }, - }, c.raw()); + }, c.raw); if (!root->state.store->isValidPath(path)) { valid = false; break; @@ -624,7 +624,7 @@ string_t AttrCursor::getStringWithContext() if (v.type() == nString) { NixStringContext context; copyContext(v, context); - return {v.string.s, std::move(context)}; + return {v.c_str(), std::move(context)}; } else if (v.type() == nPath) return {v.path().to_string(), {}}; diff --git a/src/libexpr/eval-settings.cc b/src/libexpr/eval-settings.cc index 422aaf8d5..93b4a5289 100644 --- a/src/libexpr/eval-settings.cc +++ b/src/libexpr/eval-settings.cc @@ -63,7 +63,7 @@ Strings EvalSettings::getDefaultNixPath() }; if (!evalSettings.restrictEval && !evalSettings.pureEval) { - add(settings.useXDGBaseDirectories ? getStateDir() + "/nix/defexpr/channels" : getHome() + "/.nix-defexpr/channels"); + add(getNixDefExpr() + "/channels"); add(rootChannelsDir() + "/nixpkgs", "nixpkgs"); add(rootChannelsDir()); } @@ -92,4 +92,11 @@ EvalSettings evalSettings; static GlobalConfig::Register rEvalSettings(&evalSettings); +Path getNixDefExpr() +{ + return settings.useXDGBaseDirectories + ? 
getStateDir() + "/nix/defexpr" + : getHome() + "/.nix-defexpr"; +} + } diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh index 043af6cab..5473d688e 100644 --- a/src/libexpr/eval-settings.hh +++ b/src/libexpr/eval-settings.hh @@ -21,7 +21,7 @@ struct EvalSettings : Config R"( List of directories to be searched for `<...>` file references - In particular, outside of [pure evaluation mode](#conf-pure-evaluation), this determines the value of + In particular, outside of [pure evaluation mode](#conf-pure-eval), this determines the value of [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath). )"}; @@ -29,10 +29,12 @@ struct EvalSettings : Config this, false, "restrict-eval", R"( If set to `true`, the Nix evaluator will not allow access to any - files outside of the Nix search path (as set via the `NIX_PATH` - environment variable or the `-I` option), or to URIs outside of - [`allowed-uris`](../command-ref/conf-file.md#conf-allowed-uris). - The default is `false`. + files outside of + [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath), + or to URIs outside of + [`allowed-uris`](@docroot@/command-ref/conf-file.md#conf-allowed-uris). + + Also the default value for [`nix-path`](#conf-nix-path) is ignored, such that only explicitly set search path entries are taken into account. )"}; Setting pureEval{this, false, "pure-eval", @@ -40,18 +42,22 @@ struct EvalSettings : Config Pure evaluation mode ensures that the result of Nix expressions is fully determined by explicitly declared inputs, and not influenced by external state: - Restrict file system and network access to files specified by cryptographic hash - - Disable [`bultins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) and [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime) + - Disable impure constants: + - [`bultins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) + - [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime) + - [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath) )" }; Setting enableImportFromDerivation{ this, true, "allow-import-from-derivation", R"( - By default, Nix allows you to `import` from a derivation, allowing - building at evaluation time. With this option set to false, Nix will - throw an error when evaluating an expression that uses this feature, - allowing users to ensure their evaluation will not require any - builds to take place. + By default, Nix allows [Import from Derivation](@docroot@/language/import-from-derivation.md). + + With this option set to `false`, Nix will throw an error when evaluating an expression that uses this feature, + even when the required store object is readily available. + This ensures that evaluation will not require any builds to take place, + regardless of the state of the store. )"}; Setting allowedUris{this, {}, "allowed-uris", @@ -95,4 +101,9 @@ struct EvalSettings : Config extern EvalSettings evalSettings; +/** + * Conventionally part of the default nix path in impure mode. 
+ */ +Path getNixDefExpr(); + } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index a8e6baea6..d26cde423 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -12,6 +12,8 @@ #include "function-trace.hh" #include "profiles.hh" #include "print.hh" +#include "fs-input-accessor.hh" +#include "memory-input-accessor.hh" #include #include @@ -114,7 +116,7 @@ void Value::print(const SymbolTable &symbols, std::ostream &str, printLiteralBool(str, boolean); break; case tString: - printLiteralString(str, string.s); + printLiteralString(str, string_view()); break; case tPath: str << path().to_string(); // !!! escaping? @@ -339,7 +341,7 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env) Value nameValue; name.expr->eval(state, env, nameValue); state.forceStringNoCtx(nameValue, noPos, "while evaluating an attribute name"); - return state.symbols.create(nameValue.string.s); + return state.symbols.create(nameValue.string_view()); } } @@ -503,7 +505,17 @@ EvalState::EvalState( , sOutputSpecified(symbols.create("outputSpecified")) , repair(NoRepair) , emptyBindings(0) - , derivationInternal(rootPath(CanonPath("/builtin/derivation.nix"))) + , rootFS(makeFSInputAccessor(CanonPath::root)) + , corepkgsFS(makeMemoryInputAccessor()) + , internalFS(makeMemoryInputAccessor()) + , derivationInternal{corepkgsFS->addFile( + CanonPath("derivation-internal.nix"), + #include "primops/derivation.nix.gen.hh" + )} + , callFlakeInternal{internalFS->addFile( + CanonPath("call-flake.nix"), + #include "flake/call-flake.nix.gen.hh" + )} , store(store) , buildStore(buildStore ? buildStore : store) , debugRepl(nullptr) @@ -527,9 +539,9 @@ EvalState::EvalState( /* Initialise the Nix expression search path. */ if (!evalSettings.pureEval) { for (auto & i : _searchPath.elements) - addToSearchPath(SearchPath::Elem {i}); + searchPath.elements.emplace_back(SearchPath::Elem {i}); for (auto & i : evalSettings.nixPath.get()) - addToSearchPath(SearchPath::Elem::parse(i)); + searchPath.elements.emplace_back(SearchPath::Elem::parse(i)); } if (evalSettings.restrictEval || evalSettings.pureEval) { @@ -539,7 +551,7 @@ EvalState::EvalState( auto r = resolveSearchPathPath(i.path); if (!r) continue; - auto path = *std::move(r); + auto path = std::move(*r); if (store->isInStore(path)) { try { @@ -555,6 +567,11 @@ EvalState::EvalState( } } + corepkgsFS->addFile( + CanonPath("fetchurl.nix"), + #include "fetchurl.nix.gen.hh" + ); + createBaseEnv(); } @@ -585,6 +602,9 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & SourcePath EvalState::checkSourcePath(const SourcePath & path_) { + // Don't check non-rootFS accessors, they're in a different namespace. + if (path_.accessor != ref(rootFS)) return path_; + if (!allowedPaths) return path_; auto i = resolvedPaths.find(path_.path.abs()); @@ -599,8 +619,6 @@ SourcePath EvalState::checkSourcePath(const SourcePath & path_) */ Path abspath = canonPath(path_.path.abs()); - if (hasPrefix(abspath, corepkgsPrefix)) return CanonPath(abspath); - for (auto & i : *allowedPaths) { if (isDirOrInDir(abspath, i)) { found = true; @@ -617,7 +635,7 @@ SourcePath EvalState::checkSourcePath(const SourcePath & path_) /* Resolve symlinks. 
*/ debug("checking access to '%s'", abspath); - SourcePath path = CanonPath(canonPath(abspath, true)); + SourcePath path = rootPath(CanonPath(canonPath(abspath, true))); for (auto & i : *allowedPaths) { if (isDirOrInDir(path.path.abs(), i)) { @@ -649,12 +667,12 @@ void EvalState::checkURI(const std::string & uri) /* If the URI is a path, then check it against allowedPaths as well. */ if (hasPrefix(uri, "/")) { - checkSourcePath(CanonPath(uri)); + checkSourcePath(rootPath(CanonPath(uri))); return; } if (hasPrefix(uri, "file://")) { - checkSourcePath(CanonPath(std::string(uri, 7))); + checkSourcePath(rootPath(CanonPath(std::string(uri, 7)))); return; } @@ -950,7 +968,7 @@ void Value::mkStringMove(const char * s, const NixStringContext & context) void Value::mkPath(const SourcePath & path) { - mkPath(makeImmutableString(path.path.abs())); + mkPath(&*path.accessor, makeImmutableString(path.path.abs())); } @@ -1027,24 +1045,67 @@ void EvalState::mkStorePathString(const StorePath & p, Value & v) } +std::string EvalState::mkOutputStringRaw( + const SingleDerivedPath::Built & b, + std::optional optStaticOutputPath, + const ExperimentalFeatureSettings & xpSettings) +{ + /* In practice, this is testing for the case of CA derivations, or + dynamic derivations. */ + return optStaticOutputPath + ? store->printStorePath(std::move(*optStaticOutputPath)) + /* Downstream we would substitute this for an actual path once + we build the floating CA derivation */ + : DownstreamPlaceholder::fromSingleDerivedPathBuilt(b, xpSettings).render(); +} + + void EvalState::mkOutputString( Value & value, - const StorePath & drvPath, - const std::string outputName, - std::optional optOutputPath, + const SingleDerivedPath::Built & b, + std::optional optStaticOutputPath, const ExperimentalFeatureSettings & xpSettings) { value.mkString( - optOutputPath - ? 
store->printStorePath(*std::move(optOutputPath)) - /* Downstream we would substitute this for an actual path once - we build the floating CA derivation */ - : DownstreamPlaceholder::unknownCaOutput(drvPath, outputName, xpSettings).render(), + mkOutputStringRaw(b, optStaticOutputPath, xpSettings), + NixStringContext { b }); +} + + +std::string EvalState::mkSingleDerivedPathStringRaw( + const SingleDerivedPath & p) +{ + return std::visit(overloaded { + [&](const SingleDerivedPath::Opaque & o) { + return store->printStorePath(o.path); + }, + [&](const SingleDerivedPath::Built & b) { + auto optStaticOutputPath = std::visit(overloaded { + [&](const SingleDerivedPath::Opaque & o) { + auto drv = store->readDerivation(o.path); + auto i = drv.outputs.find(b.output); + if (i == drv.outputs.end()) + throw Error("derivation '%s' does not have output '%s'", b.drvPath->to_string(*store), b.output); + return i->second.path(*store, drv.name, b.output); + }, + [&](const SingleDerivedPath::Built & o) -> std::optional { + return std::nullopt; + }, + }, b.drvPath->raw()); + return mkOutputStringRaw(b, optStaticOutputPath); + } + }, p.raw()); +} + + +void EvalState::mkSingleDerivedPathString( + const SingleDerivedPath & p, + Value & v) +{ + v.mkString( + mkSingleDerivedPathStringRaw(p), NixStringContext { - NixStringContextElem::Built { - .drvPath = drvPath, - .output = outputName, - } + std::visit([](auto && v) -> NixStringContextElem { return v; }, p), }); } @@ -1122,24 +1183,6 @@ void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial if (!e) e = parseExprFromFile(checkSourcePath(resolvedPath)); - cacheFile(path, resolvedPath, e, v, mustBeTrivial); -} - - -void EvalState::resetFileCache() -{ - fileEvalCache.clear(); - fileParseCache.clear(); -} - - -void EvalState::cacheFile( - const SourcePath & path, - const SourcePath & resolvedPath, - Expr * e, - Value & v, - bool mustBeTrivial) -{ fileParseCache[resolvedPath] = e; try { @@ -1168,6 +1211,13 @@ void EvalState::cacheFile( } +void EvalState::resetFileCache() +{ + fileEvalCache.clear(); + fileParseCache.clear(); +} + + void EvalState::eval(Expr * e, Value & v) { e->eval(*this, baseEnv, v); @@ -1300,7 +1350,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) if (nameVal.type() == nNull) continue; state.forceStringNoCtx(nameVal, i.pos, "while evaluating the name of a dynamic attribute"); - auto nameSym = state.symbols.create(nameVal.string.s); + auto nameSym = state.symbols.create(nameVal.string_view()); Bindings::iterator j = v.attrs->find(nameSym); if (j != v.attrs->end()) state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow(); @@ -1994,7 +2044,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) else if (firstType == nPath) { if (!context.empty()) state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow(); - v.mkPath(CanonPath(canonPath(str()))); + v.mkPath(state.rootPath(CanonPath(canonPath(str())))); } else v.mkStringMove(c_str(), context); } @@ -2112,7 +2162,7 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string forceValue(v, pos); if (v.type() != nString) error("value is %1% while a string was expected", showType(v)).debugThrow(); - return v.string.s; + return v.string_view(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; @@ -2139,8 +2189,8 @@ std::string_view 
EvalState::forceString(Value & v, NixStringContext & context, c std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx) { auto s = forceString(v, pos, errorCtx); - if (v.string.context) { - error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string.s, v.string.context[0]).withTrace(pos, errorCtx).debugThrow(); + if (v.context()) { + error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow(); } return s; } @@ -2153,7 +2203,7 @@ bool EvalState::isDerivation(Value & v) if (i == v.attrs->end()) return false; forceValue(*i->value, i->pos); if (i->value->type() != nString) return false; - return strcmp(i->value->string.s, "derivation") == 0; + return i->value->string_view().compare("derivation") == 0; } @@ -2185,7 +2235,7 @@ BackedStringView EvalState::coerceToString( if (v.type() == nString) { copyContext(v, context); - return std::string_view(v.string.s); + return v.string_view(); } if (v.type() == nPath) { @@ -2193,7 +2243,7 @@ BackedStringView EvalState::coerceToString( !canonicalizePath && !copyToStore ? // FIXME: hack to preserve path literals that end in a // slash, as in /foo/${x}. - v._path + v._path.path : copyToStore ? store->printStorePath(copyPathToStore(context, v.path())) : std::string(v.path().path.abs()); @@ -2247,7 +2297,7 @@ BackedStringView EvalState::coerceToString( && (!v2->isList() || v2->listSize() != 0)) result += " "; } - return std::move(result); + return result; } } @@ -2267,7 +2317,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat auto dstPath = i != srcToStore.end() ? i->second : [&]() { - auto dstPath = path.fetchToStore(store, path.baseName(), nullptr, repair); + auto dstPath = path.fetchToStore(store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair); allowPath(dstPath); srcToStore.insert_or_assign(path, dstPath); printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath)); @@ -2283,10 +2333,34 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx) { + try { + forceValue(v, pos); + } catch (Error & e) { + e.addTrace(positions[pos], errorCtx); + throw; + } + + /* Handle path values directly, without coercing to a string. */ + if (v.type() == nPath) + return v.path(); + + /* Similarly, handle __toString where the result may be a path + value. */ + if (v.type() == nAttrs) { + auto i = v.attrs->find(sToString); + if (i != v.attrs->end()) { + Value v1; + callFunction(*i->value, v, v1, pos); + return coerceToPath(pos, v1, context, errorCtx); + } + } + + /* Any other value should be coercable to a string, interpreted + relative to the root filesystem. 
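// Illustrative sketch, not from the Nix sources: the accessor pattern behind
// the repeated v.string.s / v.string.context to v.c_str() / v.string_view() /
// v.context() changes above. Call sites stop reading the union field directly
// and go through small accessors, so the string representation can change
// without touching every caller. The layout below is a toy, not the real Value.
#include <cassert>
#include <string_view>

struct ToyValue
{
    struct StringWithContext {
        const char * s;
        const char ** context; // null-terminated array, or nullptr
    };
    StringWithContext string; // stand-in for the payload inside the real tagged union

    const char * c_str() const { return string.s; }
    std::string_view string_view() const { return std::string_view(string.s); }
    const char ** context() const { return string.context; }
};

int main()
{
    ToyValue v;
    v.string = { "derivation", nullptr };
    assert(v.string_view() == "derivation"); // replaces strcmp(v.string.s, "derivation") == 0
    assert(!v.context());                    // replaces checking v.string.context
}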
*/ auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned(); if (path == "" || path[0] != '/') error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow(); - return CanonPath(path); + return rootPath(CanonPath(path)); } @@ -2299,7 +2373,7 @@ StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringCon } -std::pair EvalState::coerceToDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx) +std::pair EvalState::coerceToSingleDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx) { NixStringContext context; auto s = forceString(v, context, pos, errorCtx); @@ -2310,23 +2384,18 @@ std::pair EvalState::coerceToDerivedPathUnchecked s, csize) .withTrace(pos, errorCtx).debugThrow(); auto derivedPath = std::visit(overloaded { - [&](NixStringContextElem::Opaque && o) -> DerivedPath { - return DerivedPath::Opaque { - .path = std::move(o.path), - }; + [&](NixStringContextElem::Opaque && o) -> SingleDerivedPath { + return std::move(o); }, - [&](NixStringContextElem::DrvDeep &&) -> DerivedPath { + [&](NixStringContextElem::DrvDeep &&) -> SingleDerivedPath { error( "string '%s' has a context which refers to a complete source and binary closure. This is not supported at this time", s).withTrace(pos, errorCtx).debugThrow(); }, - [&](NixStringContextElem::Built && b) -> DerivedPath { - return DerivedPath::Built { - .drvPath = std::move(b.drvPath), - .outputs = OutputsSpec::Names { std::move(b.output) }, - }; + [&](NixStringContextElem::Built && b) -> SingleDerivedPath { + return std::move(b); }, - }, ((NixStringContextElem &&) *context.begin()).raw()); + }, ((NixStringContextElem &&) *context.begin()).raw); return { std::move(derivedPath), std::move(s), @@ -2334,41 +2403,29 @@ std::pair EvalState::coerceToDerivedPathUnchecked } -DerivedPath EvalState::coerceToDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx) +SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx) { - auto [derivedPath, s_] = coerceToDerivedPathUnchecked(pos, v, errorCtx); + auto [derivedPath, s_] = coerceToSingleDerivedPathUnchecked(pos, v, errorCtx); auto s = s_; - std::visit(overloaded { - [&](const DerivedPath::Opaque & o) { - auto sExpected = store->printStorePath(o.path); - if (s != sExpected) + auto sExpected = mkSingleDerivedPathStringRaw(derivedPath); + if (s != sExpected) { + /* `std::visit` is used here just to provide a more precise + error message. */ + std::visit(overloaded { + [&](const SingleDerivedPath::Opaque & o) { error( "path string '%s' has context with the different path '%s'", s, sExpected) .withTrace(pos, errorCtx).debugThrow(); - }, - [&](const DerivedPath::Built & b) { - // TODO need derived path with single output to make this - // total. Will add as part of RFC 92 work and then this is - // cleaned up. - auto output = *std::get(b.outputs).begin(); - - auto drv = store->readDerivation(b.drvPath); - auto i = drv.outputs.find(output); - if (i == drv.outputs.end()) - throw Error("derivation '%s' does not have output '%s'", store->printStorePath(b.drvPath), output); - auto optOutputPath = i->second.path(*store, drv.name, output); - // This is testing for the case of CA derivations - auto sExpected = optOutputPath - ? 
store->printStorePath(*optOutputPath) - : DownstreamPlaceholder::unknownCaOutput(b.drvPath, output).render(); - if (s != sExpected) + }, + [&](const SingleDerivedPath::Built & b) { error( "string '%s' has context with the output '%s' from derivation '%s', but the string is not the right placeholder for this derivation output. It should be '%s'", - s, output, store->printStorePath(b.drvPath), sExpected) + s, b.output, b.drvPath->to_string(*store), sExpected) .withTrace(pos, errorCtx).debugThrow(); - } - }, derivedPath.raw()); + } + }, derivedPath.raw()); + } return derivedPath; } @@ -2400,10 +2457,13 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v return v1.boolean == v2.boolean; case nString: - return strcmp(v1.string.s, v2.string.s) == 0; + return v1.string_view().compare(v2.string_view()) == 0; case nPath: - return strcmp(v1._path, v2._path) == 0; + return + // FIXME: compare accessors by their fingerprint. + v1._path.accessor == v2._path.accessor + && strcmp(v1._path.path, v2._path.path) == 0; case nNull: return true; @@ -2451,10 +2511,37 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v } } -void EvalState::printStats() +bool EvalState::fullGC() { +#if HAVE_BOEHMGC + GC_gcollect(); + // Check that it ran. We might replace this with a version that uses more + // of the boehm API to get this reliably, at a maintenance cost. + // We use a 1K margin because technically this has a race condtion, but we + // probably won't encounter it in practice, because the CLI isn't concurrent + // like that. + return GC_get_bytes_since_gc() < 1024; +#else + return false; +#endif +} + +void EvalState::maybePrintStats() { bool showStats = getEnv("NIX_SHOW_STATS").value_or("0") != "0"; + if (showStats) { + // Make the final heap size more deterministic. 
+#if HAVE_BOEHMGC + if (!fullGC()) { + warn("failed to perform a full GC before reporting stats"); + } +#endif + printStatistics(); + } +} + +void EvalState::printStatistics() +{ struct rusage buf; getrusage(RUSAGE_SELF, &buf); float cpuTime = buf.ru_utime.tv_sec + ((float) buf.ru_utime.tv_usec / 1000000); @@ -2468,105 +2555,105 @@ void EvalState::printStats() GC_word heapSize, totalBytes; GC_get_heap_usage_safe(&heapSize, 0, 0, 0, &totalBytes); #endif - if (showStats) { - auto outPath = getEnv("NIX_SHOW_STATS_PATH").value_or("-"); - std::fstream fs; - if (outPath != "-") - fs.open(outPath, std::fstream::out); - json topObj = json::object(); - topObj["cpuTime"] = cpuTime; - topObj["envs"] = { - {"number", nrEnvs}, - {"elements", nrValuesInEnvs}, - {"bytes", bEnvs}, - }; - topObj["list"] = { - {"elements", nrListElems}, - {"bytes", bLists}, - {"concats", nrListConcats}, - }; - topObj["values"] = { - {"number", nrValues}, - {"bytes", bValues}, - }; - topObj["symbols"] = { - {"number", symbols.size()}, - {"bytes", symbols.totalSize()}, - }; - topObj["sets"] = { - {"number", nrAttrsets}, - {"bytes", bAttrsets}, - {"elements", nrAttrsInAttrsets}, - }; - topObj["sizes"] = { - {"Env", sizeof(Env)}, - {"Value", sizeof(Value)}, - {"Bindings", sizeof(Bindings)}, - {"Attr", sizeof(Attr)}, - }; - topObj["nrOpUpdates"] = nrOpUpdates; - topObj["nrOpUpdateValuesCopied"] = nrOpUpdateValuesCopied; - topObj["nrThunks"] = nrThunks; - topObj["nrAvoided"] = nrAvoided; - topObj["nrLookups"] = nrLookups; - topObj["nrPrimOpCalls"] = nrPrimOpCalls; - topObj["nrFunctionCalls"] = nrFunctionCalls; + + auto outPath = getEnv("NIX_SHOW_STATS_PATH").value_or("-"); + std::fstream fs; + if (outPath != "-") + fs.open(outPath, std::fstream::out); + json topObj = json::object(); + topObj["cpuTime"] = cpuTime; + topObj["envs"] = { + {"number", nrEnvs}, + {"elements", nrValuesInEnvs}, + {"bytes", bEnvs}, + }; + topObj["nrExprs"] = Expr::nrExprs; + topObj["list"] = { + {"elements", nrListElems}, + {"bytes", bLists}, + {"concats", nrListConcats}, + }; + topObj["values"] = { + {"number", nrValues}, + {"bytes", bValues}, + }; + topObj["symbols"] = { + {"number", symbols.size()}, + {"bytes", symbols.totalSize()}, + }; + topObj["sets"] = { + {"number", nrAttrsets}, + {"bytes", bAttrsets}, + {"elements", nrAttrsInAttrsets}, + }; + topObj["sizes"] = { + {"Env", sizeof(Env)}, + {"Value", sizeof(Value)}, + {"Bindings", sizeof(Bindings)}, + {"Attr", sizeof(Attr)}, + }; + topObj["nrOpUpdates"] = nrOpUpdates; + topObj["nrOpUpdateValuesCopied"] = nrOpUpdateValuesCopied; + topObj["nrThunks"] = nrThunks; + topObj["nrAvoided"] = nrAvoided; + topObj["nrLookups"] = nrLookups; + topObj["nrPrimOpCalls"] = nrPrimOpCalls; + topObj["nrFunctionCalls"] = nrFunctionCalls; #if HAVE_BOEHMGC - topObj["gc"] = { - {"heapSize", heapSize}, - {"totalBytes", totalBytes}, - }; + topObj["gc"] = { + {"heapSize", heapSize}, + {"totalBytes", totalBytes}, + }; #endif - if (countCalls) { - topObj["primops"] = primOpCalls; - { - auto& list = topObj["functions"]; - list = json::array(); - for (auto & [fun, count] : functionCalls) { - json obj = json::object(); - if (fun->name) - obj["name"] = (std::string_view) symbols[fun->name]; - else - obj["name"] = nullptr; - if (auto pos = positions[fun->pos]) { - if (auto path = std::get_if(&pos.origin)) - obj["file"] = path->to_string(); - obj["line"] = pos.line; - obj["column"] = pos.column; - } - obj["count"] = count; - list.push_back(obj); - } - } - { - auto list = topObj["attributes"]; - list = json::array(); - for (auto & i : 
attrSelects) { - json obj = json::object(); - if (auto pos = positions[i.first]) { - if (auto path = std::get_if(&pos.origin)) - obj["file"] = path->to_string(); - obj["line"] = pos.line; - obj["column"] = pos.column; - } - obj["count"] = i.second; - list.push_back(obj); + if (countCalls) { + topObj["primops"] = primOpCalls; + { + auto& list = topObj["functions"]; + list = json::array(); + for (auto & [fun, count] : functionCalls) { + json obj = json::object(); + if (fun->name) + obj["name"] = (std::string_view) symbols[fun->name]; + else + obj["name"] = nullptr; + if (auto pos = positions[fun->pos]) { + if (auto path = std::get_if(&pos.origin)) + obj["file"] = path->to_string(); + obj["line"] = pos.line; + obj["column"] = pos.column; } + obj["count"] = count; + list.push_back(obj); } } + { + auto list = topObj["attributes"]; + list = json::array(); + for (auto & i : attrSelects) { + json obj = json::object(); + if (auto pos = positions[i.first]) { + if (auto path = std::get_if(&pos.origin)) + obj["file"] = path->to_string(); + obj["line"] = pos.line; + obj["column"] = pos.column; + } + obj["count"] = i.second; + list.push_back(obj); + } + } + } - if (getEnv("NIX_SHOW_SYMBOLS").value_or("0") != "0") { - // XXX: overrides earlier assignment - topObj["symbols"] = json::array(); - auto &list = topObj["symbols"]; - symbols.dump([&](const std::string & s) { list.emplace_back(s); }); - } - if (outPath == "-") { - std::cerr << topObj.dump(2) << std::endl; - } else { - fs << topObj.dump(2) << std::endl; - } + if (getEnv("NIX_SHOW_SYMBOLS").value_or("0") != "0") { + // XXX: overrides earlier assignment + topObj["symbols"] = json::array(); + auto &list = topObj["symbols"]; + symbols.dump([&](const std::string & s) { list.emplace_back(s); }); + } + if (outPath == "-") { + std::cerr << topObj.dump(2) << std::endl; + } else { + fs << topObj.dump(2) << std::endl; } } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 29d0f05a1..048dff42b 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -22,8 +22,10 @@ namespace nix { class Store; class EvalState; class StorePath; -struct DerivedPath; +struct SingleDerivedPath; enum RepairFlag : bool; +struct FSInputAccessor; +struct MemoryInputAccessor; /** @@ -211,8 +213,26 @@ public: Bindings emptyBindings; + /** + * The accessor for the root filesystem. + */ + const ref rootFS; + + /** + * The in-memory filesystem for paths. + */ + const ref corepkgsFS; + + /** + * In-memory filesystem for internal, non-user-callable Nix + * expressions like call-flake.nix. + */ + const ref internalFS; + const SourcePath derivationInternal; + const SourcePath callFlakeInternal; + /** * Store used to materialise .drv files. */ @@ -223,7 +243,6 @@ public: */ const ref buildStore; - RootValue vCallFlake = nullptr; RootValue vImportedDrvToDerivation = nullptr; /** @@ -341,8 +360,6 @@ public: std::shared_ptr buildStore = nullptr); ~EvalState(); - void addToSearchPath(SearchPath::Elem && elem); - SearchPath getSearchPath() { return searchPath; } /** @@ -407,16 +424,6 @@ public: */ void evalFile(const SourcePath & path, Value & v, bool mustBeTrivial = false); - /** - * Like `evalFile`, but with an already parsed expression. 
- */ - void cacheFile( - const SourcePath & path, - const SourcePath & resolvedPath, - Expr * e, - Value & v, - bool mustBeTrivial = false); - void resetFileCache(); /** @@ -426,7 +433,7 @@ public: SourcePath findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos); /** - * Try to resolve a search path value (not the optinal key part) + * Try to resolve a search path value (not the optional key part) * * If the specified search path element is a URI, download it. * @@ -532,12 +539,12 @@ public: StorePath coerceToStorePath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx); /** - * Part of `coerceToDerivedPath()` without any store IO which is exposed for unit testing only. + * Part of `coerceToSingleDerivedPath()` without any store IO which is exposed for unit testing only. */ - std::pair coerceToDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx); + std::pair coerceToSingleDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx); /** - * Coerce to `DerivedPath`. + * Coerce to `SingleDerivedPath`. * * Must be a string which is either a literal store path or a * "placeholder (see `DownstreamPlaceholder`). @@ -551,7 +558,7 @@ public: * source of truth, and ultimately tells us what we want, and then * we ensure the string corresponds to it. */ - DerivedPath coerceToDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx); + SingleDerivedPath coerceToSingleDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx); public: @@ -668,43 +675,68 @@ public: /** * Create a string representing a store path. * - * The string is the printed store path with a context containing a single - * `NixStringContextElem::Opaque` element of that store path. + * The string is the printed store path with a context containing a + * single `NixStringContextElem::Opaque` element of that store path. */ void mkStorePathString(const StorePath & storePath, Value & v); /** - * Create a string representing a `DerivedPath::Built`. + * Create a string representing a `SingleDerivedPath::Built`. * - * The string is the printed store path with a context containing a single - * `NixStringContextElem::Built` element of the drv path and output name. + * The string is the printed store path with a context containing a + * single `NixStringContextElem::Built` element of the drv path and + * output name. * * @param value Value we are settings * - * @param drvPath Path the drv whose output we are making a string for + * @param b the drv whose output we are making a string for, and the + * output * - * @param outputName Name of the output - * - * @param optOutputPath Optional output path for that string. Must - * be passed if and only if output store object is input-addressed. - * Will be printed to form string if passed, otherwise a placeholder - * will be used (see `DownstreamPlaceholder`). + * @param optStaticOutputPath Optional output path for that string. + * Must be passed if and only if output store object is + * input-addressed or fixed output. Will be printed to form string + * if passed, otherwise a placeholder will be used (see + * `DownstreamPlaceholder`). * * @param xpSettings Stop-gap to avoid globals during unit tests. 
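// Illustrative sketch, not from the Nix sources: a minimal model of why
// SingleDerivedPath::Built (used in the signatures above) carries a pointer to
// another single derived path rather than a plain store path. A Built node
// names one output of a derivation, and with dynamic derivations that
// derivation may itself be the output of a build, so the type is recursive.
// The overloaded/std::visit idiom used throughout the patch is shown too.
// The real type uses a non-nullable ref<>, and an unbuilt output is rendered
// as a DownstreamPlaceholder hash rather than the "!" concatenation here.
#include <iostream>
#include <memory>
#include <string>
#include <variant>

template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

struct SDP;
struct Opaque { std::string storePath; };
struct Built {
    std::shared_ptr<SDP> drvPath; // the derivation producing this output, itself possibly a Built
    std::string output;
};
struct SDP { std::variant<Opaque, Built> raw; };

static std::string render(const SDP & p)
{
    return std::visit(overloaded {
        [](const Opaque & o) { return o.storePath; },
        [](const Built & b) { return render(*b.drvPath) + "!" + b.output; },
    }, p.raw);
}

int main()
{
    auto drv = std::make_shared<SDP>(SDP{Opaque{"/nix/store/example-hello.drv"}});
    SDP out{Built{drv, "out"}};
    // Prints "/nix/store/example-hello.drv!out"; getBaseStorePath() in the
    // patch walks to the innermost opaque store path in the same way.
    std::cout << render(out) << std::endl;
}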
*/ void mkOutputString( Value & value, - const StorePath & drvPath, - const std::string outputName, - std::optional optOutputPath, + const SingleDerivedPath::Built & b, + std::optional optStaticOutputPath, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + /** + * Create a string representing a `SingleDerivedPath`. + * + * A combination of `mkStorePathString` and `mkOutputString`. + */ + void mkSingleDerivedPathString( + const SingleDerivedPath & p, + Value & v); + void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos, std::string_view errorCtx); /** - * Print statistics. + * Print statistics, if enabled. + * + * Performs a full memory GC before printing the statistics, so that the + * GC statistics are more accurate. */ - void printStats(); + void maybePrintStats(); + + /** + * Print statistics, unconditionally, cheaply, without performing a GC first. + */ + void printStatistics(); + + /** + * Perform a full memory garbage collection - not incremental. + * + * @return true if Nix was built with GC and a GC was performed, false if not. + * The return value is currently not thread safe - just the return value. + */ + bool fullGC(); /** * Realise the given context, and return a mapping from the placeholders @@ -714,6 +746,22 @@ public: private: + /** + * Like `mkOutputString` but just creates a raw string, not an + * string Value, which would also have a string context. + */ + std::string mkOutputStringRaw( + const SingleDerivedPath::Built & b, + std::optional optStaticOutputPath, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + + /** + * Like `mkSingleDerivedPathStringRaw` but just creates a raw string + * Value, which would also have a string context. + */ + std::string mkSingleDerivedPathStringRaw( + const SingleDerivedPath & p); + unsigned long nrEnvs = 0; unsigned long nrValuesInEnvs = 0; unsigned long nrValues = 0; @@ -790,8 +838,6 @@ struct InvalidPathError : EvalError #endif }; -static const std::string corepkgsPrefix{"/__corepkgs__/"}; - template void ErrorBuilder::debugThrow() { diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index 6a27ea2e8..be2cf014c 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -15,7 +15,7 @@ using namespace flake; namespace flake { -typedef std::pair FetchedFlake; +typedef std::pair FetchedFlake; typedef std::vector> FlakeCache; static std::optional lookupInFlakeCache( @@ -34,7 +34,7 @@ static std::optional lookupInFlakeCache( return std::nullopt; } -static std::tuple fetchOrSubstituteTree( +static std::tuple fetchOrSubstituteTree( EvalState & state, const FlakeRef & originalRef, bool allowLookup, @@ -61,16 +61,16 @@ static std::tuple fetchOrSubstituteTree( flakeCache.push_back({originalRef, *fetched}); } - auto [tree, lockedRef] = *fetched; + auto [storePath, lockedRef] = *fetched; debug("got tree '%s' from '%s'", - state.store->printStorePath(tree.storePath), lockedRef); + state.store->printStorePath(storePath), lockedRef); - state.allowPath(tree.storePath); + state.allowPath(storePath); - assert(!originalRef.input.getNarHash() || tree.storePath == originalRef.input.computeStorePath(*state.store)); + assert(!originalRef.input.getNarHash() || storePath == originalRef.input.computeStorePath(*state.store)); - return {std::move(tree), resolvedRef, lockedRef}; + return {std::move(storePath), resolvedRef, lockedRef}; } static void forceTrivialValue(EvalState & state, Value & value, const PosIdx pos) @@ -113,7 +113,7 @@ static 
FlakeInput parseFlakeInput(EvalState & state, try { if (attr.name == sUrl) { expectType(state, nString, *attr.value, attr.pos); - url = attr.value->string.s; + url = attr.value->string_view(); attrs.emplace("url", *url); } else if (attr.name == sFlake) { expectType(state, nBool, *attr.value, attr.pos); @@ -122,7 +122,7 @@ static FlakeInput parseFlakeInput(EvalState & state, input.overrides = parseFlakeInputs(state, attr.value, attr.pos, baseDir, lockRootPath); } else if (attr.name == sFollows) { expectType(state, nString, *attr.value, attr.pos); - auto follows(parseInputPath(attr.value->string.s)); + auto follows(parseInputPath(attr.value->c_str())); follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end()); input.follows = follows; } else { @@ -131,7 +131,7 @@ static FlakeInput parseFlakeInput(EvalState & state, #pragma GCC diagnostic ignored "-Wswitch-enum" switch (attr.value->type()) { case nString: - attrs.emplace(state.symbols[attr.name], attr.value->string.s); + attrs.emplace(state.symbols[attr.name], attr.value->c_str()); break; case nBool: attrs.emplace(state.symbols[attr.name], Explicit { attr.value->boolean }); @@ -202,34 +202,34 @@ static Flake getFlake( FlakeCache & flakeCache, InputPath lockRootPath) { - auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree( + auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree( state, originalRef, allowLookup, flakeCache); // Guard against symlink attacks. - auto flakeDir = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir, true); + auto flakeDir = canonPath(state.store->toRealPath(storePath) + "/" + lockedRef.subdir, true); auto flakeFile = canonPath(flakeDir + "/flake.nix", true); - if (!isInDir(flakeFile, sourceInfo.actualPath)) + if (!isInDir(flakeFile, state.store->toRealPath(storePath))) throw Error("'flake.nix' file of flake '%s' escapes from '%s'", - lockedRef, state.store->printStorePath(sourceInfo.storePath)); + lockedRef, state.store->printStorePath(storePath)); Flake flake { .originalRef = originalRef, .resolvedRef = resolvedRef, .lockedRef = lockedRef, - .sourceInfo = std::make_shared(std::move(sourceInfo)) + .storePath = storePath, }; if (!pathExists(flakeFile)) throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir); Value vInfo; - state.evalFile(CanonPath(flakeFile), vInfo, true); // FIXME: symlink attack + state.evalFile(state.rootPath(CanonPath(flakeFile)), vInfo, true); // FIXME: symlink attack - expectType(state, nAttrs, vInfo, state.positions.add({CanonPath(flakeFile)}, 1, 1)); + expectType(state, nAttrs, vInfo, state.positions.add({state.rootPath(CanonPath(flakeFile))}, 1, 1)); if (auto description = vInfo.attrs->get(state.sDescription)) { expectType(state, nString, *description->value, description->pos); - flake.description = description->value->string.s; + flake.description = description->value->c_str(); } auto sInputs = state.symbols.create("inputs"); @@ -346,7 +346,7 @@ LockedFlake lockFlake( // FIXME: symlink attack auto oldLockFile = LockFile::read( lockFlags.referenceLockFilePath.value_or( - flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir + "/flake.lock")); + state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir + "/flake.lock")); debug("old lock file: %s", oldLockFile); @@ -520,11 +520,6 @@ LockedFlake lockFlake( } } - auto localPath(parentPath); - // If this input is a path, recurse it down. - // This allows us to resolve path inputs relative to the current flake. 
- if ((*input.ref).input.getType() == "path") - localPath = absPath(*input.ref->input.getSourcePath(), parentPath); computeLocks( mustRefetch ? getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath).inputs @@ -579,7 +574,7 @@ LockedFlake lockFlake( oldLock ? std::dynamic_pointer_cast(oldLock) : LockFile::read( - inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(), + state.store->toRealPath(inputFlake.storePath) + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(), oldLock ? lockRootPath : inputPath, localPath, false); @@ -603,7 +598,7 @@ LockedFlake lockFlake( }; // Bring in the current ref for relative path resolution if we have it - auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true); + auto parentPath = canonPath(state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir, true); computeLocks( flake.inputs, @@ -734,7 +729,7 @@ void callFlake(EvalState & state, emitTreeAttrs( state, - *lockedFlake.flake.sourceInfo, + lockedFlake.flake.storePath, lockedFlake.flake.lockedRef.input, *vRootSrc, false, @@ -742,14 +737,10 @@ void callFlake(EvalState & state, vRootSubdir->mkString(lockedFlake.flake.lockedRef.subdir); - if (!state.vCallFlake) { - state.vCallFlake = allocRootValue(state.allocValue()); - state.eval(state.parseExprFromString( - #include "call-flake.nix.gen.hh" - , CanonPath::root), **state.vCallFlake); - } + auto vCallFlake = state.allocValue(); + state.evalFile(state.callFlakeInternal, *vCallFlake); - state.callFunction(**state.vCallFlake, *vLocks, *vTmp1, noPos); + state.callFunction(*vCallFlake, *vLocks, *vTmp1, noPos); state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos); state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos); } @@ -855,7 +846,7 @@ static void prim_flakeRefToString( Explicit { attr.value->boolean }); } else if (t == nString) { attrs.emplace(state.symbols[attr.name], - std::string(attr.value->str())); + std::string(attr.value->string_view())); } else { state.error( "flake reference attribute sets may only contain integers, Booleans, " @@ -898,7 +889,7 @@ Fingerprint LockedFlake::getFingerprint() const // flake.sourceInfo.storePath for the fingerprint. 
return hashString(htSHA256, fmt("%s;%s;%d;%d;%s", - flake.sourceInfo->storePath.to_string(), + flake.storePath.to_string(), flake.lockedRef.subdir, flake.lockedRef.input.getRevCount().value_or(0), flake.lockedRef.input.getLastModified().value_or(0), diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh index c1d1b71e5..d5ad3eade 100644 --- a/src/libexpr/flake/flake.hh +++ b/src/libexpr/flake/flake.hh @@ -10,8 +10,6 @@ namespace nix { class EvalState; -namespace fetchers { struct Tree; } - namespace flake { struct FlakeInput; @@ -84,7 +82,7 @@ struct Flake */ bool forceDirty = false; std::optional description; - std::shared_ptr sourceInfo; + StorePath storePath; FlakeInputs inputs; /** * 'nixConfig' attribute @@ -193,7 +191,7 @@ void callFlake( void emitTreeAttrs( EvalState & state, - const fetchers::Tree & tree, + const StorePath & storePath, const fetchers::Input & input, Value & v, bool emptyRevFallback = false, diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc index d3fa1d557..16f45ace7 100644 --- a/src/libexpr/flake/flakeref.cc +++ b/src/libexpr/flake/flakeref.cc @@ -69,32 +69,130 @@ std::optional maybeParseFlakeRef( } } -std::pair parseFlakeRefWithFragment( +std::pair parsePathFlakeRefWithFragment( const std::string & url, const std::optional & baseDir, bool allowMissing, bool isFlake) { - using namespace fetchers; + std::string path = url; + std::string fragment = ""; + std::map query; + auto pathEnd = url.find_first_of("#?"); + auto fragmentStart = pathEnd; + if (pathEnd != std::string::npos && url[pathEnd] == '?') { + fragmentStart = url.find("#"); + } + if (pathEnd != std::string::npos) { + path = url.substr(0, pathEnd); + } + if (fragmentStart != std::string::npos) { + fragment = percentDecode(url.substr(fragmentStart+1)); + } + if (pathEnd != std::string::npos && fragmentStart != std::string::npos) { + query = decodeQuery(url.substr(pathEnd+1, fragmentStart)); + } - static std::string fnRegex = "[0-9a-zA-Z-._~!$&'\"()*+,;=]+"; + if (baseDir) { + /* Check if 'url' is a path (either absolute or relative + to 'baseDir'). If so, search upward to the root of the + repo (i.e. the directory containing .git). */ - static std::regex pathUrlRegex( - "(/?" + fnRegex + "(?:/" + fnRegex + ")*/?)" - + "(?:\\?(" + queryRegex + "))?" 
- + "(?:#(" + queryRegex + "))?", - std::regex::ECMAScript); + path = absPath(path, baseDir); + + if (isFlake) { + + if (!allowMissing && !pathExists(path + "/flake.nix")){ + notice("path '%s' does not contain a 'flake.nix', searching up",path); + + // Save device to detect filesystem boundary + dev_t device = lstat(path).st_dev; + bool found = false; + while (path != "/") { + if (pathExists(path + "/flake.nix")) { + found = true; + break; + } else if (pathExists(path + "/.git")) + throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path); + else { + if (lstat(path).st_dev != device) + throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path); + } + path = dirOf(path); + } + if (!found) + throw BadURL("could not find a flake.nix file"); + } + + if (!S_ISDIR(lstat(path).st_mode)) + throw BadURL("path '%s' is not a flake (because it's not a directory)", path); + + if (!allowMissing && !pathExists(path + "/flake.nix")) + throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path); + + auto flakeRoot = path; + std::string subdir; + + while (flakeRoot != "/") { + if (pathExists(flakeRoot + "/.git")) { + auto base = std::string("git+file://") + flakeRoot; + + auto parsedURL = ParsedURL{ + .url = base, // FIXME + .base = base, + .scheme = "git+file", + .authority = "", + .path = flakeRoot, + .query = query, + }; + + if (subdir != "") { + if (parsedURL.query.count("dir")) + throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url); + parsedURL.query.insert_or_assign("dir", subdir); + } + + if (pathExists(flakeRoot + "/.git/shallow")) + parsedURL.query.insert_or_assign("shallow", "1"); + + return std::make_pair( + FlakeRef(fetchers::Input::fromURL(parsedURL), getOr(parsedURL.query, "dir", "")), + fragment); + } + + subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir); + flakeRoot = dirOf(flakeRoot); + } + } + + } else { + if (!hasPrefix(path, "/")) + throw BadURL("flake reference '%s' is not an absolute path", url); + path = canonPath(path + "/" + getOr(query, "dir", "")); + } + + fetchers::Attrs attrs; + attrs.insert_or_assign("type", "path"); + attrs.insert_or_assign("path", path); + + return std::make_pair(FlakeRef(fetchers::Input::fromAttrs(std::move(attrs)), ""), fragment); +}; + + +/* Check if 'url' is a flake ID. This is an abbreviated syntax for + 'flake:?ref=&rev='. */ +std::optional> parseFlakeIdRef( + const std::string & url, + bool isFlake +) +{ + std::smatch match; static std::regex flakeRegex( "((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)" + "(?:#(" + queryRegex + "))?", std::regex::ECMAScript); - std::smatch match; - - /* Check if 'url' is a flake ID. This is an abbreviated syntax for - 'flake:?ref=&rev='. */ - if (std::regex_match(url, match, flakeRegex)) { auto parsedURL = ParsedURL{ .url = url, @@ -105,111 +203,53 @@ std::pair parseFlakeRefWithFragment( }; return std::make_pair( - FlakeRef(Input::fromURL(parsedURL, isFlake), ""), + FlakeRef(fetchers::Input::fromURL(parsedURL, isFlake), ""), percentDecode(match.str(6))); } - else if (std::regex_match(url, match, pathUrlRegex)) { - std::string path = match[1]; - std::string fragment = percentDecode(match.str(3)); + return {}; +} - if (baseDir) { - /* Check if 'url' is a path (either absolute or relative - to 'baseDir'). If so, search upward to the root of the - repo (i.e. the directory containing .git). 
*/ - - path = absPath(path, baseDir); - - if (isFlake) { - - if (!allowMissing && !pathExists(path + "/flake.nix")){ - notice("path '%s' does not contain a 'flake.nix', searching up",path); - - // Save device to detect filesystem boundary - dev_t device = lstat(path).st_dev; - bool found = false; - while (path != "/") { - if (pathExists(path + "/flake.nix")) { - found = true; - break; - } else if (pathExists(path + "/.git")) - throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path); - else { - if (lstat(path).st_dev != device) - throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path); - } - path = dirOf(path); - } - if (!found) - throw BadURL("could not find a flake.nix file"); - } - - if (!S_ISDIR(lstat(path).st_mode)) - throw BadURL("path '%s' is not a flake (because it's not a directory)", path); - - if (!allowMissing && !pathExists(path + "/flake.nix")) - throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path); - - auto flakeRoot = path; - std::string subdir; - - while (flakeRoot != "/") { - if (pathExists(flakeRoot + "/.git")) { - auto base = std::string("git+file://") + flakeRoot; - - auto parsedURL = ParsedURL{ - .url = base, // FIXME - .base = base, - .scheme = "git+file", - .authority = "", - .path = flakeRoot, - .query = decodeQuery(match[2]), - }; - - if (subdir != "") { - if (parsedURL.query.count("dir")) - throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url); - parsedURL.query.insert_or_assign("dir", subdir); - } - - if (pathExists(flakeRoot + "/.git/shallow")) - parsedURL.query.insert_or_assign("shallow", "1"); - - return std::make_pair( - FlakeRef(Input::fromURL(parsedURL, isFlake), getOr(parsedURL.query, "dir", "")), - fragment); - } - - subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? 
"" : "/" + subdir); - flakeRoot = dirOf(flakeRoot); - } - } - - } else { - if (!hasPrefix(path, "/")) - throw BadURL("flake reference '%s' is not an absolute path", url); - auto query = decodeQuery(match[2]); - path = canonPath(path + "/" + getOr(query, "dir", "")); - } - - fetchers::Attrs attrs; - attrs.insert_or_assign("type", "path"); - attrs.insert_or_assign("path", path); - - return std::make_pair(FlakeRef(Input::fromAttrs(std::move(attrs)), ""), fragment); +std::optional> parseURLFlakeRef( + const std::string & url, + const std::optional & baseDir, + bool isFlake +) +{ + ParsedURL parsedURL; + try { + parsedURL = parseURL(url); + } catch (BadURL &) { + return std::nullopt; } - else { - auto parsedURL = parseURL(url); - std::string fragment; - std::swap(fragment, parsedURL.fragment); + std::string fragment; + std::swap(fragment, parsedURL.fragment); - auto input = Input::fromURL(parsedURL, isFlake); - input.parent = baseDir; + auto input = fetchers::Input::fromURL(parsedURL, isFlake); + input.parent = baseDir; - return std::make_pair( - FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")), - fragment); + return std::make_pair( + FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")), + fragment); +} + +std::pair parseFlakeRefWithFragment( + const std::string & url, + const std::optional & baseDir, + bool allowMissing, + bool isFlake) +{ + using namespace fetchers; + + std::smatch match; + + if (auto res = parseFlakeIdRef(url, isFlake)) { + return *res; + } else if (auto res = parseURLFlakeRef(url, baseDir, isFlake)) { + return *res; + } else { + return parsePathFlakeRefWithFragment(url, baseDir, allowMissing, isFlake); } } @@ -232,10 +272,10 @@ FlakeRef FlakeRef::fromAttrs(const fetchers::Attrs & attrs) fetchers::maybeGetStrAttr(attrs, "dir").value_or("")); } -std::pair FlakeRef::fetchTree(ref store) const +std::pair FlakeRef::fetchTree(ref store) const { - auto [tree, lockedInput] = input.fetch(store); - return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)}; + auto [storePath, lockedInput] = input.fetch(store); + return {std::move(storePath), FlakeRef(std::move(lockedInput), subdir)}; } std::tuple parseFlakeRefWithFragmentAndExtendedOutputsSpec( @@ -246,7 +286,9 @@ std::tuple parseFlakeRefWithFragment { auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(url); auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, baseDir, allowMissing, isFlake); - return {std::move(flakeRef), fragment, extendedOutputsSpec}; + return {std::move(flakeRef), fragment, std::move(extendedOutputsSpec)}; } +std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript); + } diff --git a/src/libexpr/flake/flakeref.hh b/src/libexpr/flake/flakeref.hh index a7c9208c0..5d78f49b6 100644 --- a/src/libexpr/flake/flakeref.hh +++ b/src/libexpr/flake/flakeref.hh @@ -6,6 +6,7 @@ #include "fetchers.hh" #include "outputs-spec.hh" +#include #include namespace nix { @@ -62,7 +63,7 @@ struct FlakeRef static FlakeRef fromAttrs(const fetchers::Attrs & attrs); - std::pair fetchTree(ref store) const; + std::pair fetchTree(ref store) const; }; std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef); @@ -91,5 +92,7 @@ std::tuple parseFlakeRefWithFragment bool allowMissing = false, bool isFlake = true); +const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*"; +extern std::regex flakeIdRegex; } diff --git a/src/libexpr/flake/lockfile.cc b/src/libexpr/flake/lockfile.cc index ba2fd46f0..f3ea9063f 100644 --- a/src/libexpr/flake/lockfile.cc 
+++ b/src/libexpr/flake/lockfile.cc @@ -2,8 +2,10 @@ #include "store-api.hh" #include "url-parts.hh" +#include #include +#include #include namespace nix::flake { @@ -45,16 +47,26 @@ StorePath LockedNode::computeStorePath(Store & store) const return lockedRef.input.computeStorePath(store); } -std::shared_ptr LockFile::findInput(const InputPath & path) -{ + +static std::shared_ptr doFind(const ref& root, const InputPath & path, std::vector& visited) { auto pos = root; + auto found = std::find(visited.cbegin(), visited.cend(), path); + + if(found != visited.end()) { + std::vector cycle; + std::transform(found, visited.cend(), std::back_inserter(cycle), printInputPath); + cycle.push_back(printInputPath(path)); + throw Error("follow cycle detected: [%s]", concatStringsSep(" -> ", cycle)); + } + visited.push_back(path); + for (auto & elem : path) { if (auto i = get(pos->inputs, elem)) { if (auto node = std::get_if<0>(&*i)) pos = *node; else if (auto follows = std::get_if<1>(&*i)) { - if (auto p = findInput(*follows)) + if (auto p = doFind(root, *follows, visited)) pos = ref(p); else return {}; @@ -66,6 +78,12 @@ std::shared_ptr LockFile::findInput(const InputPath & path) return pos; } +std::shared_ptr LockFile::findInput(const InputPath & path) +{ + std::vector visited; + return doFind(root, path, visited); +} + LockFile::LockFile(const nlohmann::json & json, const Path & path) { auto version = json.value("version", 0); @@ -345,7 +363,7 @@ void LockFile::check() for (auto & [inputPath, input] : inputs) { if (auto follows = std::get_if<1>(&input)) { - if (!follows->empty() && !get(inputs, *follows)) + if (!follows->empty() && !findInput(*follows)) throw Error("input '%s' follows a non-existent input '%s'", printInputPath(inputPath), printInputPath(*follows)); diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 506a63677..fe3e6f7ee 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -156,7 +156,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall Outputs result; for (auto elem : outTI->listItems()) { if (elem->type() != nString) throw errMsg; - auto out = outputs.find(elem->string.s); + auto out = outputs.find(elem->c_str()); if (out == outputs.end()) throw errMsg; result.insert(*out); } @@ -230,7 +230,7 @@ std::string DrvInfo::queryMetaString(const std::string & name) { Value * v = queryMeta(name); if (!v || v->type() != nString) return ""; - return v->string.s; + return v->c_str(); } @@ -242,7 +242,7 @@ NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def) if (v->type() == nString) { /* Backwards compatibility with before we had support for integer meta fields. */ - if (auto n = string2Int(v->string.s)) + if (auto n = string2Int(v->c_str())) return *n; } return def; @@ -256,7 +256,7 @@ NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def) if (v->type() == nString) { /* Backwards compatibility with before we had support for float meta fields. */ - if (auto n = string2Float(v->string.s)) + if (auto n = string2Float(v->c_str())) return *n; } return def; @@ -271,8 +271,8 @@ bool DrvInfo::queryMetaBool(const std::string & name, bool def) if (v->type() == nString) { /* Backwards compatibility with before we had support for Boolean meta fields. 
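// Illustrative sketch, not from the Nix sources: the shape of the cycle check
// that the new doFind() above adds to lock-file "follows" resolution. Aliases
// are chased until a real node is reached; if a path is seen twice, the chain
// walked so far is reported instead of recursing forever.
#include <algorithm>
#include <iostream>
#include <map>
#include <stdexcept>
#include <string>
#include <vector>

static std::string resolveFollows(
    const std::map<std::string, std::string> & follows,
    const std::string & start,
    std::vector<std::string> & visited)
{
    if (std::find(visited.begin(), visited.end(), start) != visited.end()) {
        std::string cycle;
        for (auto & e : visited) cycle += e + " -> ";
        throw std::runtime_error("follow cycle detected: [" + cycle + start + "]");
    }
    visited.push_back(start);
    auto i = follows.find(start);
    return i == follows.end() ? start : resolveFollows(follows, i->second, visited);
}

int main()
{
    std::map<std::string, std::string> follows{{"a", "b"}, {"b", "a"}};
    std::vector<std::string> visited;
    try {
        resolveFollows(follows, "a", visited);
    } catch (std::runtime_error & e) {
        std::cout << e.what() << std::endl; // follow cycle detected: [a -> b -> a]
    }
}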
*/ - if (strcmp(v->string.s, "true") == 0) return true; - if (strcmp(v->string.s, "false") == 0) return false; + if (v->string_view() == "true") return true; + if (v->string_view() == "false") return false; } return def; } diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 4566a1388..22be8e68c 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -76,12 +76,12 @@ void Expr::show(const SymbolTable & symbols, std::ostream & str) const void ExprInt::show(const SymbolTable & symbols, std::ostream & str) const { - str << n; + str << v.integer; } void ExprFloat::show(const SymbolTable & symbols, std::ostream & str) const { - str << nf; + str << v.fpoint; } void ExprString::show(const SymbolTable & symbols, std::ostream & str) const diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 5ca3d1fa6..10099d49e 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -20,7 +20,6 @@ MakeError(Abort, EvalError); MakeError(TypeError, EvalError); MakeError(UndefinedVarError, Error); MakeError(MissingArgumentError, EvalError); -MakeError(RestrictedPathError, Error); /** * Position objects. @@ -155,6 +154,10 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath) struct Expr { + static unsigned long nrExprs; + Expr() { + nrExprs++; + } virtual ~Expr() { }; virtual void show(const SymbolTable & symbols, std::ostream & str) const; virtual void bindVars(EvalState & es, const std::shared_ptr & env); @@ -171,18 +174,16 @@ struct Expr struct ExprInt : Expr { - NixInt n; Value v; - ExprInt(NixInt n) : n(n) { v.mkInt(n); }; + ExprInt(NixInt n) { v.mkInt(n); }; Value * maybeThunk(EvalState & state, Env & env) override; COMMON_METHODS }; struct ExprFloat : Expr { - NixFloat nf; Value v; - ExprFloat(NixFloat nf) : nf(nf) { v.mkFloat(nf); }; + ExprFloat(NixFloat nf) { v.mkFloat(nf); }; Value * maybeThunk(EvalState & state, Env & env) override; COMMON_METHODS }; @@ -198,9 +199,13 @@ struct ExprString : Expr struct ExprPath : Expr { + ref accessor; std::string s; Value v; - ExprPath(std::string s) : s(std::move(s)) { v.mkPath(this->s.c_str()); }; + ExprPath(ref accessor, std::string s) : accessor(accessor), s(std::move(s)) + { + v.mkPath(&*accessor, this->s.c_str()); + } Value * maybeThunk(EvalState & state, Env & env) override; COMMON_METHODS }; @@ -238,7 +243,7 @@ struct ExprSelect : Expr PosIdx pos; Expr * e, * def; AttrPath attrPath; - ExprSelect(const PosIdx & pos, Expr * e, const AttrPath && attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(std::move(attrPath)) { }; + ExprSelect(const PosIdx & pos, Expr * e, AttrPath attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(std::move(attrPath)) { }; ExprSelect(const PosIdx & pos, Expr * e, Symbol name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); }; PosIdx getPos() const override { return pos; } COMMON_METHODS @@ -248,7 +253,7 @@ struct ExprOpHasAttr : Expr { Expr * e; AttrPath attrPath; - ExprOpHasAttr(Expr * e, const AttrPath && attrPath) : e(e), attrPath(std::move(attrPath)) { }; + ExprOpHasAttr(Expr * e, AttrPath attrPath) : e(e), attrPath(std::move(attrPath)) { }; PosIdx getPos() const override { return e->getPos(); } COMMON_METHODS }; diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 201370b90..607795937 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -520,7 +520,7 @@ path_start /* add back in the trailing '/' to the first segment */ if ($1.p[$1.l-1] == '/' && $1.l > 1) path += "/"; - $$ = new ExprPath(path); + $$ = new 
ExprPath(ref(data->state.rootFS), std::move(path)); } | HPATH { if (evalSettings.pureEval) { @@ -530,7 +530,7 @@ path_start ); } Path path(getHome() + std::string($1.p + 1, $1.l - 1)); - $$ = new ExprPath(path); + $$ = new ExprPath(ref(data->state.rootFS), std::move(path)); } ; @@ -646,13 +646,16 @@ formal #include "eval.hh" #include "filetransfer.hh" -#include "fetchers.hh" +#include "tarball.hh" #include "store-api.hh" #include "flake/flake.hh" +#include "fs-input-accessor.hh" +#include "memory-input-accessor.hh" namespace nix { +unsigned long Expr::nrExprs = 0; Expr * EvalState::parse( char * text, @@ -736,12 +739,6 @@ Expr * EvalState::parseStdin() } -void EvalState::addToSearchPath(SearchPath::Elem && elem) -{ - searchPath.elements.emplace_back(std::move(elem)); -} - - SourcePath EvalState::findFile(const std::string_view path) { return findFile(searchPath, path); @@ -761,11 +758,11 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_ auto r = *rOpt; Path res = suffix == "" ? r : concatStrings(r, "/", suffix); - if (pathExists(res)) return CanonPath(canonPath(res)); + if (pathExists(res)) return rootPath(CanonPath(canonPath(res))); } if (hasPrefix(path, "nix/")) - return CanonPath(concatStrings(corepkgsPrefix, path.substr(4))); + return {corepkgsFS, CanonPath(path.substr(3))}; debugThrow(ThrownError({ .msg = hintfmt(evalSettings.pureEval @@ -788,7 +785,7 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa if (EvalSettings::isPseudoUrl(value)) { try { auto storePath = fetchers::downloadTarball( - store, EvalSettings::resolvePseudoUrl(value), "source", false).tree.storePath; + store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath; res = { store->toRealPath(storePath) }; } catch (FileTransferError & e) { logWarning({ @@ -802,7 +799,7 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa experimentalFeatureSettings.require(Xp::Flakes); auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false); debug("fetching flake search path element '%s''", value); - auto storePath = flakeRef.resolve(store).fetchTree(store).first.storePath; + auto storePath = flakeRef.resolve(store).fetchTree(store).first; res = { store->toRealPath(storePath) }; } diff --git a/src/libexpr/paths.cc b/src/libexpr/paths.cc index 1d690b722..099607638 100644 --- a/src/libexpr/paths.cc +++ b/src/libexpr/paths.cc @@ -1,10 +1,11 @@ #include "eval.hh" +#include "fs-input-accessor.hh" namespace nix { SourcePath EvalState::rootPath(CanonPath path) { - return std::move(path); + return {rootFS, std::move(path)}; } } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 430607214..5033d4e2d 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -56,7 +56,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context) .drvPath = b.drvPath, .outputs = OutputsSpec::Names { b.output }, }); - ensureValid(b.drvPath); + ensureValid(b.drvPath->getBaseStorePath()); }, [&](const NixStringContextElem::Opaque & o) { auto ctxS = store->printStorePath(o.path); @@ -69,7 +69,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context) res.insert_or_assign(ctxS, ctxS); ensureValid(d.drvPath); }, - }, c.raw()); + }, c.raw); } if (drvs.empty()) return {}; @@ -77,7 +77,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context) if (!evalSettings.enableImportFromDerivation) debugThrowLastTrace(Error( "cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is 
disabled", - store->printStorePath(drvs.begin()->drvPath))); + drvs.begin()->to_string(*store))); /* Build/substitute the context. */ std::vector buildReqs; @@ -95,7 +95,11 @@ StringMap EvalState::realiseContext(const NixStringContext & context) /* Get all the output paths corresponding to the placeholders we had */ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { res.insert_or_assign( - DownstreamPlaceholder::unknownCaOutput(drv.drvPath, outputName).render(), + DownstreamPlaceholder::fromSingleDerivedPathBuilt( + SingleDerivedPath::Built { + .drvPath = drv.drvPath, + .output = outputName, + }).render(), store->printStorePath(outputPath) ); } @@ -117,13 +121,15 @@ static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, co auto path = state.coerceToPath(noPos, v, context, "while realising the context of a path"); try { - StringMap rewrites = state.realiseContext(context); - - auto realPath = state.rootPath(CanonPath(state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context))); + if (!context.empty()) { + auto rewrites = state.realiseContext(context); + auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context); + return {path.accessor, CanonPath(realPath)}; + } return flags.checkForPureEval - ? state.checkSourcePath(realPath) - : realPath; + ? state.checkSourcePath(path) + : path; } catch (Error & e) { e.addTrace(state.positions[pos], "while realising the context of path '%s'", path); throw; @@ -152,8 +158,10 @@ static void mkOutputString( { state.mkOutputString( attrs.alloc(o.first), - drvPath, - o.first, + SingleDerivedPath::Built { + .drvPath = makeConstantStorePathRef(drvPath), + .output = o.first, + }, o.second.path(*state.store, Derivation::nameFromPath(drvPath), o.first)); } @@ -196,7 +204,7 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v state.vImportedDrvToDerivation = allocRootValue(state.allocValue()); state.eval(state.parseExprFromString( #include "imported-drv-to-derivation.nix.gen.hh" - , CanonPath::root), **state.vImportedDrvToDerivation); + , state.rootPath(CanonPath::root)), **state.vImportedDrvToDerivation); } state.forceFunction(**state.vImportedDrvToDerivation, pos, "while evaluating imported-drv-to-derivation.nix.gen.hh"); @@ -204,12 +212,6 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v state.forceAttrs(v, pos, "while calling imported-drv-to-derivation.nix.gen.hh"); } - else if (path2 == corepkgsPrefix + "fetchurl.nix") { - state.eval(state.parseExprFromString( - #include "fetchurl.nix.gen.hh" - , CanonPath::root), v); - } - else { if (!vScope) state.evalFile(path, v); @@ -252,64 +254,71 @@ static RegisterPrimOp primop_import({ .args = {"path"}, // TODO turn "normal path values" into link below .doc = R"( - Load, parse and return the Nix expression in the file *path*. - - The value *path* can be a path, a string, or an attribute set with an - `__toString` attribute or a `outPath` attribute (as derivations or flake - inputs typically have). - - If *path* is a directory, the file `default.nix` in that directory - is loaded. - - Evaluation aborts if the file doesn’t exist or contains - an incorrect Nix expression. `import` implements Nix’s module - system: you can put any Nix expression (such as a set or a - function) in a separate file, and use it from Nix expressions in - other files. + Load, parse, and return the Nix expression in the file *path*. > **Note** > > Unlike some languages, `import` is a regular function in Nix. 
- > Paths using the angle bracket syntax (e.g., `import` *\*) - > are normal [path values](@docroot@/language/values.md#type-path). - A Nix expression loaded by `import` must not contain any *free - variables* (identifiers that are not defined in the Nix expression - itself and are not built-in). Therefore, it cannot refer to - variables that are in scope at the call site. For instance, if you - have a calling expression + The *path* argument must meet the same criteria as an [interpolated expression](@docroot@/language/string-interpolation.md#interpolated-expression). - ```nix - rec { - x = 123; - y = import ./foo.nix; - } - ``` + If *path* is a directory, the file `default.nix` in that directory is used if it exists. - then the following `foo.nix` will give an error: + > **Example** + > + > ```console + > $ echo 123 > default.nix + > ``` + > + > Import `default.nix` from the current directory. + > + > ```nix + > import ./. + > ``` + > + > 123 - ```nix - x + 456 - ``` + Evaluation aborts if the file doesn’t exist or contains an invalid Nix expression. - since `x` is not in scope in `foo.nix`. If you want `x` to be - available in `foo.nix`, you should pass it as a function argument: + A Nix expression loaded by `import` must not contain any *free variables*, that is, identifiers that are not defined in the Nix expression itself and are not built-in. + Therefore, it cannot refer to variables that are in scope at the call site. - ```nix - rec { - x = 123; - y = import ./foo.nix x; - } - ``` - - and - - ```nix - x: x + 456 - ``` - - (The function argument doesn’t have to be called `x` in `foo.nix`; - any name would work.) + > **Example** + > + > If you have a calling expression + > + > ```nix + > rec { + > x = 123; + > y = import ./foo.nix; + > } + > ``` + > + > then the following `foo.nix` will give an error: + > + > ```nix + > # foo.nix + > x + 456 + > ``` + > + > since `x` is not in scope in `foo.nix`. + > If you want `x` to be available in `foo.nix`, pass it as a function argument: + > + > ```nix + > rec { + > x = 123; + > y = import ./foo.nix x; + > } + > ``` + > + > and + > + > ```nix + > # foo.nix + > x: x + 456 + > ``` + > + > The function argument doesn’t have to be called `x` in `foo.nix`; any name would work. )", .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) { @@ -584,9 +593,12 @@ struct CompareValues case nFloat: return v1->fpoint < v2->fpoint; case nString: - return strcmp(v1->string.s, v2->string.s) < 0; + return v1->string_view().compare(v2->string_view()) < 0; case nPath: - return strcmp(v1->_path, v2->_path) < 0; + // Note: we don't take the accessor into account + // since it's not obvious how to compare them in a + // reproducible way. 
+ return strcmp(v1->_path.path, v2->_path.path) < 0; case nList: // Lexicographic comparison for (size_t i = 0;; i++) { @@ -721,6 +733,14 @@ static RegisterPrimOp primop_genericClosure(PrimOp { ``` [ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ] ``` + + `key` can be one of the following types: + - [Number](@docroot@/language/values.md#type-number) + - [Boolean](@docroot@/language/values.md#type-boolean) + - [String](@docroot@/language/values.md#type-string) + - [Path](@docroot@/language/values.md#type-path) + - [List](@docroot@/language/values.md#list) + )", .fun = prim_genericClosure, }); @@ -976,7 +996,7 @@ static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Valu { state.forceValue(*args[0], pos); if (args[0]->type() == nString) - printError("trace: %1%", args[0]->string.s); + printError("trace: %1%", args[0]->string_view()); else printError("trace: %1%", printValue(state, *args[0])); state.forceValue(*args[1], pos); @@ -1246,17 +1266,18 @@ drvName, Bindings * attrs, Value & v) state.store->computeFSClosure(d.drvPath, refs); for (auto & j : refs) { drv.inputSrcs.insert(j); - if (j.isDerivation()) - drv.inputDrvs[j] = state.store->readDerivation(j).outputNames(); + if (j.isDerivation()) { + drv.inputDrvs.map[j].value = state.store->readDerivation(j).outputNames(); + } } }, [&](const NixStringContextElem::Built & b) { - drv.inputDrvs[b.drvPath].insert(b.output); + drv.inputDrvs.ensureSlot(*b.drvPath).value.insert(b.output); }, [&](const NixStringContextElem::Opaque & o) { drv.inputSrcs.insert(o.path); }, - }, c.raw()); + }, c.raw); } /* Do we have all required attributes? */ @@ -1325,13 +1346,13 @@ drvName, Bindings * attrs, Value & v) if (isImpure) drv.outputs.insert_or_assign(i, DerivationOutput::Impure { - .method = method.raw, + .method = method, .hashType = ht, }); else drv.outputs.insert_or_assign(i, DerivationOutput::CAFloating { - .method = method.raw, + .method = method, .hashType = ht, }); } @@ -1364,7 +1385,7 @@ drvName, Bindings * attrs, Value & v) drv.env[i] = state.store->printStorePath(outPath); drv.outputs.insert_or_assign( i, - DerivationOutputInputAddressed { + DerivationOutput::InputAddressed { .path = std::move(outPath), }); } @@ -1372,7 +1393,7 @@ drvName, Bindings * attrs, Value & v) ; case DrvHash::Kind::Deferred: for (auto & i : outputs) { - drv.outputs.insert_or_assign(i, DerivationOutputDeferred {}); + drv.outputs.insert_or_assign(i, DerivationOutput::Deferred {}); } } } @@ -1471,7 +1492,7 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, })); NixStringContext context; - auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.storePath")).path; + auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'")).path; /* Resolve symlinks in ‘path’, unless ‘path’ itself is a symlink directly in the store. The latter condition is necessary so e.g. nix-push does the right thing. 
 */
@@ -1511,15 +1532,27 @@ static RegisterPrimOp primop_storePath({
 
 static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
+    auto & arg = *args[0];
+
     /* We don’t check the path right now, because we don’t want to
        throw if the path isn’t allowed, but just return false (and we
        can’t just catch the exception here because we still want to
-       throw if something in the evaluation of `*args[0]` tries to
+       throw if something in the evaluation of `arg` tries to
        access an unauthorized path). */
-    auto path = realisePath(state, pos, *args[0], { .checkForPureEval = false });
+    auto path = realisePath(state, pos, arg, { .checkForPureEval = false });
+
+    /* SourcePath doesn't know about trailing slash. */
+    auto mustBeDir = arg.type() == nString
+        && (arg.string_view().ends_with("/")
+            || arg.string_view().ends_with("/."));
 
     try {
-        v.mkBool(state.checkSourcePath(path).pathExists());
+        auto checked = state.checkSourcePath(path);
+        auto exists = checked.pathExists();
+        if (exists && mustBeDir) {
+            exists = checked.lstat().type == InputAccessor::tDirectory;
+        }
+        v.mkBool(exists);
     } catch (SysError & e) {
         /* Don't give away info from errors while canonicalising
           ‘path’ in restricted mode. */
@@ -1672,13 +1705,14 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
 
 static RegisterPrimOp primop_findFile(PrimOp {
     .name = "__findFile",
-    .args = {"search path", "lookup path"},
+    .args = {"search-path", "lookup-path"},
     .doc = R"(
-      Look up the given path with the given search path.
+      Find *lookup-path* in *search-path*.
 
-      A search path is represented list of [attribute sets](./values.md#attribute-set) with two attributes, `prefix`, and `path`.
-      `prefix` is a relative path.
-      `path` denotes a file system location; the exact syntax depends on the command line interface.
+      A search path is represented as a list of [attribute sets](./values.md#attribute-set) with two attributes:
+      - `prefix` is a relative path.
+      - `path` denotes a file system location.
+        The exact syntax depends on the command line interface.
 
       Examples of search path attribute sets:
 
@@ -1696,15 +1730,14 @@ static RegisterPrimOp primop_findFile(PrimOp {
       }
      ```
 
-      The lookup algorithm checks each entry until a match is found, returning a [path value](@docroot@/language/values.html#type-path) of the match.
+      The lookup algorithm checks each entry until a match is found, returning a [path value](@docroot@/language/values.html#type-path) of the match:
 
-      This is the process for each entry:
-      If the lookup path matches `prefix`, then the remainder of the lookup path (the "suffix") is searched for within the directory denoted by `patch`.
-      Note that the `path` may need to be downloaded at this point to look inside.
-      If the suffix is found inside that directory, then the entry is a match;
-      the combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
+      - If *lookup-path* matches `prefix`, then the remainder of *lookup-path* (the "suffix") is searched for within the directory denoted by `path`.
+        Note that the `path` may need to be downloaded at this point to look inside.
+      - If the suffix is found inside that directory, then the entry is a match.
+        The combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
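To make the lookup algorithm above concrete, here is a small sketch; the search-path entry and the checkout directory `/home/alice/nixpkgs` are made up for illustration:

```nix
# The entry's prefix "nixpkgs" matches the lookup path "nixpkgs/lib",
# so the suffix "lib" is searched for inside /home/alice/nixpkgs;
# if it exists there, the combined path value is returned.
builtins.findFile
  [ { prefix = "nixpkgs"; path = "/home/alice/nixpkgs"; } ]
  "nixpkgs/lib"
# => /home/alice/nixpkgs/lib (assuming that directory exists)
```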
-      The syntax
+      [Lookup path](@docroot@/language/constructs/lookup-path.md) expressions can be [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath):
 
      ```nix
@@ -1732,7 +1765,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V
 
     auto path = realisePath(state, pos, *args[1]);
 
-    v.mkString(hashString(*ht, path.readFile()).to_string(Base16, false));
+    v.mkString(hashString(*ht, path.readFile()).to_string(HashFormat::Base16, false));
 }
 
 static RegisterPrimOp primop_hashFile({
@@ -1829,6 +1862,45 @@ static RegisterPrimOp primop_readDir({
     .fun = prim_readDir,
 });
 
+/* Extend single element string context with another output. */
+static void prim_outputOf(EvalState & state, const PosIdx pos, Value * * args, Value & v)
+{
+    SingleDerivedPath drvPath = state.coerceToSingleDerivedPath(pos, *args[0], "while evaluating the first argument to builtins.outputOf");
+
+    OutputNameView outputName = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument to builtins.outputOf");
+
+    state.mkSingleDerivedPathString(
+        SingleDerivedPath::Built {
+            .drvPath = make_ref(drvPath),
+            .output = std::string { outputName },
+        },
+        v);
+}
+
+static RegisterPrimOp primop_outputOf({
+    .name = "__outputOf",
+    .args = {"derivation-reference", "output-name"},
+    .doc = R"(
+      Return the output path of a derivation, literally or using a placeholder if needed.
+
+      If the derivation has a statically-known output path (i.e. the derivation output is input-addressed, or fixed content-addressed), the output path will just be returned.
+      But if the derivation is content-addressed or if the derivation is itself not statically produced (i.e. is the output of another derivation), a placeholder will be returned instead.
+
+      *`derivation-reference`* must be a string that may contain a regular store path to a derivation, or may be a placeholder reference. If the derivation is produced by a derivation, you must explicitly select `drv.outPath`.
+      This primop can be chained arbitrarily deeply.
+      For instance,
+      ```nix
+      builtins.outputOf
+        (builtins.outputOf myDrv "out")
+        "out"
+      ```
+      will return a placeholder for the output of the output of `myDrv`.
+
+      This primop corresponds to the `^` sigil for derivable paths, e.g. as part of installable syntax on the command line.
+    )",
+    .fun = prim_outputOf,
+    .experimentalFeature = Xp::DynamicDerivations,
+});
 
 /*************************************************************
  * Creating files
@@ -2006,7 +2078,7 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
 
     StorePathSet refs;
 
     for (auto c : context) {
-        if (auto p = std::get_if(&c))
+        if (auto p = std::get_if(&c.raw))
            refs.insert(p->path);
        else
            state.debugThrowLastTrace(EvalError({
@@ -2138,7 +2210,7 @@ static void addPath(
 
        path = evalSettings.pureEval && expectedHash
            ? path
-           : state.checkSourcePath(CanonPath(path)).path.abs();
+           : state.checkSourcePath(state.rootPath(CanonPath(path))).path.abs();
 
        PathFilter filter = filterFun ? ([&](const Path & path) {
            auto st = lstat(path);
@@ -2171,9 +2243,7 @@ static void addPath(
        });
 
        if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
-            StorePath dstPath = settings.readOnlyMode
-                ?
state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first - : state.store->addToStore(name, path, method, htSHA256, filter, state.repair, refs); + auto dstPath = state.rootPath(CanonPath(path)).fetchToStore(state.store, name, method, &filter, state.repair); if (expectedHash && expectedStorePath != dstPath) state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path)); state.allowAndSetStorePathString(dstPath, v); @@ -2190,7 +2260,7 @@ static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * arg { NixStringContext context; auto path = state.coerceToPath(pos, *args[1], context, - "while evaluating the second argument (the path to filter) passed to builtins.filterSource"); + "while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'"); state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource"); addPath(state, pos, path.baseName(), path.path.abs(), args[0], FileIngestionMethod::Recursive, std::nullopt, v, context); } @@ -2344,7 +2414,7 @@ static void prim_attrNames(EvalState & state, const PosIdx pos, Value * * args, (v.listElems()[n++] = state.allocValue())->mkString(state.symbols[i.name]); std::sort(v.listElems(), v.listElems() + n, - [](Value * v1, Value * v2) { return strcmp(v1->string.s, v2->string.s) < 0; }); + [](Value * v1, Value * v2) { return v1->string_view().compare(v2->string_view()) < 0; }); } static RegisterPrimOp primop_attrNames({ @@ -2485,7 +2555,7 @@ static void prim_removeAttrs(EvalState & state, const PosIdx pos, Value * * args names.reserve(args[1]->listSize()); for (auto elem : args[1]->listItems()) { state.forceStringNoCtx(*elem, pos, "while evaluating the values of the second argument passed to builtins.removeAttrs"); - names.emplace_back(state.symbols.create(elem->string.s), nullptr); + names.emplace_back(state.symbols.create(elem->string_view()), nullptr); } std::sort(names.begin(), names.end()); @@ -2935,7 +3005,7 @@ static RegisterPrimOp primop_tail({ .name = "__tail", .args = {"list"}, .doc = R"( - Return the second to last elements of a list; abort evaluation if + Return the list without its first item; abort evaluation if the argument isn’t a list or is an empty list. 
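For instance (a trivial sketch of the documented behaviour):

```nix
builtins.tail [ 1 2 3 ]
# => [ 2 3 ]

builtins.tail [ ]
# aborts evaluation, since an empty list has no first item to drop
```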
> **Warning** @@ -3695,7 +3765,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, NixStringContext context; // discarded auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString"); - v.mkString(hashString(*ht, s).to_string(Base16, false)); + v.mkString(hashString(*ht, s).to_string(HashFormat::Base16, false)); } static RegisterPrimOp primop_hashString({ @@ -3709,6 +3779,101 @@ static RegisterPrimOp primop_hashString({ .fun = prim_hashString, }); +static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args, Value & v) +{ + state.forceAttrs(*args[0], pos, "while evaluating the first argument passed to builtins.convertHash"); + auto &inputAttrs = args[0]->attrs; + + Bindings::iterator iteratorHash = getAttr(state, state.symbols.create("hash"), inputAttrs, "while locating the attribute 'hash'"); + auto hash = state.forceStringNoCtx(*iteratorHash->value, pos, "while evaluating the attribute 'hash'"); + + Bindings::iterator iteratorHashAlgo = inputAttrs->find(state.symbols.create("hashAlgo")); + std::optional ht = std::nullopt; + if (iteratorHashAlgo != inputAttrs->end()) { + ht = parseHashType(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'")); + } + + Bindings::iterator iteratorToHashFormat = getAttr(state, state.symbols.create("toHashFormat"), args[0]->attrs, "while locating the attribute 'toHashFormat'"); + HashFormat hf = parseHashFormat(state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'")); + + v.mkString(Hash::parseAny(hash, ht).to_string(hf, hf == HashFormat::SRI)); +} + +static RegisterPrimOp primop_convertHash({ + .name = "__convertHash", + .args = {"args"}, + .doc = R"( + Return the specified representation of a hash string, based on the attributes presented in *args*: + + - `hash` + + The hash to be converted. + The hash format is detected automatically. + + - `hashAlgo` + + The algorithm used to create the hash. Must be one of + - `"md5"` + - `"sha1"` + - `"sha256"` + - `"sha512"` + + The attribute may be omitted when `hash` is an [SRI hash](https://www.w3.org/TR/SRI/#the-integrity-attribute) or when the hash is prefixed with the hash algorithm name followed by a colon. + That `:` syntax is supported for backwards compatibility with existing tooling. + + - `toHashFormat` + + The format of the resulting hash. Must be one of + - `"base16"` + - `"base32"` + - `"base64"` + - `"sri"` + + The result hash is the *toHashFormat* representation of the hash *hash*. 
+ + > **Example** + > + > Convert a SHA256 hash in Base16 to SRI: + > + > ```nix + > builtins.convertHash { + > hash = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + > toHashFormat = "sri"; + > hashAlgo = "sha256"; + > } + > ``` + > + > "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" + + > **Example** + > + > Convert a SHA256 hash in SRI to Base16: + > + > ```nix + > builtins.convertHash { + > hash = "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="; + > toHashFormat = "base16"; + > } + > ``` + > + > "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" + + > **Example** + > + > Convert a hash in the form `:` in Base16 to SRI: + > + > ```nix + > builtins.convertHash { + > hash = "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + > toHashFormat = "sri"; + > } + > ``` + > + > "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" + )", + .fun = prim_convertHash, +}); + struct RegexCache { // TODO use C++20 transparent comparison when available @@ -4331,9 +4496,9 @@ void EvalState::createBaseEnv() addConstant("__nixPath", v, { .type = nList, .doc = R"( - The search path used to resolve angle bracket path lookups. + List of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md). - Angle bracket expressions can be + Lookup path expressions can be [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.findFile`](./builtins.html#builtins-findFile): @@ -4377,12 +4542,7 @@ void EvalState::createBaseEnv() /* Note: we have to initialize the 'derivation' constant *after* building baseEnv/staticBaseEnv because it uses 'builtins'. */ - char code[] = - #include "primops/derivation.nix.gen.hh" - // the parser needs two NUL bytes as terminators; one of them - // is implied by being a C string. - "\0"; - eval(parse(code, sizeof(code), derivationInternal, {CanonPath::root}, staticBaseEnv), *vDerivation); + evalFile(derivationInternal, *vDerivation); } diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index 8b3468009..e8542503a 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -51,13 +51,13 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx p NixStringContext context2; for (auto && c : context) { - if (auto * ptr = std::get_if(&c)) { + if (auto * ptr = std::get_if(&c.raw)) { context2.emplace(NixStringContextElem::Opaque { .path = ptr->drvPath }); } else { /* Can reuse original item */ - context2.emplace(std::move(c)); + context2.emplace(std::move(c).raw); } } @@ -106,12 +106,15 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args, contextInfos[std::move(d.drvPath)].allOutputs = true; }, [&](NixStringContextElem::Built && b) { - contextInfos[std::move(b.drvPath)].outputs.emplace_back(std::move(b.output)); + // FIXME should eventually show string context as is, no + // resolving here. 
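As a sketch of the `__nixPath`/`builtins.findFile` desugaring described above (assuming an entry for `nixpkgs` is present on the search path):

```nix
# A lookup path expression such as <nixpkgs> can be rewritten as:
builtins.findFile builtins.nixPath "nixpkgs"
```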
+ auto drvPath = resolveDerivedPath(*state.store, *b.drvPath); + contextInfos[std::move(drvPath)].outputs.emplace_back(std::move(b.output)); }, [&](NixStringContextElem::Opaque && o) { contextInfos[std::move(o.path)].path = true; }, - }, ((NixStringContextElem &&) i).raw()); + }, ((NixStringContextElem &&) i).raw); } auto attrs = state.buildBindings(contextInfos.size()); @@ -222,7 +225,7 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar for (auto elem : iter->value->listItems()) { auto outputName = state.forceStringNoCtx(*elem, iter->pos, "while evaluating an output name within a string context"); context.emplace(NixStringContextElem::Built { - .drvPath = namePath, + .drvPath = makeConstantStorePathRef(namePath), .output = std::string { outputName }, }); } diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index 7fe8203f4..b86ef6b93 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -133,7 +133,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg else if (attrName == "toPath") { state.forceValue(*attr.value, attr.pos); - bool isEmptyString = attr.value->type() == nString && attr.value->string.s == std::string(""); + bool isEmptyString = attr.value->type() == nString && attr.value->string_view() == ""; if (isEmptyString) { toPath = StorePathOrGap {}; } diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index b9ff01c16..e76ce455d 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -71,10 +71,10 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a auto input = fetchers::Input::fromAttrs(std::move(attrs)); // FIXME: use name - auto [tree, input2] = input.fetch(state.store); + auto [storePath, input2] = input.fetch(state.store); auto attrs2 = state.buildBindings(8); - state.mkStorePathString(tree.storePath, attrs2.alloc(state.sOutPath)); + state.mkStorePathString(storePath, attrs2.alloc(state.sOutPath)); if (input2.getRef()) attrs2.alloc("branch").mkString(*input2.getRef()); // Backward compatibility: set 'rev' to @@ -86,7 +86,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a attrs2.alloc("revCount").mkInt(*revCount); v.mkAttrs(attrs2); - state.allowPath(tree.storePath); + state.allowPath(storePath); } static RegisterPrimOp r_fetchMercurial({ diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index f040a3510..a99b0e500 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -5,6 +5,7 @@ #include "fetchers.hh" #include "filetransfer.hh" #include "registry.hh" +#include "tarball.hh" #include "url.hh" #include @@ -15,7 +16,7 @@ namespace nix { void emitTreeAttrs( EvalState & state, - const fetchers::Tree & tree, + const StorePath & storePath, const fetchers::Input & input, Value & v, bool emptyRevFallback, @@ -25,14 +26,13 @@ void emitTreeAttrs( auto attrs = state.buildBindings(10); - - state.mkStorePathString(tree.storePath, attrs.alloc(state.sOutPath)); + state.mkStorePathString(storePath, attrs.alloc(state.sOutPath)); // FIXME: support arbitrary input attributes. 
auto narHash = input.getNarHash(); assert(narHash); - attrs.alloc("narHash").mkString(narHash->to_string(SRI, true)); + attrs.alloc("narHash").mkString(narHash->to_string(HashFormat::SRI, true)); if (input.getType() == "git") attrs.alloc("submodules").mkBool( @@ -71,36 +71,10 @@ void emitTreeAttrs( v.mkAttrs(attrs); } -std::string fixURI(std::string uri, EvalState & state, const std::string & defaultScheme = "file") -{ - state.checkURI(uri); - if (uri.find("://") == std::string::npos) { - const auto p = ParsedURL { - .scheme = defaultScheme, - .authority = "", - .path = uri - }; - return p.to_string(); - } else { - return uri; - } -} - -std::string fixURIForGit(std::string uri, EvalState & state) -{ - /* Detects scp-style uris (e.g. git@github.com:NixOS/nix) and fixes - * them by removing the `:` and assuming a scheme of `ssh://` - * */ - static std::regex scp_uri("([^/]*)@(.*):(.*)"); - if (uri[0] != '/' && std::regex_match(uri, scp_uri)) - return fixURI(std::regex_replace(uri, scp_uri, "$1@$2/$3"), state, "ssh"); - else - return fixURI(uri, state); -} - struct FetchTreeParams { bool emptyRevFallback = false; bool allowNameArgument = false; + bool isFetchGit = false; }; static void fetchTree( @@ -108,11 +82,12 @@ static void fetchTree( const PosIdx pos, Value * * args, Value & v, - std::optional type, const FetchTreeParams & params = FetchTreeParams{} ) { fetchers::Input input; NixStringContext context; + std::optional type; + if (params.isFetchGit) type = "git"; state.forceValue(*args[0], pos); @@ -142,10 +117,8 @@ static void fetchTree( if (attr.value->type() == nPath || attr.value->type() == nString) { auto s = state.coerceToString(attr.pos, *attr.value, context, "", false, false).toOwned(); attrs.emplace(state.symbols[attr.name], - state.symbols[attr.name] == "url" - ? type == "git" - ? fixURIForGit(s, state) - : fixURI(s, state) + params.isFetchGit && state.symbols[attr.name] == "url" + ? 
fixGitURL(s) : s); } else if (attr.value->type() == nBool) @@ -170,40 +143,86 @@ static void fetchTree( "while evaluating the first argument passed to the fetcher", false, false).toOwned(); - if (type == "git") { + if (params.isFetchGit) { fetchers::Attrs attrs; attrs.emplace("type", "git"); - attrs.emplace("url", fixURIForGit(url, state)); + attrs.emplace("url", fixGitURL(url)); input = fetchers::Input::fromAttrs(std::move(attrs)); } else { - input = fetchers::Input::fromURL(fixURI(url, state)); + if (!experimentalFeatureSettings.isEnabled(Xp::Flakes)) + state.debugThrowLastTrace(EvalError({ + .msg = hintfmt("passing a string argument to 'fetchTree' requires the 'flakes' experimental feature"), + .errPos = state.positions[pos] + })); + input = fetchers::Input::fromURL(url); } } - if (!evalSettings.pureEval && !input.isDirect()) + if (!evalSettings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes)) input = lookupInRegistries(state.store, input).first; if (evalSettings.pureEval && !input.isLocked()) state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos])); - auto [tree, input2] = input.fetch(state.store); + state.checkURI(input.toURLString()); - state.allowPath(tree.storePath); + auto [storePath, input2] = input.fetch(state.store); - emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback, false); + state.allowPath(storePath); + + emitTreeAttrs(state, storePath, input2, v, params.emptyRevFallback, false); } static void prim_fetchTree(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - experimentalFeatureSettings.require(Xp::Flakes); - fetchTree(state, pos, args, v, std::nullopt, FetchTreeParams { .allowNameArgument = false }); + fetchTree(state, pos, args, v, { }); } -// FIXME: document static RegisterPrimOp primop_fetchTree({ .name = "fetchTree", - .arity = 1, - .fun = prim_fetchTree + .args = {"input"}, + .doc = R"( + Fetch a source tree or a plain file using one of the supported backends. + *input* must be a [flake reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references), either in attribute set representation or in the URL-like syntax. + The input should be "locked", that is, it should contain a commit hash or content hash unless impure evaluation (`--impure`) is enabled. + + > **Note** + > + > The URL-like syntax requires the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled. 
+ + Here are some examples of how to use `fetchTree`: + + - Fetch a GitHub repository using the attribute set representation: + + ```nix + builtins.fetchTree { + type = "github"; + owner = "NixOS"; + repo = "nixpkgs"; + rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; + } + ``` + + This evaluates to the following attribute set: + + ``` + { + lastModified = 1686503798; + lastModifiedDate = "20230611171638"; + narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc="; + outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source"; + rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; + shortRev = "ae2e6b3"; + } + ``` + + - Fetch the same GitHub repository using the URL-like syntax: + + ``` + builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd" + ``` + )", + .fun = prim_fetchTree, }); static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v, @@ -270,7 +289,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v // https://github.com/NixOS/nix/issues/4313 auto storePath = unpack - ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).tree.storePath + ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).storePath : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath; if (expectedHash) { @@ -279,7 +298,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v : hashFile(htSHA256, state.store->toRealPath(storePath)); if (hash != *expectedHash) state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", - *url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true))); + *url, expectedHash->to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true))); } state.allowAndSetStorePathString(storePath, v); @@ -353,7 +372,12 @@ static RegisterPrimOp primop_fetchTarball({ static void prim_fetchGit(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - fetchTree(state, pos, args, v, "git", FetchTreeParams { .emptyRevFallback = true, .allowNameArgument = true }); + fetchTree(state, pos, args, v, + FetchTreeParams { + .emptyRevFallback = true, + .allowNameArgument = true, + .isFetchGit = true + }); } static RegisterPrimOp primop_fetchGit({ diff --git a/src/libexpr/search-path.cc b/src/libexpr/search-path.cc index 36bb4c3a5..180d5f8b1 100644 --- a/src/libexpr/search-path.cc +++ b/src/libexpr/search-path.cc @@ -10,7 +10,7 @@ std::optional SearchPath::Prefix::suffixIfPotentialMatch( /* Non-empty prefix and suffix must be separated by a /, or the prefix is not a valid path prefix. 
*/ - bool needSeparator = n > 0 && (path.size() - n) > 0; + bool needSeparator = n > 0 && n < path.size(); if (needSeparator && path[n] != '/') { return std::nullopt; diff --git a/src/libexpr/tests/derived-path.cc b/src/libexpr/tests/derived-path.cc index c713fe28a..d5fc6f201 100644 --- a/src/libexpr/tests/derived-path.cc +++ b/src/libexpr/tests/derived-path.cc @@ -18,15 +18,17 @@ TEST_F(DerivedPathExpressionTest, force_init) { } +#ifndef COVERAGE + RC_GTEST_FIXTURE_PROP( DerivedPathExpressionTest, prop_opaque_path_round_trip, - (const DerivedPath::Opaque & o)) + (const SingleDerivedPath::Opaque & o)) { auto * v = state.allocValue(); state.mkStorePathString(o.path, *v); - auto d = state.coerceToDerivedPath(noPos, *v, ""); - RC_ASSERT(DerivedPath { o } == d); + auto d = state.coerceToSingleDerivedPath(noPos, *v, ""); + RC_ASSERT(SingleDerivedPath { o } == d); } // TODO use DerivedPath::Built for parameter once it supports a single output @@ -34,8 +36,8 @@ RC_GTEST_FIXTURE_PROP( RC_GTEST_FIXTURE_PROP( DerivedPathExpressionTest, - prop_built_path_placeholder_round_trip, - (const StorePath & drvPath, const StorePathName & outputName)) + prop_derived_path_built_placeholder_round_trip, + (const SingleDerivedPath::Built & b)) { /** * We set these in tests rather than the regular globals so we don't have @@ -45,28 +47,22 @@ RC_GTEST_FIXTURE_PROP( mockXpSettings.set("experimental-features", "ca-derivations"); auto * v = state.allocValue(); - state.mkOutputString(*v, drvPath, outputName.name, std::nullopt, mockXpSettings); - auto [d, _] = state.coerceToDerivedPathUnchecked(noPos, *v, ""); - DerivedPath::Built b { - .drvPath = drvPath, - .outputs = OutputsSpec::Names { outputName.name }, - }; - RC_ASSERT(DerivedPath { b } == d); + state.mkOutputString(*v, b, std::nullopt, mockXpSettings); + auto [d, _] = state.coerceToSingleDerivedPathUnchecked(noPos, *v, ""); + RC_ASSERT(SingleDerivedPath { b } == d); } RC_GTEST_FIXTURE_PROP( DerivedPathExpressionTest, - prop_built_path_out_path_round_trip, - (const StorePath & drvPath, const StorePathName & outputName, const StorePath & outPath)) + prop_derived_path_built_out_path_round_trip, + (const SingleDerivedPath::Built & b, const StorePath & outPath)) { auto * v = state.allocValue(); - state.mkOutputString(*v, drvPath, outputName.name, outPath); - auto [d, _] = state.coerceToDerivedPathUnchecked(noPos, *v, ""); - DerivedPath::Built b { - .drvPath = drvPath, - .outputs = OutputsSpec::Names { outputName.name }, - }; - RC_ASSERT(DerivedPath { b } == d); + state.mkOutputString(*v, b, outPath); + auto [d, _] = state.coerceToSingleDerivedPathUnchecked(noPos, *v, ""); + RC_ASSERT(SingleDerivedPath { b } == d); } +#endif + } /* namespace nix */ diff --git a/src/libexpr/tests/error_traces.cc b/src/libexpr/tests/error_traces.cc index 285651256..139366bcd 100644 --- a/src/libexpr/tests/error_traces.cc +++ b/src/libexpr/tests/error_traces.cc @@ -310,7 +310,7 @@ namespace nix { ASSERT_TRACE2("storePath true", TypeError, hintfmt("cannot coerce %s to a string", "a Boolean"), - hintfmt("while evaluating the first argument passed to builtins.storePath")); + hintfmt("while evaluating the first argument passed to 'builtins.storePath'")); } @@ -378,12 +378,12 @@ namespace nix { ASSERT_TRACE2("filterSource [] []", TypeError, hintfmt("cannot coerce %s to a string", "a list"), - hintfmt("while evaluating the second argument (the path to filter) passed to builtins.filterSource")); + hintfmt("while evaluating the second argument (the path to filter) passed to 
'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] \"foo\"", EvalError, hintfmt("string '%s' doesn't represent an absolute path", "foo"), - hintfmt("while evaluating the second argument (the path to filter) passed to builtins.filterSource")); + hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] ./.", TypeError, @@ -1084,7 +1084,7 @@ namespace nix { ASSERT_TRACE1("hashString \"foo\" \"content\"", UsageError, - hintfmt("unknown hash algorithm '%s'", "foo")); + hintfmt("unknown hash algorithm '%s', expect 'md5', 'sha1', 'sha256', or 'sha512'", "foo")); ASSERT_TRACE2("hashString \"sha256\" {}", TypeError, diff --git a/src/libexpr/tests/flakeref.cc b/src/libexpr/tests/flakeref.cc new file mode 100644 index 000000000..2b7809b93 --- /dev/null +++ b/src/libexpr/tests/flakeref.cc @@ -0,0 +1,22 @@ +#include + +#include "flake/flakeref.hh" + +namespace nix { + +/* ----------- tests for flake/flakeref.hh --------------------------------------------------*/ + + /* ---------------------------------------------------------------------------- + * to_string + * --------------------------------------------------------------------------*/ + + TEST(to_string, doesntReencodeUrl) { + auto s = "http://localhost:8181/test/+3d.tar.gz"; + auto flakeref = parseFlakeRef(s); + auto parsed = flakeref.to_string(); + auto expected = "http://localhost:8181/test/%2B3d.tar.gz"; + + ASSERT_EQ(parsed, expected); + } + +} diff --git a/src/libexpr/tests/json.cc b/src/libexpr/tests/json.cc index 7586bdd9b..f4cc118d6 100644 --- a/src/libexpr/tests/json.cc +++ b/src/libexpr/tests/json.cc @@ -62,7 +62,7 @@ namespace nix { // not supported by store 'dummy'" thrown in the test body. TEST_F(JSONValueTest, DISABLED_Path) { Value v; - v.mkPath("test"); + v.mkPath(state.rootPath(CanonPath("/test"))); ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\""); } } /* namespace nix */ diff --git a/src/libexpr/tests/libexpr.hh b/src/libexpr/tests/libexpr.hh index b8e65aafe..968431446 100644 --- a/src/libexpr/tests/libexpr.hh +++ b/src/libexpr/tests/libexpr.hh @@ -71,7 +71,7 @@ namespace nix { if (arg.type() != nString) { return false; } - return std::string_view(arg.string.s) == s; + return std::string_view(arg.c_str()) == s; } MATCHER_P(IsIntEq, v, fmt("The string is equal to \"%1%\"", v)) { @@ -103,14 +103,17 @@ namespace nix { } MATCHER_P(IsPathEq, p, fmt("Is a path equal to \"%1%\"", p)) { - if (arg.type() != nPath) { - *result_listener << "Expected a path got " << arg.type(); - return false; - } else if (std::string_view(arg.string.s) != p) { - *result_listener << "Expected a path that equals \"" << p << "\" but got: " << arg.string.s; + if (arg.type() != nPath) { + *result_listener << "Expected a path got " << arg.type(); + return false; + } else { + auto path = arg.path(); + if (path.path != CanonPath(p)) { + *result_listener << "Expected a path that equals \"" << p << "\" but got: " << path.path; return false; } - return true; + } + return true; } diff --git a/src/libexpr/tests/primops.cc b/src/libexpr/tests/primops.cc index ce3b5d11f..d820b860e 100644 --- a/src/libexpr/tests/primops.cc +++ b/src/libexpr/tests/primops.cc @@ -711,14 +711,14 @@ namespace nix { // FIXME: add a test that verifies the string context is as expected auto v = eval("builtins.replaceStrings [\"oo\" \"a\"] [\"a\" \"i\"] \"foobar\""); ASSERT_EQ(v.type(), nString); - ASSERT_EQ(v.string.s, std::string_view("fabir")); + ASSERT_EQ(v.string_view(), 
"fabir"); } TEST_F(PrimOpTest, concatStringsSep) { // FIXME: add a test that verifies the string context is as expected auto v = eval("builtins.concatStringsSep \"%\" [\"foo\" \"bar\" \"baz\"]"); ASSERT_EQ(v.type(), nString); - ASSERT_EQ(std::string_view(v.string.s), "foo%bar%baz"); + ASSERT_EQ(v.string_view(), "foo%bar%baz"); } TEST_F(PrimOpTest, split1) { diff --git a/src/libexpr/tests/value/context.cc b/src/libexpr/tests/value/context.cc index 0d9381577..92d4889ab 100644 --- a/src/libexpr/tests/value/context.cc +++ b/src/libexpr/tests/value/context.cc @@ -8,6 +8,8 @@ namespace nix { +// Test a few cases of invalid string context elements. + TEST(NixStringContextElemTest, empty_invalid) { EXPECT_THROW( NixStringContextElem::parse(""), @@ -38,46 +40,88 @@ TEST(NixStringContextElemTest, slash_invalid) { BadStorePath); } +/** + * Round trip (string <-> data structure) test for + * `NixStringContextElem::Opaque`. + */ TEST(NixStringContextElemTest, opaque) { std::string_view opaque = "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x"; auto elem = NixStringContextElem::parse(opaque); - auto * p = std::get_if(&elem); + auto * p = std::get_if(&elem.raw); ASSERT_TRUE(p); ASSERT_EQ(p->path, StorePath { opaque }); ASSERT_EQ(elem.to_string(), opaque); } +/** + * Round trip (string <-> data structure) test for + * `NixStringContextElem::DrvDeep`. + */ TEST(NixStringContextElemTest, drvDeep) { std::string_view drvDeep = "=g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"; auto elem = NixStringContextElem::parse(drvDeep); - auto * p = std::get_if(&elem); + auto * p = std::get_if(&elem.raw); ASSERT_TRUE(p); ASSERT_EQ(p->drvPath, StorePath { drvDeep.substr(1) }); ASSERT_EQ(elem.to_string(), drvDeep); } -TEST(NixStringContextElemTest, built) { +/** + * Round trip (string <-> data structure) test for a simpler + * `NixStringContextElem::Built`. + */ +TEST(NixStringContextElemTest, built_opaque) { std::string_view built = "!foo!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"; auto elem = NixStringContextElem::parse(built); - auto * p = std::get_if(&elem); + auto * p = std::get_if(&elem.raw); ASSERT_TRUE(p); ASSERT_EQ(p->output, "foo"); - ASSERT_EQ(p->drvPath, StorePath { built.substr(5) }); + ASSERT_EQ(*p->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque { + .path = StorePath { built.substr(5) }, + })); ASSERT_EQ(elem.to_string(), built); } +/** + * Round trip (string <-> data structure) test for a more complex, + * inductive `NixStringContextElem::Built`. + */ +TEST(NixStringContextElemTest, built_built) { + /** + * We set these in tests rather than the regular globals so we don't have + * to worry about race conditions if the tests run concurrently. + */ + ExperimentalFeatureSettings mockXpSettings; + mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations"); + + std::string_view built = "!foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"; + auto elem = NixStringContextElem::parse(built, mockXpSettings); + auto * p = std::get_if(&elem.raw); + ASSERT_TRUE(p); + ASSERT_EQ(p->output, "foo"); + auto * drvPath = std::get_if(&*p->drvPath); + ASSERT_TRUE(drvPath); + ASSERT_EQ(drvPath->output, "bar"); + ASSERT_EQ(*drvPath->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque { + .path = StorePath { built.substr(9) }, + })); + ASSERT_EQ(elem.to_string(), built); +} + +/** + * Without the right experimental features enabled, we cannot parse a + * complex inductive string context element. 
+ */ +TEST(NixStringContextElemTest, built_built_xp) { + ASSERT_THROW( + NixStringContextElem::parse("!foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"), MissingExperimentalFeature); +} + } namespace rc { using namespace nix; -Gen Arbitrary::arbitrary() -{ - return gen::just(NixStringContextElem::Opaque { - .path = *gen::arbitrary(), - }); -} - Gen Arbitrary::arbitrary() { return gen::just(NixStringContextElem::DrvDeep { @@ -85,14 +129,6 @@ Gen Arbitrary::arb }); } -Gen Arbitrary::arbitrary() -{ - return gen::just(NixStringContextElem::Built { - .drvPath = *gen::arbitrary(), - .output = (*gen::arbitrary()).name, - }); -} - Gen Arbitrary::arbitrary() { switch (*gen::inRange(0, std::variant_size_v)) { @@ -111,6 +147,8 @@ Gen Arbitrary::arbitrary() namespace nix { +#ifndef COVERAGE + RC_GTEST_PROP( NixStringContextElemTest, prop_round_rip, @@ -119,4 +157,6 @@ RC_GTEST_PROP( RC_ASSERT(o == NixStringContextElem::parse(o.to_string())); } +#endif + } diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index ac3986c87..cbc91f509 100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -31,7 +31,7 @@ json printValueAsJSON(EvalState & state, bool strict, case nString: copyContext(v, context); - out = v.string.s; + out = v.c_str(); break; case nPath: diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc index 2539ad1c1..bd7a4ae30 100644 --- a/src/libexpr/value-to-xml.cc +++ b/src/libexpr/value-to-xml.cc @@ -74,7 +74,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location, case nString: /* !!! show the context? */ copyContext(v, context); - doc.writeEmptyElement("string", singletonAttrs("value", v.string.s)); + doc.writeEmptyElement("string", singletonAttrs("value", v.c_str())); break; case nPath: @@ -96,14 +96,14 @@ static void printValueAsXML(EvalState & state, bool strict, bool location, if (a != v.attrs->end()) { if (strict) state.forceValue(*a->value, a->pos); if (a->value->type() == nString) - xmlAttrs["drvPath"] = drvPath = a->value->string.s; + xmlAttrs["drvPath"] = drvPath = a->value->c_str(); } a = v.attrs->find(state.sOutPath); if (a != v.attrs->end()) { if (strict) state.forceValue(*a->value, a->pos); if (a->value->type() == nString) - xmlAttrs["outPath"] = a->value->string.s; + xmlAttrs["outPath"] = a->value->c_str(); } XMLOpenElement _(doc, "derivation", xmlAttrs); diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index c44683e50..622e613ea 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -186,11 +186,15 @@ public: * For canonicity, the store paths should be in sorted order. 
*/ struct { - const char * s; + const char * c_str; const char * * context; // must be in sorted order } string; - const char * _path; + struct { + InputAccessor * accessor; + const char * path; + } _path; + Bindings * attrs; struct { size_t size; @@ -270,7 +274,7 @@ public: inline void mkString(const char * s, const char * * context = 0) { internalType = tString; - string.s = s; + string.c_str = s; string.context = context; } @@ -287,11 +291,12 @@ public: void mkPath(const SourcePath & path); - inline void mkPath(const char * path) + inline void mkPath(InputAccessor * accessor, const char * path) { clearValue(); internalType = tPath; - _path = path; + _path.accessor = accessor; + _path.path = path; } inline void mkNull() @@ -438,13 +443,27 @@ public: SourcePath path() const { assert(internalType == tPath); - return SourcePath{CanonPath(_path)}; + return SourcePath { + .accessor = ref(_path.accessor->shared_from_this()), + .path = CanonPath(CanonPath::unchecked_t(), _path.path) + }; } - std::string_view str() const + std::string_view string_view() const { assert(internalType == tString); - return std::string_view(string.s); + return std::string_view(string.c_str); + } + + const char * const c_str() const + { + assert(internalType == tString); + return string.c_str; + } + + const char * * context() const + { + return string.context; } }; diff --git a/src/libexpr/value/context.cc b/src/libexpr/value/context.cc index f76fc76e4..22361d8fa 100644 --- a/src/libexpr/value/context.cc +++ b/src/libexpr/value/context.cc @@ -4,29 +4,52 @@ namespace nix { -NixStringContextElem NixStringContextElem::parse(std::string_view s0) +NixStringContextElem NixStringContextElem::parse( + std::string_view s0, + const ExperimentalFeatureSettings & xpSettings) { std::string_view s = s0; + std::function parseRest; + parseRest = [&]() -> SingleDerivedPath { + // Case on whether there is a '!' + size_t index = s.find("!"); + if (index == std::string_view::npos) { + return SingleDerivedPath::Opaque { + .path = StorePath { s }, + }; + } else { + std::string output { s.substr(0, index) }; + // Advance string to parse after the '!' + s = s.substr(index + 1); + auto drv = make_ref(parseRest()); + drvRequireExperiment(*drv, xpSettings); + return SingleDerivedPath::Built { + .drvPath = std::move(drv), + .output = std::move(output), + }; + } + }; + if (s.size() == 0) { throw BadNixStringContextElem(s0, "String context element should never be an empty string"); } + switch (s.at(0)) { case '!': { - s = s.substr(1); // advance string to parse after first ! - size_t index = s.find("!"); - // This makes index + 1 safe. Index can be the length (one after index - // of last character), so given any valid character index --- a - // successful find --- we can add one. - if (index == std::string_view::npos) { + // Advance string to parse after the '!' + s = s.substr(1); + + // Find *second* '!' + if (s.find("!") == std::string_view::npos) { throw BadNixStringContextElem(s0, "String content element beginning with '!' should have a second '!'"); } - return NixStringContextElem::Built { - .drvPath = StorePath { s.substr(index + 1) }, - .output = std::string(s.substr(0, index)), - }; + + return std::visit( + [&](auto x) -> NixStringContextElem { return std::move(x); }, + parseRest()); } case '=': { return NixStringContextElem::DrvDeep { @@ -34,33 +57,51 @@ NixStringContextElem NixStringContextElem::parse(std::string_view s0) }; } default: { - return NixStringContextElem::Opaque { - .path = StorePath { s }, - }; + // Ensure no '!' 
+ if (s.find("!") != std::string_view::npos) { + throw BadNixStringContextElem(s0, + "String content element not beginning with '!' should not have a second '!'"); + } + return std::visit( + [&](auto x) -> NixStringContextElem { return std::move(x); }, + parseRest()); } } } -std::string NixStringContextElem::to_string() const { - return std::visit(overloaded { +std::string NixStringContextElem::to_string() const +{ + std::string res; + + std::function toStringRest; + toStringRest = [&](auto & p) { + std::visit(overloaded { + [&](const SingleDerivedPath::Opaque & o) { + res += o.path.to_string(); + }, + [&](const SingleDerivedPath::Built & o) { + res += o.output; + res += '!'; + toStringRest(*o.drvPath); + }, + }, p.raw()); + }; + + std::visit(overloaded { [&](const NixStringContextElem::Built & b) { - std::string res; res += '!'; - res += b.output; - res += '!'; - res += b.drvPath.to_string(); - return res; - }, - [&](const NixStringContextElem::DrvDeep & d) { - std::string res; - res += '='; - res += d.drvPath.to_string(); - return res; + toStringRest(b); }, [&](const NixStringContextElem::Opaque & o) { - return std::string { o.path.to_string() }; + toStringRest(o); }, - }, raw()); + [&](const NixStringContextElem::DrvDeep & d) { + res += '='; + res += d.drvPath.to_string(); + }, + }, raw); + + return res; } } diff --git a/src/libexpr/value/context.hh b/src/libexpr/value/context.hh index 287ae08a9..9f1d59317 100644 --- a/src/libexpr/value/context.hh +++ b/src/libexpr/value/context.hh @@ -3,9 +3,8 @@ #include "util.hh" #include "comparator.hh" -#include "path.hh" - -#include +#include "derived-path.hh" +#include "variant-wrapper.hh" #include @@ -26,75 +25,59 @@ public: } }; -/** - * Plain opaque path to some store object. - * - * Encoded as just the path: ‘’. - */ -struct NixStringContextElem_Opaque { - StorePath path; +struct NixStringContextElem { + /** + * Plain opaque path to some store object. + * + * Encoded as just the path: ‘’. + */ + using Opaque = SingleDerivedPath::Opaque; - GENERATE_CMP(NixStringContextElem_Opaque, me->path); -}; + /** + * Path to a derivation and its entire build closure. + * + * The path doesn't just refer to derivation itself and its closure, but + * also all outputs of all derivations in that closure (including the + * root derivation). + * + * Encoded in the form ‘=’. + */ + struct DrvDeep { + StorePath drvPath; -/** - * Path to a derivation and its entire build closure. - * - * The path doesn't just refer to derivation itself and its closure, but - * also all outputs of all derivations in that closure (including the - * root derivation). - * - * Encoded in the form ‘=’. - */ -struct NixStringContextElem_DrvDeep { - StorePath drvPath; + GENERATE_CMP(DrvDeep, me->drvPath); + }; - GENERATE_CMP(NixStringContextElem_DrvDeep, me->drvPath); -}; + /** + * Derivation output. + * + * Encoded in the form ‘!!’. + */ + using Built = SingleDerivedPath::Built; -/** - * Derivation output. - * - * Encoded in the form ‘!!’. 
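The rewritten `parse()`/`to_string()` above treat a context element as a chain of `!`-separated outputs ending in a store path. A standalone round-trip sketch of that shape, using only the standard library rather than the real `NixStringContextElem`/`SingleDerivedPath` types:

#include <cassert>
#include <cstddef>
#include <string>
#include <string_view>
#include <vector>

// Standalone sketch of the '!'-chained encoding handled by
// parseRest()/toStringRest() above; not the real Nix types.
struct Chain {
    std::vector<std::string> outputs;  // outermost output first
    std::string path;                  // trailing store / .drv path
};

static Chain parseChain(std::string_view s)
{
    Chain c;
    std::size_t i;
    while ((i = s.find('!')) != std::string_view::npos) {
        c.outputs.emplace_back(s.substr(0, i));
        s = s.substr(i + 1);
    }
    c.path = std::string(s);
    return c;
}

static std::string printChain(const Chain & c)
{
    std::string res;
    for (auto & o : c.outputs) { res += o; res += '!'; }
    res += c.path;
    return res;
}

int main()
{
    // One output ("<output>!<path>", after the leading '!' marker that
    // NixStringContextElem::parse() strips first) round-trips:
    std::string s = "foo!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
    assert(printChain(parseChain(s)) == s);

    // Two outputs is the nested form that built_built_xp above expects
    // to be rejected unless the relevant experimental feature is enabled.
    assert(parseChain("foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv").outputs.size() == 2);
}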
- */ -struct NixStringContextElem_Built { - StorePath drvPath; - std::string output; + using Raw = std::variant< + Opaque, + DrvDeep, + Built + >; - GENERATE_CMP(NixStringContextElem_Built, me->drvPath, me->output); -}; + Raw raw; -using _NixStringContextElem_Raw = std::variant< - NixStringContextElem_Opaque, - NixStringContextElem_DrvDeep, - NixStringContextElem_Built ->; + GENERATE_CMP(NixStringContextElem, me->raw); -struct NixStringContextElem : _NixStringContextElem_Raw { - using Raw = _NixStringContextElem_Raw; - using Raw::Raw; - - using Opaque = NixStringContextElem_Opaque; - using DrvDeep = NixStringContextElem_DrvDeep; - using Built = NixStringContextElem_Built; - - inline const Raw & raw() const & { - return static_cast(*this); - } - inline Raw & raw() & { - return static_cast(*this); - } - inline Raw && raw() && { - return static_cast(*this); - } + MAKE_WRAPPER_CONSTRUCTOR(NixStringContextElem); /** * Decode a context string, one of: * - ‘’ * - ‘=’ * - ‘!!’ + * + * @param xpSettings Stop-gap to avoid globals during unit tests. */ - static NixStringContextElem parse(std::string_view s); + static NixStringContextElem parse( + std::string_view s, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); std::string to_string() const; }; diff --git a/src/libfetchers/attrs.hh b/src/libfetchers/attrs.hh index 9f885a793..b9a2c824e 100644 --- a/src/libfetchers/attrs.hh +++ b/src/libfetchers/attrs.hh @@ -13,6 +13,12 @@ namespace nix::fetchers { typedef std::variant> Attr; + +/** + * An `Attrs` can be thought of a JSON object restricted or simplified + * to be "flat", not containing any subcontainers (arrays or objects) + * and also not containing any `null`s. + */ typedef std::map Attrs; Attrs jsonToAttrs(const nlohmann::json & json); diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index e683b9f80..5688c4dc1 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -36,6 +36,7 @@ Input Input::fromURL(const ParsedURL & url, bool requireTree) for (auto & inputScheme : *inputSchemes) { auto res = inputScheme->inputFromURL(url, requireTree); if (res) { + experimentalFeatureSettings.require(inputScheme->experimentalFeature()); res->scheme = inputScheme; fixupInput(*res); return std::move(*res); @@ -50,6 +51,7 @@ Input Input::fromAttrs(Attrs && attrs) for (auto & inputScheme : *inputSchemes) { auto res = inputScheme->inputFromAttrs(attrs); if (res) { + experimentalFeatureSettings.require(inputScheme->experimentalFeature()); res->scheme = inputScheme; fixupInput(*res); return std::move(*res); @@ -82,16 +84,16 @@ std::string Input::to_string() const return toURL().to_string(); } +bool Input::isDirect() const +{ + return !scheme || scheme->isDirect(*this); +} + Attrs Input::toAttrs() const { return attrs; } -bool Input::hasAllInfo() const -{ - return getNarHash() && scheme && scheme->hasAllInfo(*this); -} - bool Input::operator ==(const Input & other) const { return attrs == other.attrs; @@ -107,7 +109,7 @@ bool Input::contains(const Input & other) const return false; } -std::pair Input::fetch(ref store) const +std::pair Input::fetch(ref store) const { if (!scheme) throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs())); @@ -115,7 +117,7 @@ std::pair Input::fetch(ref store) const /* The tree may already be in the Nix store, or it could be substituted (which is often faster than fetching from the original source). So check that. 
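The new `Attrs` comment above describes a flat, JSON-object-like map. A standalone illustration of what "flat" means; the exact alternatives of `Attr` are elided in this hunk, so the variant below is only an approximation.

#include <cstdint>
#include <map>
#include <string>
#include <variant>

// Approximation of nix::fetchers::Attrs; the real Attr alternatives are
// not fully visible in the hunk above, so these three are an assumption.
using Attr  = std::variant<std::string, uint64_t, bool>;
using Attrs = std::map<std::string, Attr>;

int main()
{
    Attrs attrs {
        {"type", std::string("git")},
        {"url", std::string("https://example.org/repo.git")},
        {"shallow", false},
        {"lastModified", uint64_t(1700000000)},
    };
    // "Flat" means no value is itself a map, list or null, which is what
    // keeps the structure trivially convertible to and from a JSON object
    // (see jsonToAttrs / attrsToJSON).
    (void) attrs;
}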
*/ - if (hasAllInfo()) { + if (getNarHash()) { try { auto storePath = computeStorePath(*store); @@ -124,7 +126,7 @@ std::pair Input::fetch(ref store) const debug("using substituted/cached input '%s' in '%s'", to_string(), store->printStorePath(storePath)); - return {Tree { .actualPath = store->toRealPath(storePath), .storePath = std::move(storePath) }, *this}; + return {std::move(storePath), *this}; } catch (Error & e) { debug("substitution of input '%s' failed: %s", to_string(), e.what()); } @@ -139,18 +141,16 @@ std::pair Input::fetch(ref store) const } }(); - Tree tree { - .actualPath = store->toRealPath(storePath), - .storePath = storePath, - }; - - auto narHash = store->queryPathInfo(tree.storePath)->narHash; - input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true)); + auto narHash = store->queryPathInfo(storePath)->narHash; + input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true)); if (auto prevNarHash = getNarHash()) { if (narHash != *prevNarHash) throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'", - to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true)); + to_string(), + store->printStorePath(storePath), + prevNarHash->to_string(HashFormat::SRI, true), + narHash.to_string(HashFormat::SRI, true)); } if (auto prevLastModified = getLastModified()) { @@ -173,9 +173,7 @@ std::pair Input::fetch(ref store) const input.locked = true; - assert(input.hasAllInfo()); - - return {std::move(tree), input}; + return {std::move(storePath), input}; } Input Input::applyOverrides( @@ -254,7 +252,8 @@ std::optional Input::getRev() const try { hash = Hash::parseAnyPrefixed(*s); } catch (BadHash &e) { - // Default to sha1 for backwards compatibility with existing flakes + // Default to sha1 for backwards compatibility with existing + // usages (e.g. `builtins.fetchTree` calls or flake inputs). hash = Hash::parseAny(*s, htSHA1); } } @@ -308,4 +307,9 @@ void InputScheme::clone(const Input & input, const Path & destDir) const throw Error("do not know how to clone input '%s'", input.to_string()); } +std::optional InputScheme::experimentalFeature() +{ + return {}; +} + } diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh index 6e10e9513..ac605ff8e 100644 --- a/src/libfetchers/fetchers.hh +++ b/src/libfetchers/fetchers.hh @@ -13,21 +13,14 @@ namespace nix { class Store; } namespace nix::fetchers { -struct Tree -{ - Path actualPath; - StorePath storePath; -}; - struct InputScheme; /** - * The Input object is generated by a specific fetcher, based on the - * user-supplied input attribute in the flake.nix file, and contains + * The `Input` object is generated by a specific fetcher, based on + * user-supplied information, and contains * the information that the specific fetcher needs to perform the * actual fetch. The Input object is most commonly created via the - * "fromURL()" or "fromAttrs()" static functions which are provided - * the url or attrset specified in the flake file. + * `fromURL()` or `fromAttrs()` static functions. */ struct Input { @@ -36,7 +29,6 @@ struct Input std::shared_ptr scheme; // note: can be null Attrs attrs; bool locked = false; - bool direct = true; /** * path of the parent of this input, used for relative path resolution @@ -44,10 +36,20 @@ struct Input std::optional parent; public: + /** + * Create an `Input` from a URL. + * + * The URL indicate which sort of fetcher, and provides information to that fetcher. 
+ */ static Input fromURL(const std::string & url, bool requireTree = true); static Input fromURL(const ParsedURL & url, bool requireTree = true); + /** + * Create an `Input` from a an `Attrs`. + * + * The URL indicate which sort of fetcher, and provides information to that fetcher. + */ static Input fromAttrs(Attrs && attrs); ParsedURL toURL() const; @@ -62,7 +64,7 @@ public: * Check whether this is a "direct" input, that is, not * one that goes through a registry. */ - bool isDirect() const { return direct; } + bool isDirect() const; /** * Check whether this is a "locked" input, that is, @@ -70,24 +72,15 @@ public: */ bool isLocked() const { return locked; } - /** - * Check whether the input carries all necessary info required - * for cache insertion and substitution. - * These fields are used to uniquely identify cached trees - * within the "tarball TTL" window without necessarily - * indicating that the input's origin is unchanged. - */ - bool hasAllInfo() const; - bool operator ==(const Input & other) const; bool contains(const Input & other) const; /** - * Fetch the input into the Nix store, returning the location in - * the Nix store and the locked input. + * Fetch the entire input into the Nix store, returning the + * location in the Nix store and the locked input. */ - std::pair fetch(ref store) const; + std::pair fetch(ref store) const; Input applyOverrides( std::optional ref, @@ -116,13 +109,13 @@ public: /** - * The InputScheme represents a type of fetcher. Each fetcher - * registers with nix at startup time. When processing an input for a - * flake, each scheme is given an opportunity to "recognize" that - * input from the url or attributes in the flake file's specification - * and return an Input object to represent the input if it is - * recognized. The Input object contains the information the fetcher - * needs to actually perform the "fetch()" when called. + * The `InputScheme` represents a type of fetcher. Each fetcher + * registers with nix at startup time. When processing an `Input`, + * each scheme is given an opportunity to "recognize" that + * input from the user-provided url or attributes + * and return an `Input` object to represent the input if it is + * recognized. The `Input` object contains the information the fetcher + * needs to actually perform the `fetch()` when called. */ struct InputScheme { @@ -135,8 +128,6 @@ struct InputScheme virtual ParsedURL toURL(const Input & input) const; - virtual bool hasAllInfo(const Input & input) const = 0; - virtual Input applyOverrides( const Input & input, std::optional ref, @@ -149,37 +140,16 @@ struct InputScheme virtual void markChangedFile(const Input & input, std::string_view file, std::optional commitMsg); virtual std::pair fetch(ref store, const Input & input) = 0; + + /** + * Is this `InputScheme` part of an experimental feature? 
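With `Tree` removed, `Input::fetch()` now returns the store path itself. A sketch of the caller-side migration; `fetchExample` is hypothetical, but `fetch()` and `Store::toRealPath()` are the APIs visible in this patch.

#include "fetchers.hh"
#include "store-api.hh"

// Hypothetical call site: before this patch the first element was a
// Tree carrying both storePath and actualPath.
void fetchExample(nix::ref<nix::Store> store, const nix::fetchers::Input & input)
{
    auto [storePath, lockedInput] = input.fetch(store);

    // The on-disk location is now derived on demand instead of stored.
    auto actualPath = store->toRealPath(storePath);
    (void) actualPath;
    (void) lockedInput;
}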
+ */ + virtual std::optional experimentalFeature(); + + virtual bool isDirect(const Input & input) const + { return true; } }; void registerInputScheme(std::shared_ptr && fetcher); -struct DownloadFileResult -{ - StorePath storePath; - std::string etag; - std::string effectiveUrl; - std::optional immutableUrl; -}; - -DownloadFileResult downloadFile( - ref store, - const std::string & url, - const std::string & name, - bool locked, - const Headers & headers = {}); - -struct DownloadTarballResult -{ - Tree tree; - time_t lastModified; - std::optional immutableUrl; -}; - -DownloadTarballResult downloadTarball( - ref store, - const std::string & url, - const std::string & name, - bool locked, - const Headers & headers = {}); - } diff --git a/src/libfetchers/fs-input-accessor.cc b/src/libfetchers/fs-input-accessor.cc new file mode 100644 index 000000000..7638d2d82 --- /dev/null +++ b/src/libfetchers/fs-input-accessor.cc @@ -0,0 +1,130 @@ +#include "fs-input-accessor.hh" +#include "posix-source-accessor.hh" +#include "store-api.hh" + +namespace nix { + +struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor +{ + CanonPath root; + std::optional> allowedPaths; + MakeNotAllowedError makeNotAllowedError; + + FSInputAccessorImpl( + const CanonPath & root, + std::optional> && allowedPaths, + MakeNotAllowedError && makeNotAllowedError) + : root(root) + , allowedPaths(std::move(allowedPaths)) + , makeNotAllowedError(std::move(makeNotAllowedError)) + { + } + + void readFile( + const CanonPath & path, + Sink & sink, + std::function sizeCallback) override + { + auto absPath = makeAbsPath(path); + checkAllowed(absPath); + PosixSourceAccessor::readFile(absPath, sink, sizeCallback); + } + + bool pathExists(const CanonPath & path) override + { + auto absPath = makeAbsPath(path); + return isAllowed(absPath) && PosixSourceAccessor::pathExists(absPath); + } + + Stat lstat(const CanonPath & path) override + { + auto absPath = makeAbsPath(path); + checkAllowed(absPath); + return PosixSourceAccessor::lstat(absPath); + } + + DirEntries readDirectory(const CanonPath & path) override + { + auto absPath = makeAbsPath(path); + checkAllowed(absPath); + DirEntries res; + for (auto & entry : PosixSourceAccessor::readDirectory(absPath)) + if (isAllowed(absPath + entry.first)) + res.emplace(entry); + return res; + } + + std::string readLink(const CanonPath & path) override + { + auto absPath = makeAbsPath(path); + checkAllowed(absPath); + return PosixSourceAccessor::readLink(absPath); + } + + CanonPath makeAbsPath(const CanonPath & path) + { + return root + path; + } + + void checkAllowed(const CanonPath & absPath) override + { + if (!isAllowed(absPath)) + throw makeNotAllowedError + ? 
makeNotAllowedError(absPath) + : RestrictedPathError("access to path '%s' is forbidden", absPath); + } + + bool isAllowed(const CanonPath & absPath) + { + if (!absPath.isWithin(root)) + return false; + + if (allowedPaths) { + auto p = absPath.removePrefix(root); + if (!p.isAllowed(*allowedPaths)) + return false; + } + + return true; + } + + void allowPath(CanonPath path) override + { + if (allowedPaths) + allowedPaths->insert(std::move(path)); + } + + bool hasAccessControl() override + { + return (bool) allowedPaths; + } + + std::optional getPhysicalPath(const CanonPath & path) override + { + return makeAbsPath(path); + } +}; + +ref makeFSInputAccessor( + const CanonPath & root, + std::optional> && allowedPaths, + MakeNotAllowedError && makeNotAllowedError) +{ + return make_ref(root, std::move(allowedPaths), std::move(makeNotAllowedError)); +} + +ref makeStorePathAccessor( + ref store, + const StorePath & storePath, + MakeNotAllowedError && makeNotAllowedError) +{ + return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)), {}, std::move(makeNotAllowedError)); +} + +SourcePath getUnfilteredRootPath(CanonPath path) +{ + static auto rootFS = makeFSInputAccessor(CanonPath::root); + return {rootFS, path}; +} + +} diff --git a/src/libfetchers/fs-input-accessor.hh b/src/libfetchers/fs-input-accessor.hh new file mode 100644 index 000000000..19a5211c8 --- /dev/null +++ b/src/libfetchers/fs-input-accessor.hh @@ -0,0 +1,33 @@ +#pragma once + +#include "input-accessor.hh" + +namespace nix { + +class StorePath; +class Store; + +struct FSInputAccessor : InputAccessor +{ + virtual void checkAllowed(const CanonPath & absPath) = 0; + + virtual void allowPath(CanonPath path) = 0; + + virtual bool hasAccessControl() = 0; +}; + +typedef std::function MakeNotAllowedError; + +ref makeFSInputAccessor( + const CanonPath & root, + std::optional> && allowedPaths = {}, + MakeNotAllowedError && makeNotAllowedError = {}); + +ref makeStorePathAccessor( + ref store, + const StorePath & storePath, + MakeNotAllowedError && makeNotAllowedError = {}); + +SourcePath getUnfilteredRootPath(CanonPath path); + +} diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index f8d89ab2f..26b8987d6 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -46,7 +46,7 @@ bool touchCacheFile(const Path & path, time_t touch_time) Path getCachePath(std::string_view key) { return getCacheDir() + "/nix/gitv3/" + - hashString(htSHA256, key).to_string(Base32, false); + hashString(htSHA256, key).to_string(HashFormat::Base32, false); } // Returns the name of the HEAD branch. 
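A hedged usage sketch for the accessor factory declared above. The `std::set<CanonPath>` element type and the root-relative reading of allow-list entries are assumptions taken from `makeAbsPath`/`isAllowed`; the error message mirrors the one in `checkAllowed`.

#include "canon-path.hh"
#include "fs-input-accessor.hh"

#include <set>

// Hypothetical example: expose /src/repo, but only the allow-listed
// entries; anything else raises the error built here.
void allowListExample()
{
    using namespace nix;

    auto accessor = makeFSInputAccessor(
        CanonPath("/src/repo"),
        std::set<CanonPath>{CanonPath("/flake.nix")},
        [](const CanonPath & p) {
            return RestrictedPathError("access to path '%s' is forbidden", p);
        });

    // The allow-list can be widened later; with no allow-list at all,
    // hasAccessControl() would report false.
    accessor->allowPath(CanonPath("/flake.lock"));
    bool restricted = accessor->hasAccessControl();
    (void) restricted;
}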
@@ -293,7 +293,6 @@ struct GitInputScheme : InputScheme if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs" && name != "name" && name != "dirtyRev" && name != "dirtyShortRev") throw Error("unsupported Git input attribute '%s'", name); - parseURL(getStrAttr(attrs, "url")); maybeGetBoolAttr(attrs, "shallow"); maybeGetBoolAttr(attrs, "submodules"); maybeGetBoolAttr(attrs, "allRefs"); @@ -305,6 +304,9 @@ struct GitInputScheme : InputScheme Input input; input.attrs = attrs; + auto url = fixGitURL(getStrAttr(attrs, "url")); + parseURL(url); + input.attrs["url"] = url; return input; } @@ -319,15 +321,6 @@ struct GitInputScheme : InputScheme return url; } - bool hasAllInfo(const Input & input) const override - { - bool maybeDirty = !input.getRef(); - bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false); - return - maybeGetIntAttr(input.attrs, "lastModified") - && (shallow || maybeDirty || maybeGetIntAttr(input.attrs, "revCount")); - } - Input applyOverrides( const Input & input, std::optional ref, @@ -415,7 +408,7 @@ struct GitInputScheme : InputScheme auto checkHashType = [&](const std::optional & hash) { if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256)) - throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(Base16, true)); + throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true)); }; auto getLockedAttrs = [&]() diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 291f457f0..617fc7468 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -7,6 +7,7 @@ #include "git.hh" #include "fetchers.hh" #include "fetch-settings.hh" +#include "tarball.hh" #include #include @@ -125,18 +126,13 @@ struct GitArchiveInputScheme : InputScheme auto path = owner + "/" + repo; assert(!(ref && rev)); if (ref) path += "/" + *ref; - if (rev) path += "/" + rev->to_string(Base16, false); + if (rev) path += "/" + rev->to_string(HashFormat::Base16, false); return ParsedURL { .scheme = type(), .path = path, }; } - bool hasAllInfo(const Input & input) const override - { - return input.getRev() && maybeGetIntAttr(input.attrs, "lastModified"); - } - Input applyOverrides( const Input & _input, std::optional ref, @@ -218,10 +214,15 @@ struct GitArchiveInputScheme : InputScheme {"rev", rev->gitRev()}, {"lastModified", uint64_t(result.lastModified)} }, - result.tree.storePath, + result.storePath, true); - return {result.tree.storePath, input}; + return {result.storePath, input}; + } + + std::optional experimentalFeature() override + { + return Xp::Flakes; } }; @@ -291,7 +292,7 @@ struct GitHubInputScheme : GitArchiveInputScheme : "https://api.%s/repos/%s/%s/tarball/%s"; const auto url = fmt(urlFmt, host, getOwner(input), getRepo(input), - input.getRev()->to_string(Base16, false)); + input.getRev()->to_string(HashFormat::Base16, false)); return DownloadUrl { url, headers }; } @@ -357,7 +358,7 @@ struct GitLabInputScheme : GitArchiveInputScheme auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com"); auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s", host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), - input.getRev()->to_string(Base16, false)); + input.getRev()->to_string(HashFormat::Base16, false)); Headers 
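Many hunks in these fetcher files are the mechanical move from the bare `Base16`/`Base32`/`SRI` enumerators to the scoped `HashFormat` enum. One call-site sketch, assuming `hashString` keeps the shape it has in this patch; `cacheKey` is a hypothetical wrapper.

#include "hash.hh"

#include <string>
#include <string_view>

// Hypothetical wrapper; before this patch the second line would have
// read hash.to_string(Base32, false).
std::string cacheKey(std::string_view key)
{
    auto hash = nix::hashString(nix::htSHA256, key);
    return hash.to_string(nix::HashFormat::Base32, false);
}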
headers = makeHeadersWithAuthTokens(host); return DownloadUrl { url, headers }; @@ -444,7 +445,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht"); auto url = fmt("https://%s/%s/%s/archive/%s.tar.gz", host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), - input.getRev()->to_string(Base16, false)); + input.getRev()->to_string(HashFormat::Base16, false)); Headers headers = makeHeadersWithAuthTokens(host); return DownloadUrl { url, headers }; diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc index 4874a43ff..9a71df3d4 100644 --- a/src/libfetchers/indirect.cc +++ b/src/libfetchers/indirect.cc @@ -41,7 +41,6 @@ struct IndirectInputScheme : InputScheme // FIXME: forbid query params? Input input; - input.direct = false; input.attrs.insert_or_assign("type", "indirect"); input.attrs.insert_or_assign("id", id); if (rev) input.attrs.insert_or_assign("rev", rev->gitRev()); @@ -63,7 +62,6 @@ struct IndirectInputScheme : InputScheme throw BadURL("'%s' is not a valid flake ID", id); Input input; - input.direct = false; input.attrs = attrs; return input; } @@ -78,11 +76,6 @@ struct IndirectInputScheme : InputScheme return url; } - bool hasAllInfo(const Input & input) const override - { - return false; - } - Input applyOverrides( const Input & _input, std::optional ref, @@ -98,6 +91,14 @@ struct IndirectInputScheme : InputScheme { throw Error("indirect input '%s' cannot be fetched directly", input.to_string()); } + + std::optional experimentalFeature() override + { + return Xp::Flakes; + } + + bool isDirect(const Input & input) const override + { return false; } }; static auto rIndirectInputScheme = OnStartup([] { registerInputScheme(std::make_unique()); }); diff --git a/src/libfetchers/input-accessor.cc b/src/libfetchers/input-accessor.cc index f37a8058b..d1d450cf7 100644 --- a/src/libfetchers/input-accessor.cc +++ b/src/libfetchers/input-accessor.cc @@ -3,12 +3,52 @@ namespace nix { +StorePath InputAccessor::fetchToStore( + ref store, + const CanonPath & path, + std::string_view name, + FileIngestionMethod method, + PathFilter * filter, + RepairFlag repair) +{ + Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", showPath(path))); + + auto source = sinkToSource([&](Sink & sink) { + if (method == FileIngestionMethod::Recursive) + dumpPath(path, sink, filter ? *filter : defaultPathFilter); + else + readFile(path, sink); + }); + + auto storePath = + settings.readOnlyMode + ? store->computeStorePathFromDump(*source, name, method, htSHA256).first + : store->addToStoreFromDump(*source, name, method, htSHA256, repair); + + return storePath; +} + +SourcePath InputAccessor::root() +{ + return {ref(shared_from_this()), CanonPath::root}; +} + std::ostream & operator << (std::ostream & str, const SourcePath & path) { str << path.to_string(); return str; } +StorePath SourcePath::fetchToStore( + ref store, + std::string_view name, + FileIngestionMethod method, + PathFilter * filter, + RepairFlag repair) const +{ + return accessor->fetchToStore(store, path, name, method, filter, repair); +} + std::string_view SourcePath::baseName() const { return path.baseName().value_or("source"); @@ -18,60 +58,12 @@ SourcePath SourcePath::parent() const { auto p = path.parent(); assert(p); - return std::move(*p); -} - -InputAccessor::Stat SourcePath::lstat() const -{ - auto st = nix::lstat(path.abs()); - return InputAccessor::Stat { - .type = - S_ISREG(st.st_mode) ? 
InputAccessor::tRegular : - S_ISDIR(st.st_mode) ? InputAccessor::tDirectory : - S_ISLNK(st.st_mode) ? InputAccessor::tSymlink : - InputAccessor::tMisc, - .isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR - }; -} - -std::optional SourcePath::maybeLstat() const -{ - // FIXME: merge these into one operation. - if (!pathExists()) - return {}; - return lstat(); -} - -InputAccessor::DirEntries SourcePath::readDirectory() const -{ - InputAccessor::DirEntries res; - for (auto & entry : nix::readDirectory(path.abs())) { - std::optional type; - switch (entry.type) { - case DT_REG: type = InputAccessor::Type::tRegular; break; - case DT_LNK: type = InputAccessor::Type::tSymlink; break; - case DT_DIR: type = InputAccessor::Type::tDirectory; break; - } - res.emplace(entry.name, type); - } - return res; -} - -StorePath SourcePath::fetchToStore( - ref store, - std::string_view name, - PathFilter * filter, - RepairFlag repair) const -{ - return - settings.readOnlyMode - ? store->computeStorePathForPath(name, path.abs(), FileIngestionMethod::Recursive, htSHA256, filter ? *filter : defaultPathFilter).first - : store->addToStore(name, path.abs(), FileIngestionMethod::Recursive, htSHA256, filter ? *filter : defaultPathFilter, repair); + return {accessor, std::move(*p)}; } SourcePath SourcePath::resolveSymlinks() const { - SourcePath res(CanonPath::root); + auto res = accessor->root(); int linksAllowed = 1024; diff --git a/src/libfetchers/input-accessor.hh b/src/libfetchers/input-accessor.hh index 5a2f17f62..5dc05a363 100644 --- a/src/libfetchers/input-accessor.hh +++ b/src/libfetchers/input-accessor.hh @@ -1,40 +1,39 @@ #pragma once +#include "source-accessor.hh" #include "ref.hh" #include "types.hh" -#include "archive.hh" -#include "canon-path.hh" #include "repair-flag.hh" +#include "content-address.hh" namespace nix { +MakeError(RestrictedPathError, Error); + +struct SourcePath; class StorePath; class Store; -struct InputAccessor +struct InputAccessor : SourceAccessor, std::enable_shared_from_this { - enum Type { - tRegular, tSymlink, tDirectory, - /** - Any other node types that may be encountered on the file system, such as device nodes, sockets, named pipe, and possibly even more exotic things. - - Responsible for `"unknown"` from `builtins.readFileType "/dev/null"`. - - Unlike `DT_UNKNOWN`, this must not be used for deferring the lookup of types. - */ - tMisc - }; - - struct Stat + /** + * Return the maximum last-modified time of the files in this + * tree, if available. + */ + virtual std::optional getLastModified() { - Type type = tMisc; - //uint64_t fileSize = 0; // regular files only - bool isExecutable = false; // regular files only - }; + return std::nullopt; + } - typedef std::optional DirEntry; + StorePath fetchToStore( + ref store, + const CanonPath & path, + std::string_view name = "source", + FileIngestionMethod method = FileIngestionMethod::Recursive, + PathFilter * filter = nullptr, + RepairFlag repair = NoRepair); - typedef std::map DirEntries; + SourcePath root(); }; /** @@ -45,12 +44,9 @@ struct InputAccessor */ struct SourcePath { + ref accessor; CanonPath path; - SourcePath(CanonPath path) - : path(std::move(path)) - { } - std::string_view baseName() const; /** @@ -64,39 +60,42 @@ struct SourcePath * return its contents; otherwise throw an error. 
*/ std::string readFile() const - { return nix::readFile(path.abs()); } + { return accessor->readFile(path); } /** * Return whether this `SourcePath` denotes a file (of any type) * that exists */ bool pathExists() const - { return nix::pathExists(path.abs()); } + { return accessor->pathExists(path); } /** * Return stats about this `SourcePath`, or throw an exception if * it doesn't exist. */ - InputAccessor::Stat lstat() const; + InputAccessor::Stat lstat() const + { return accessor->lstat(path); } /** * Return stats about this `SourcePath`, or std::nullopt if it * doesn't exist. */ - std::optional maybeLstat() const; + std::optional maybeLstat() const + { return accessor->maybeLstat(path); } /** * If this `SourcePath` denotes a directory (not a symlink), * return its directory entries; otherwise throw an error. */ - InputAccessor::DirEntries readDirectory() const; + InputAccessor::DirEntries readDirectory() const + { return accessor->readDirectory(path); } /** * If this `SourcePath` denotes a symlink, return its target; * otherwise throw an error. */ std::string readLink() const - { return nix::readLink(path.abs()); } + { return accessor->readLink(path); } /** * Dump this `SourcePath` to `sink` as a NAR archive. @@ -104,7 +103,7 @@ struct SourcePath void dumpPath( Sink & sink, PathFilter & filter = defaultPathFilter) const - { return nix::dumpPath(path.abs(), sink, filter); } + { return accessor->dumpPath(path, sink, filter); } /** * Copy this `SourcePath` to the Nix store. @@ -112,6 +111,7 @@ struct SourcePath StorePath fetchToStore( ref store, std::string_view name = "source", + FileIngestionMethod method = FileIngestionMethod::Recursive, PathFilter * filter = nullptr, RepairFlag repair = NoRepair) const; @@ -120,7 +120,7 @@ struct SourcePath * it has a physical location. */ std::optional getPhysicalPath() const - { return path; } + { return accessor->getPhysicalPath(path); } std::string to_string() const { return path.abs(); } @@ -129,7 +129,7 @@ struct SourcePath * Append a `CanonPath` to this path. */ SourcePath operator + (const CanonPath & x) const - { return {path + x}; } + { return {accessor, path + x}; } /** * Append a single component `c` to this path. `c` must not @@ -137,21 +137,21 @@ struct SourcePath * and `c`. 
*/ SourcePath operator + (std::string_view c) const - { return {path + c}; } + { return {accessor, path + c}; } bool operator == (const SourcePath & x) const { - return path == x.path; + return std::tie(accessor, path) == std::tie(x.accessor, x.path); } bool operator != (const SourcePath & x) const { - return path != x.path; + return std::tie(accessor, path) != std::tie(x.accessor, x.path); } bool operator < (const SourcePath & x) const { - return path < x.path; + return std::tie(accessor, path) < std::tie(x.accessor, x.path); } /** diff --git a/src/libfetchers/memory-input-accessor.cc b/src/libfetchers/memory-input-accessor.cc new file mode 100644 index 000000000..817d063ba --- /dev/null +++ b/src/libfetchers/memory-input-accessor.cc @@ -0,0 +1,54 @@ +#include "memory-input-accessor.hh" + +namespace nix { + +struct MemoryInputAccessorImpl : MemoryInputAccessor +{ + std::map files; + + std::string readFile(const CanonPath & path) override + { + auto i = files.find(path); + if (i == files.end()) + throw Error("file '%s' does not exist", path); + return i->second; + } + + bool pathExists(const CanonPath & path) override + { + auto i = files.find(path); + return i != files.end(); + } + + Stat lstat(const CanonPath & path) override + { + auto i = files.find(path); + if (i != files.end()) + return Stat { .type = tRegular, .isExecutable = false }; + throw Error("file '%s' does not exist", path); + } + + DirEntries readDirectory(const CanonPath & path) override + { + return {}; + } + + std::string readLink(const CanonPath & path) override + { + throw UnimplementedError("MemoryInputAccessor::readLink"); + } + + SourcePath addFile(CanonPath path, std::string && contents) override + { + files.emplace(path, std::move(contents)); + + return {ref(shared_from_this()), std::move(path)}; + } +}; + +ref makeMemoryInputAccessor() +{ + return make_ref(); +} + +} diff --git a/src/libfetchers/memory-input-accessor.hh b/src/libfetchers/memory-input-accessor.hh new file mode 100644 index 000000000..b75b02bfd --- /dev/null +++ b/src/libfetchers/memory-input-accessor.hh @@ -0,0 +1,15 @@ +#include "input-accessor.hh" + +namespace nix { + +/** + * An input accessor for an in-memory file system. + */ +struct MemoryInputAccessor : InputAccessor +{ + virtual SourcePath addFile(CanonPath path, std::string && contents) = 0; +}; + +ref makeMemoryInputAccessor(); + +} diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 51fd1ed42..f830a3271 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -98,13 +98,6 @@ struct MercurialInputScheme : InputScheme return url; } - bool hasAllInfo(const Input & input) const override - { - // FIXME: ugly, need to distinguish between dirty and clean - // default trees. - return input.getRef() == "default" || maybeGetIntAttr(input.attrs, "revCount"); - } - Input applyOverrides( const Input & input, std::optional ref, @@ -206,7 +199,7 @@ struct MercurialInputScheme : InputScheme auto checkHashType = [&](const std::optional & hash) { if (hash.has_value() && hash->type != htSHA1) - throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(Base16, true)); + throw Error("Hash '%s' is not supported by Mercurial. 
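A small sketch tying the two new pieces together: `MemoryInputAccessor` from the files above, and `SourcePath` now delegating every operation to the accessor it carries. `memoryAccessorExample` is hypothetical; the calls it makes are all declared in the hunks above.

#include "memory-input-accessor.hh"

#include <cassert>

// Hypothetical example of the in-memory accessor and of SourcePath
// delegating to whichever accessor it carries.
void memoryAccessorExample()
{
    using namespace nix;

    auto accessor = makeMemoryInputAccessor();
    SourcePath p = accessor->addFile(CanonPath("/foo/bar.nix"), "{ }");

    assert(p.pathExists());
    assert(p.readFile() == "{ }");
    assert(!(accessor->root() + "missing.nix").pathExists());
}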
Only sha1 is supported.", hash->to_string(HashFormat::Base16, true)); }; @@ -252,7 +245,7 @@ struct MercurialInputScheme : InputScheme } } - Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false)); + Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(HashFormat::Base32, false)); /* If this is a commit hash that we already have, we don't have to pull again. */ diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index 01f1be978..d829609b5 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -66,11 +66,6 @@ struct PathInputScheme : InputScheme }; } - bool hasAllInfo(const Input & input) const override - { - return true; - } - std::optional getSourcePath(const Input & input) override { return getStrAttr(input.attrs, "path"); @@ -125,6 +120,11 @@ struct PathInputScheme : InputScheme return {std::move(*storePath), input}; } + + std::optional experimentalFeature() override + { + return Xp::Flakes; + } }; static auto rPathInputScheme = OnStartup([] { registerInputScheme(std::make_unique()); }); diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc index 43c03beec..a0fff9ceb 100644 --- a/src/libfetchers/registry.cc +++ b/src/libfetchers/registry.cc @@ -1,5 +1,5 @@ #include "registry.hh" -#include "fetchers.hh" +#include "tarball.hh" #include "util.hh" #include "globals.hh" #include "store-api.hh" diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index 107d38e92..e1ea9b58b 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -1,3 +1,4 @@ +#include "tarball.hh" #include "fetchers.hh" #include "cache.hh" #include "filetransfer.hh" @@ -133,7 +134,7 @@ DownloadTarballResult downloadTarball( if (cached && !cached->expired) return { - .tree = Tree { .actualPath = store->toRealPath(cached->storePath), .storePath = std::move(cached->storePath) }, + .storePath = std::move(cached->storePath), .lastModified = (time_t) getIntAttr(cached->infoAttrs, "lastModified"), .immutableUrl = maybeGetStrAttr(cached->infoAttrs, "immutableUrl"), }; @@ -174,7 +175,7 @@ DownloadTarballResult downloadTarball( locked); return { - .tree = Tree { .actualPath = store->toRealPath(*unpackedStorePath), .storePath = std::move(*unpackedStorePath) }, + .storePath = std::move(*unpackedStorePath), .lastModified = lastModified, .immutableUrl = res.immutableUrl, }; @@ -232,7 +233,7 @@ struct CurlInputScheme : InputScheme if (type != inputType()) return {}; // FIXME: some of these only apply to TarballInputScheme. - std::set allowedNames = {"type", "url", "narHash", "name", "unpack", "rev", "revCount"}; + std::set allowedNames = {"type", "url", "narHash", "name", "unpack", "rev", "revCount", "lastModified"}; for (auto & [name, value] : attrs) if (!allowedNames.count(name)) throw Error("unsupported %s input attribute '%s'", *type, name); @@ -250,15 +251,9 @@ struct CurlInputScheme : InputScheme // NAR hashes are preferred over file hashes since tar/zip // files don't have a canonical representation. 
if (auto narHash = input.getNarHash()) - url.query.insert_or_assign("narHash", narHash->to_string(SRI, true)); + url.query.insert_or_assign("narHash", narHash->to_string(HashFormat::SRI, true)); return url; } - - bool hasAllInfo(const Input & input) const override - { - return true; - } - }; struct FileInputScheme : CurlInputScheme @@ -310,7 +305,10 @@ struct TarballInputScheme : CurlInputScheme input = immutableInput; } - return {result.tree.storePath, std::move(input)}; + if (result.lastModified && !input.attrs.contains("lastModified")) + input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified)); + + return {result.storePath, std::move(input)}; } }; diff --git a/src/libfetchers/tarball.hh b/src/libfetchers/tarball.hh new file mode 100644 index 000000000..9e6b50b31 --- /dev/null +++ b/src/libfetchers/tarball.hh @@ -0,0 +1,43 @@ +#pragma once + +#include "types.hh" +#include "path.hh" + +#include + +namespace nix { +class Store; +} + +namespace nix::fetchers { + +struct DownloadFileResult +{ + StorePath storePath; + std::string etag; + std::string effectiveUrl; + std::optional immutableUrl; +}; + +DownloadFileResult downloadFile( + ref store, + const std::string & url, + const std::string & name, + bool locked, + const Headers & headers = {}); + +struct DownloadTarballResult +{ + StorePath storePath; + time_t lastModified; + std::optional immutableUrl; +}; + +DownloadTarballResult downloadTarball( + ref store, + const std::string & url, + const std::string & name, + bool locked, + const Headers & headers = {}); + +} diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc index f92920d18..205b77808 100644 --- a/src/libmain/common-args.cc +++ b/src/libmain/common-args.cc @@ -1,4 +1,5 @@ #include "common-args.hh" +#include "args/root.hh" #include "globals.hh" #include "loggers.hh" @@ -34,21 +35,21 @@ MixCommonArgs::MixCommonArgs(const std::string & programName) .description = "Set the Nix configuration setting *name* to *value* (overriding `nix.conf`).", .category = miscCategory, .labels = {"name", "value"}, - .handler = {[](std::string name, std::string value) { + .handler = {[this](std::string name, std::string value) { try { globalConfig.set(name, value); } catch (UsageError & e) { - if (!completions) + if (!getRoot().completions) warn(e.what()); } }}, - .completer = [](size_t index, std::string_view prefix) { + .completer = [](AddCompletions & completions, size_t index, std::string_view prefix) { if (index == 0) { std::map settings; globalConfig.getSettings(settings); for (auto & s : settings) if (hasPrefix(s.first, prefix)) - completions->add(s.first, fmt("Set the `%s` setting.", s.first)); + completions.add(s.first, fmt("Set the `%s` setting.", s.first)); } } }); diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc index 6600ec177..45b1fdfd1 100644 --- a/src/libmain/progress-bar.cc +++ b/src/libmain/progress-bar.cc @@ -108,7 +108,8 @@ public: stop(); } - void stop() override + /* Called by destructor, can't be overridden */ + void stop() override final { { auto state(state_.lock()); diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 56f47a4ac..9c2ad039a 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -379,9 +379,9 @@ RunPager::RunPager() }); pid.setKillSignal(SIGINT); - stdout = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0); + std_out = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0); if (dup2(toPager.writeSide.get(), STDOUT_FILENO) == -1) - throw SysError("dupping stdout"); + throw SysError("dupping standard output"); 
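With `DownloadTarballResult` now carrying a `StorePath` instead of a `Tree`, callers derive the unpacked location through the store. A hypothetical call site for the declarations relocated into `tarball.hh` above; the URL is a placeholder.

#include "store-api.hh"
#include "tarball.hh"

// Hypothetical caller of the relocated downloadTarball() declaration.
void tarballExample(nix::ref<nix::Store> store)
{
    auto result = nix::fetchers::downloadTarball(
        store, "https://example.org/src.tar.gz", "source", /* locked */ false);

    // The unpacked location is derived through the store, since the
    // result now carries a StorePath rather than a Tree.
    auto unpackedPath = store->toRealPath(result.storePath);
    (void) unpackedPath;
    (void) result.lastModified;
    (void) result.immutableUrl;
}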
} @@ -390,7 +390,7 @@ RunPager::~RunPager() try { if (pid != -1) { std::cout.flush(); - dup2(stdout, STDOUT_FILENO); + dup2(std_out, STDOUT_FILENO); pid.wait(); } } catch (...) { diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh index 7a9e83c6c..3159fe479 100644 --- a/src/libmain/shared.hh +++ b/src/libmain/shared.hh @@ -3,6 +3,7 @@ #include "util.hh" #include "args.hh" +#include "args/root.hh" #include "common-args.hh" #include "path.hh" #include "derived-path.hh" @@ -66,7 +67,7 @@ template N getIntArg(const std::string & opt, } -struct LegacyArgs : public MixCommonArgs +struct LegacyArgs : public MixCommonArgs, public RootArgs { std::function parseArg; @@ -85,8 +86,9 @@ struct LegacyArgs : public MixCommonArgs void showManPage(const std::string & name); /** - * The constructor of this class starts a pager if stdout is a - * terminal and $PAGER is set. Stdout is redirected to the pager. + * The constructor of this class starts a pager if standard output is a + * terminal and $PAGER is set. Standard output is redirected to the + * pager. */ class RunPager { @@ -96,7 +98,7 @@ public: private: Pid pid; - int stdout; + int std_out; }; extern volatile ::sig_atomic_t blockInt; diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index b4fea693f..2a91233ec 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -164,7 +164,7 @@ ref BinaryCacheStore::addToStoreCommon( auto [fileHash, fileSize] = fileHashSink.finish(); narInfo->fileHash = fileHash; narInfo->fileSize = fileSize; - narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar" + narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Base32, false) + ".nar" + (compression == "xz" ? ".xz" : compression == "bzip2" ? ".bz2" : compression == "zstd" ? 
".zst" : diff --git a/src/libstore/build-result.cc b/src/libstore/build-result.cc new file mode 100644 index 000000000..18f519c5c --- /dev/null +++ b/src/libstore/build-result.cc @@ -0,0 +1,18 @@ +#include "build-result.hh" + +namespace nix { + +GENERATE_CMP_EXT( + , + BuildResult, + me->status, + me->errorMsg, + me->timesBuilt, + me->isNonDeterministic, + me->builtOutputs, + me->startTime, + me->stopTime, + me->cpuUser, + me->cpuSystem); + +} diff --git a/src/libstore/build-result.hh b/src/libstore/build-result.hh index b7a56e791..8840fa7e3 100644 --- a/src/libstore/build-result.hh +++ b/src/libstore/build-result.hh @@ -3,6 +3,7 @@ #include "realisation.hh" #include "derived-path.hh" +#include "comparator.hh" #include #include @@ -100,6 +101,8 @@ struct BuildResult */ std::optional cpuUser, cpuSystem; + DECLARE_CMP(BuildResult); + bool success() { return status == Built || status == Substituted || status == AlreadyValid || status == ResolvesToAlreadyValid; diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 5e37f7ecb..360c6b70b 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -8,8 +8,8 @@ #include "util.hh" #include "archive.hh" #include "compression.hh" -#include "worker-protocol.hh" -#include "worker-protocol-impl.hh" +#include "common-protocol.hh" +#include "common-protocol-impl.hh" #include "topo-sort.hh" #include "callback.hh" #include "local-store.hh" // TODO remove, along with remaining downcasts @@ -65,7 +65,7 @@ namespace nix { DerivationGoal::DerivationGoal(const StorePath & drvPath, const OutputsSpec & wantedOutputs, Worker & worker, BuildMode buildMode) - : Goal(worker, DerivedPath::Built { .drvPath = drvPath, .outputs = wantedOutputs }) + : Goal(worker, DerivedPath::Built { .drvPath = makeConstantStorePathRef(drvPath), .outputs = wantedOutputs }) , useDerivation(true) , drvPath(drvPath) , wantedOutputs(wantedOutputs) @@ -74,7 +74,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, state = &DerivationGoal::getDerivation; name = fmt( "building of '%s' from .drv file", - DerivedPath::Built { drvPath, wantedOutputs }.to_string(worker.store)); + DerivedPath::Built { makeConstantStorePathRef(drvPath), wantedOutputs }.to_string(worker.store)); trace("created"); mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); @@ -84,7 +84,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv, const OutputsSpec & wantedOutputs, Worker & worker, BuildMode buildMode) - : Goal(worker, DerivedPath::Built { .drvPath = drvPath, .outputs = wantedOutputs }) + : Goal(worker, DerivedPath::Built { .drvPath = makeConstantStorePathRef(drvPath), .outputs = wantedOutputs }) , useDerivation(false) , drvPath(drvPath) , wantedOutputs(wantedOutputs) @@ -95,7 +95,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation state = &DerivationGoal::haveDerivation; name = fmt( "building of '%s' from in-memory derivation", - DerivedPath::Built { drvPath, drv.outputNames() }.to_string(worker.store)); + DerivedPath::Built { makeConstantStorePathRef(drvPath), drv.outputNames() }.to_string(worker.store)); trace("created"); mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); @@ -368,20 +368,37 @@ void DerivationGoal::gaveUpOnSubstitution() /* The inputs must be built before we can build this goal. 
*/ inputDrvOutputs.clear(); - if (useDerivation) - for (auto & i : dynamic_cast(drv.get())->inputDrvs) { + if (useDerivation) { + std::function, const DerivedPathMap::ChildNode &)> addWaiteeDerivedPath; + + addWaiteeDerivedPath = [&](ref inputDrv, const DerivedPathMap::ChildNode & inputNode) { + if (!inputNode.value.empty()) + addWaitee(worker.makeGoal( + DerivedPath::Built { + .drvPath = inputDrv, + .outputs = inputNode.value, + }, + buildMode == bmRepair ? bmRepair : bmNormal)); + for (const auto & [outputName, childNode] : inputNode.childMap) + addWaiteeDerivedPath( + make_ref(SingleDerivedPath::Built { inputDrv, outputName }), + childNode); + }; + + for (const auto & [inputDrvPath, inputNode] : dynamic_cast(drv.get())->inputDrvs.map) { /* Ensure that pure, non-fixed-output derivations don't depend on impure derivations. */ if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && drv->type().isPure() && !drv->type().isFixed()) { - auto inputDrv = worker.evalStore.readDerivation(i.first); + auto inputDrv = worker.evalStore.readDerivation(inputDrvPath); if (!inputDrv.type().isPure()) throw Error("pure derivation '%s' depends on impure derivation '%s'", worker.store.printStorePath(drvPath), - worker.store.printStorePath(i.first)); + worker.store.printStorePath(inputDrvPath)); } - addWaitee(worker.makeDerivationGoal(i.first, i.second, buildMode == bmRepair ? bmRepair : bmNormal)); + addWaiteeDerivedPath(makeConstantStorePathRef(inputDrvPath), inputNode); } + } /* Copy the input sources from the eval store to the build store. */ @@ -452,7 +469,12 @@ void DerivationGoal::repairClosure() if (drvPath2 == outputsToDrv.end()) addWaitee(upcast_goal(worker.makePathSubstitutionGoal(i, Repair))); else - addWaitee(worker.makeDerivationGoal(drvPath2->second, OutputsSpec::All(), bmRepair)); + addWaitee(worker.makeGoal( + DerivedPath::Built { + .drvPath = makeConstantStorePathRef(drvPath2->second), + .outputs = OutputsSpec::All { }, + }, + bmRepair)); } if (waitees.empty()) { @@ -509,7 +531,7 @@ void DerivationGoal::inputsRealised() return ia.deferred; }, [&](const DerivationType::ContentAddressed & ca) { - return !fullDrv.inputDrvs.empty() && ( + return !fullDrv.inputDrvs.map.empty() && ( ca.fixed /* Can optionally resolve if fixed, which is good for avoiding unnecessary rebuilds. */ @@ -521,9 +543,9 @@ void DerivationGoal::inputsRealised() [&](const DerivationType::Impure &) { return true; } - }, drvType.raw()); + }, drvType.raw); - if (resolveDrv && !fullDrv.inputDrvs.empty()) { + if (resolveDrv && !fullDrv.inputDrvs.map.empty()) { experimentalFeatureSettings.require(Xp::CaDerivations); /* We are be able to resolve this derivation based on the @@ -539,7 +561,7 @@ void DerivationGoal::inputsRealised() attempt = fullDrv.tryResolve(worker.store); } assert(attempt); - Derivation drvResolved { *std::move(attempt) }; + Derivation drvResolved { std::move(*attempt) }; auto pathResolved = writeDerivation(worker.store, drvResolved); @@ -560,11 +582,13 @@ void DerivationGoal::inputsRealised() return; } - for (auto & [depDrvPath, wantedDepOutputs] : fullDrv.inputDrvs) { + std::function::ChildNode &)> accumInputPaths; + + accumInputPaths = [&](const StorePath & depDrvPath, const DerivedPathMap::ChildNode & inputNode) { /* Add the relevant output closures of the input derivation `i' as input paths. Only add the closures of output paths that are specified as inputs. 
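`inputDrvs` is no longer a flat map from derivation path to output names; each entry is a small trie whose children describe outputs requested of other outputs. A standalone sketch of the recursive walk that `addWaiteeDerivedPath` above performs, using plain std types rather than the real `DerivedPathMap`; the `!` joining is only for display.

#include <iostream>
#include <map>
#include <set>
#include <string>

// Shape only: value = outputs wanted directly from this node,
// childMap = further outputs reached through one of those outputs.
struct ChildNode {
    std::set<std::string> value;
    std::map<std::string, ChildNode> childMap;
};

static void walk(const std::string & drv, const ChildNode & node)
{
    for (auto & output : node.value)
        std::cout << drv << "!" << output << "\n";
    for (auto & [output, child] : node.childMap)
        walk(drv + "!" + output, child);
}

int main()
{
    ChildNode node {
        .value = {"out"},
        .childMap = {{"dev", ChildNode{.value = {"lib"}}}},
    };
    // Prints foo.drv!out, then foo.drv!dev!lib; the second line is the
    // nested case the recursion above is prepared to handle.
    walk("foo.drv", node);
}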
*/ - for (auto & j : wantedDepOutputs) { + auto getOutput = [&](const std::string & outputName) { /* TODO (impure derivations-induced tech debt): Tracking input derivation outputs statefully through the goals is error prone and has led to bugs. @@ -576,21 +600,30 @@ void DerivationGoal::inputsRealised() a representation in the store, which is a usability problem in itself. When implementing this logic entirely with lookups make sure that they're cached. */ - if (auto outPath = get(inputDrvOutputs, { depDrvPath, j })) { - worker.store.computeFSClosure(*outPath, inputPaths); + if (auto outPath = get(inputDrvOutputs, { depDrvPath, outputName })) { + return *outPath; } else { auto outMap = worker.evalStore.queryDerivationOutputMap(depDrvPath); - auto outMapPath = outMap.find(j); + auto outMapPath = outMap.find(outputName); if (outMapPath == outMap.end()) { throw Error( "derivation '%s' requires non-existent output '%s' from input derivation '%s'", - worker.store.printStorePath(drvPath), j, worker.store.printStorePath(depDrvPath)); + worker.store.printStorePath(drvPath), outputName, worker.store.printStorePath(depDrvPath)); } - worker.store.computeFSClosure(outMapPath->second, inputPaths); + return outMapPath->second; } - } - } + }; + + for (auto & outputName : inputNode.value) + worker.store.computeFSClosure(getOutput(outputName), inputPaths); + + for (auto & [outputName, childNode] : inputNode.childMap) + accumInputPaths(getOutput(outputName), childNode); + }; + + for (auto & [depDrvPath, depNode] : fullDrv.inputDrvs.map) + accumInputPaths(depDrvPath, depNode); } /* Second, the input sources. */ @@ -996,10 +1029,11 @@ void DerivationGoal::buildDone() } else { + assert(derivationType); st = dynamic_cast(&e) ? BuildResult::NotDeterministic : statusOk(status) ? BuildResult::OutputRejected : - !derivationType.isSandboxed() || diskFull ? BuildResult::TransientFailure : + !derivationType->isSandboxed() || diskFull ? BuildResult::TransientFailure : BuildResult::PermanentFailure; } @@ -1151,11 +1185,11 @@ HookReply DerivationGoal::tryBuildHook() throw; } - WorkerProto::WriteConn conn { hook->sink }; + CommonProto::WriteConn conn { hook->sink }; /* Tell the hook all the inputs that have to be copied to the remote system. */ - WorkerProto::write(worker.store, conn, inputPaths); + CommonProto::write(worker.store, conn, inputPaths); /* Tell the hooks the missing outputs that have to be copied back from the remote system. 
*/ @@ -1166,7 +1200,7 @@ HookReply DerivationGoal::tryBuildHook() if (buildMode != bmCheck && status.known && status.known->isValid()) continue; missingOutputs.insert(outputName); } - WorkerProto::write(worker.store, conn, missingOutputs); + CommonProto::write(worker.store, conn, missingOutputs); } hook->sink = FdSink(); @@ -1358,7 +1392,7 @@ std::pair DerivationGoal::checkPathValidity() [&](const OutputsSpec::Names & names) { return static_cast(names); }, - }, wantedOutputs.raw()); + }, wantedOutputs.raw); SingleDrvOutputs validOutputs; for (auto & i : queryPartialDerivationOutputMap()) { @@ -1485,12 +1519,13 @@ void DerivationGoal::waiteeDone(GoalPtr waitee, ExitCode result) auto * dg = dynamic_cast(&*waitee); if (!dg) return; - auto outputs = fullDrv.inputDrvs.find(dg->drvPath); - if (outputs == fullDrv.inputDrvs.end()) return; + auto * nodeP = fullDrv.inputDrvs.findSlot(DerivedPath::Opaque { .path = dg->drvPath }); + if (!nodeP) return; + auto & outputs = nodeP->value; - for (auto & outputName : outputs->second) { + for (auto & outputName : outputs) { auto buildResult = dg->getBuildResult(DerivedPath::Built { - .drvPath = dg->drvPath, + .drvPath = makeConstantStorePathRef(dg->drvPath), .outputs = OutputsSpec::Names { outputName }, }); if (buildResult.success()) { diff --git a/src/libstore/build/derivation-goal.hh b/src/libstore/build/derivation-goal.hh index ee8f06f25..ddb5ee1e3 100644 --- a/src/libstore/build/derivation-goal.hh +++ b/src/libstore/build/derivation-goal.hh @@ -50,6 +50,9 @@ struct InitialOutput { std::optional known; }; +/** + * A goal for building some or all of the outputs of a derivation. + */ struct DerivationGoal : public Goal { /** @@ -66,8 +69,7 @@ struct DerivationGoal : public Goal std::shared_ptr resolvedDrvGoal; /** - * The specific outputs that we need to build. Empty means all of - * them. + * The specific outputs that we need to build. */ OutputsSpec wantedOutputs; @@ -184,7 +186,7 @@ struct DerivationGoal : public Goal /** * The sort of derivation we are building. 
*/ - DerivationType derivationType; + std::optional derivationType; typedef void (DerivationGoal::*GoalState)(); GoalState state; @@ -334,7 +336,9 @@ struct DerivationGoal : public Goal StorePathSet exportReferences(const StorePathSet & storePaths); - JobCategory jobCategory() override { return JobCategory::Build; }; + JobCategory jobCategory() const override { + return JobCategory::Build; + }; }; MakeError(NotDeterministic, BuildError); diff --git a/src/libstore/build/drv-output-substitution-goal.hh b/src/libstore/build/drv-output-substitution-goal.hh index 5d1253a71..da2426e5e 100644 --- a/src/libstore/build/drv-output-substitution-goal.hh +++ b/src/libstore/build/drv-output-substitution-goal.hh @@ -73,7 +73,9 @@ public: void work() override; void handleEOF(int fd) override; - JobCategory jobCategory() override { return JobCategory::Substitution; }; + JobCategory jobCategory() const override { + return JobCategory::Substitution; + }; }; } diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index 4aa4d6dca..13ff22f45 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -25,8 +25,10 @@ void Store::buildPaths(const std::vector & reqs, BuildMode buildMod ex = std::move(i->ex); } if (i->exitCode != Goal::ecSuccess) { - if (auto i2 = dynamic_cast(i.get())) failed.insert(i2->drvPath); - else if (auto i2 = dynamic_cast(i.get())) failed.insert(i2->storePath); + if (auto i2 = dynamic_cast(i.get())) + failed.insert(i2->drvPath); + else if (auto i2 = dynamic_cast(i.get())) + failed.insert(i2->storePath); } } @@ -77,7 +79,7 @@ BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivat try { worker.run(Goals{goal}); return goal->getBuildResult(DerivedPath::Built { - .drvPath = drvPath, + .drvPath = makeConstantStorePathRef(drvPath), .outputs = OutputsSpec::All {}, }); } catch (Error & e) { @@ -124,8 +126,11 @@ void Store::repairPath(const StorePath & path) auto info = queryPathInfo(path); if (info->deriver && isValidPath(*info->deriver)) { goals.clear(); - // FIXME: Should just build the specific output we need. - goals.insert(worker.makeDerivationGoal(*info->deriver, OutputsSpec::All { }, bmRepair)); + goals.insert(worker.makeGoal(DerivedPath::Built { + .drvPath = makeConstantStorePathRef(*info->deriver), + // FIXME: Should just build the specific output we need. + .outputs = OutputsSpec::All { }, + }, bmRepair)); worker.run(goals); } else throw Error(worker.failingExitStatus(), "cannot repair path '%s'", printStorePath(path)); diff --git a/src/libstore/build/goal.cc b/src/libstore/build/goal.cc index ca7097a68..f8db98280 100644 --- a/src/libstore/build/goal.cc +++ b/src/libstore/build/goal.cc @@ -11,7 +11,7 @@ bool CompareGoalPtrs::operator() (const GoalPtr & a, const GoalPtr & b) const { } -BuildResult Goal::getBuildResult(const DerivedPath & req) { +BuildResult Goal::getBuildResult(const DerivedPath & req) const { BuildResult res { buildResult }; if (auto pbp = std::get_if(&req)) { diff --git a/src/libstore/build/goal.hh b/src/libstore/build/goal.hh index a313bf22c..9af083230 100644 --- a/src/libstore/build/goal.hh +++ b/src/libstore/build/goal.hh @@ -41,7 +41,13 @@ typedef std::map WeakGoalMap; * of each category in parallel. */ enum struct JobCategory { + /** + * A build of a derivation; it will use CPU and disk resources. + */ Build, + /** + * A substitution an arbitrary store object; it will use network resources. 
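`entry-points.cc` above shows the other recurring construction change: `DerivedPath::Built::drvPath` is now a `ref<SingleDerivedPath>`, so plain derivation store paths get wrapped with `makeConstantStorePathRef()`. A minimal sketch; `buildAllOutputs` is a hypothetical helper.

#include "derived-path.hh"

// Hypothetical helper mirroring the construction used in entry-points.cc
// and throughout derivation-goal.cc above.
nix::DerivedPath::Built buildAllOutputs(const nix::StorePath & drvPath)
{
    return nix::DerivedPath::Built {
        .drvPath = nix::makeConstantStorePathRef(drvPath),
        .outputs = nix::OutputsSpec::All {},
    };
}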
+ */ Substitution, }; @@ -110,7 +116,7 @@ public: * sake of both privacy and determinism, and this "safe accessor" * ensures we don't. */ - BuildResult getBuildResult(const DerivedPath &); + BuildResult getBuildResult(const DerivedPath &) const; /** * Exception containing an error message, if any. @@ -144,7 +150,7 @@ public: void trace(std::string_view s); - std::string getName() + std::string getName() const { return name; } @@ -162,7 +168,11 @@ public: virtual void cleanup() { } - virtual JobCategory jobCategory() = 0; + /** + * @brief Hint for the scheduler, which concurrency limit applies. + * @see JobCategory + */ + virtual JobCategory jobCategory() const = 0; }; void addToWeakGoals(WeakGoals & goals, GoalPtr p); diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 6d79f6233..9da3afffd 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -178,6 +178,8 @@ void LocalDerivationGoal::tryLocalBuild() return; } + assert(derivationType); + /* Are we doing a chroot build? */ { auto noChroot = parsedDrv->getBoolAttr("__noChroot"); @@ -195,7 +197,7 @@ void LocalDerivationGoal::tryLocalBuild() else if (settings.sandboxMode == smDisabled) useChroot = false; else if (settings.sandboxMode == smRelaxed) - useChroot = derivationType.isSandboxed() && !noChroot; + useChroot = derivationType->isSandboxed() && !noChroot; } auto & localStore = getLocalStore(); @@ -690,7 +692,7 @@ void LocalDerivationGoal::startBuilder() "nogroup:x:65534:\n", sandboxGid())); /* Create /etc/hosts with localhost entry. */ - if (derivationType.isSandboxed()) + if (derivationType->isSandboxed()) writeFile(chrootRootDir + "/etc/hosts", "127.0.0.1 localhost\n::1 localhost\n"); /* Make the closure of the inputs available in the chroot, @@ -894,7 +896,7 @@ void LocalDerivationGoal::startBuilder() us. */ - if (derivationType.isSandboxed()) + if (derivationType->isSandboxed()) privateNetwork = true; userNamespaceSync.create(); @@ -1065,7 +1067,7 @@ void LocalDerivationGoal::initTmpDir() { env[i.first] = i.second; } else { auto hash = hashString(htSHA256, i.first); - std::string fn = ".attr-" + hash.to_string(Base32, false); + std::string fn = ".attr-" + hash.to_string(HashFormat::Base32, false); Path p = tmpDir + "/" + fn; writeFile(p, rewriteStrings(i.second, inputRewrites)); chownToBuilder(p); @@ -1122,7 +1124,7 @@ void LocalDerivationGoal::initEnv() derivation, tell the builder, so that for instance `fetchurl' can skip checking the output. On older Nixes, this environment variable won't be set, so `fetchurl' will do the check. */ - if (derivationType.isFixed()) env["NIX_OUTPUT_CHECKED"] = "1"; + if (derivationType->isFixed()) env["NIX_OUTPUT_CHECKED"] = "1"; /* *Only* if this is a fixed-output derivation, propagate the values of the environment variables specified in the @@ -1133,9 +1135,19 @@ void LocalDerivationGoal::initEnv() to the builder is generally impure, but the output of fixed-output derivations is by definition pure (since we already know the cryptographic hash of the output). 
*/ - if (!derivationType.isSandboxed()) { - for (auto & i : parsedDrv->getStringsAttr("impureEnvVars").value_or(Strings())) - env[i] = getEnv(i).value_or(""); + if (!derivationType->isSandboxed()) { + auto & impureEnv = settings.impureEnv.get(); + if (!impureEnv.empty()) + experimentalFeatureSettings.require(Xp::ConfigurableImpureEnv); + + for (auto & i : parsedDrv->getStringsAttr("impureEnvVars").value_or(Strings())) { + auto envVar = impureEnv.find(i); + if (envVar != impureEnv.end()) { + env[i] = envVar->second; + } else { + env[i] = getEnv(i).value_or(""); + } + } } /* Currently structured log messages piggyback on stderr, but we @@ -1173,6 +1185,19 @@ void LocalDerivationGoal::writeStructuredAttrs() } +static StorePath pathPartOfReq(const SingleDerivedPath & req) +{ + return std::visit(overloaded { + [&](const SingleDerivedPath::Opaque & bo) { + return bo.path; + }, + [&](const SingleDerivedPath::Built & bfd) { + return pathPartOfReq(*bfd.drvPath); + }, + }, req.raw()); +} + + static StorePath pathPartOfReq(const DerivedPath & req) { return std::visit(overloaded { @@ -1180,7 +1205,7 @@ static StorePath pathPartOfReq(const DerivedPath & req) return bo.path; }, [&](const DerivedPath::Built & bfd) { - return bfd.drvPath; + return pathPartOfReq(*bfd.drvPath); }, }, req.raw()); } @@ -1785,7 +1810,7 @@ void LocalDerivationGoal::runChild() /* Fixed-output derivations typically need to access the network, so give them access to /etc/resolv.conf and so on. */ - if (!derivationType.isSandboxed()) { + if (!derivationType->isSandboxed()) { // Only use nss functions to resolve hosts and // services. Don’t use it for anything else that may // be configured for this system. This limits the @@ -2036,7 +2061,7 @@ void LocalDerivationGoal::runChild() #include "sandbox-defaults.sb" ; - if (!derivationType.isSandboxed()) + if (!derivationType->isSandboxed()) sandboxProfile += #include "sandbox-network.sb" ; @@ -2497,7 +2522,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() ValidPathInfo newInfo0 { worker.store, outputPathName(drv->name, outputName), - *std::move(optCA), + std::move(*optCA), Hash::dummy, }; if (*scratchPath != newInfo0.path) { @@ -2559,8 +2584,8 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() delayedException = std::make_exception_ptr( BuildError("hash mismatch in fixed-output derivation '%s':\n specified: %s\n got: %s", worker.store.printStorePath(drvPath), - wanted.to_string(SRI, true), - got.to_string(SRI, true))); + wanted.to_string(HashFormat::SRI, true), + got.to_string(HashFormat::SRI, true))); } if (!newInfo0.references.empty()) delayedException = std::make_exception_ptr( @@ -2587,7 +2612,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() }); }, - }, output->raw()); + }, output->raw); /* FIXME: set proper permissions in restorePath() so we don't have to do another traversal. 
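The `initEnv` hunk above gives values from the new `impureEnv` setting (gated behind `Xp::ConfigurableImpureEnv`) precedence over the daemon's own environment for variables listed in `impureEnvVars`. A minimal sketch of just that lookup order, with made-up names:

```cpp
// Sketch of the lookup order only; the real code is in
// LocalDerivationGoal::initEnv above.
#include <cstdlib>
#include <map>
#include <string>

using StringMap = std::map<std::string, std::string>;

std::string resolveImpureVar(const std::string & name,
    const StringMap & impureEnvSetting /* the configured impure-env map */)
{
    // 1. A value configured on the daemon side wins.
    if (auto it = impureEnvSetting.find(name); it != impureEnvSetting.end())
        return it->second;
    // 2. Otherwise fall back to the environment Nix itself runs in.
    if (const char * v = std::getenv(name.c_str()))
        return v;
    // 3. Otherwise the variable is passed through empty, as before.
    return "";
}
```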
*/ @@ -2941,7 +2966,7 @@ bool LocalDerivationGoal::isReadDesc(int fd) } -StorePath LocalDerivationGoal::makeFallbackPath(std::string_view outputName) +StorePath LocalDerivationGoal::makeFallbackPath(OutputNameView outputName) { return worker.store.makeStorePath( "rewrite:" + std::string(drvPath.to_string()) + ":name:" + std::string(outputName), diff --git a/src/libstore/build/local-derivation-goal.hh b/src/libstore/build/local-derivation-goal.hh index 9acd7593d..8191af7a6 100644 --- a/src/libstore/build/local-derivation-goal.hh +++ b/src/libstore/build/local-derivation-goal.hh @@ -120,14 +120,6 @@ struct LocalDerivationGoal : public DerivationGoal */ OutputPathMap scratchOutputs; - /** - * Path registration info from the previous round, if we're - * building multiple times. Since this contains the hash, it - * allows us to compare whether two rounds produced the same - * result. - */ - std::map prevInfos; - uid_t sandboxUid() { return usingUserNamespace ? (!buildUser || buildUser->getUIDCount() == 1 ? 1000 : 0) : buildUser->getUID(); } gid_t sandboxGid() { return usingUserNamespace ? (!buildUser || buildUser->getUIDCount() == 1 ? 100 : 0) : buildUser->getGID(); } @@ -272,8 +264,10 @@ struct LocalDerivationGoal : public DerivationGoal /** * Forcibly kill the child process, if any. + * + * Called by destructor, can't be overridden */ - void killChild() override; + void killChild() override final; /** * Kill any processes running under the build user UID or in the @@ -295,7 +289,7 @@ struct LocalDerivationGoal : public DerivationGoal * @todo Add option to randomize, so we can audit whether our * rewrites caught everything */ - StorePath makeFallbackPath(std::string_view outputName); + StorePath makeFallbackPath(OutputNameView outputName); }; } diff --git a/src/libstore/build/substitution-goal.hh b/src/libstore/build/substitution-goal.hh index 9fc041920..1d389d328 100644 --- a/src/libstore/build/substitution-goal.hh +++ b/src/libstore/build/substitution-goal.hh @@ -114,9 +114,12 @@ public: void handleChildOutput(int fd, std::string_view data) override; void handleEOF(int fd) override; - void cleanup() override; + /* Called by destructor, can't be overridden */ + void cleanup() override final; - JobCategory jobCategory() override { return JobCategory::Substitution; }; + JobCategory jobCategory() const override { + return JobCategory::Substitution; + }; }; } diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index a9ca9cbbc..37cb86b91 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -111,7 +111,10 @@ GoalPtr Worker::makeGoal(const DerivedPath & req, BuildMode buildMode) { return std::visit(overloaded { [&](const DerivedPath::Built & bfd) -> GoalPtr { - return makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode); + if (auto bop = std::get_if(&*bfd.drvPath)) + return makeDerivationGoal(bop->path, bfd.outputs, buildMode); + else + throw UnimplementedError("Building dynamic derivations in one shot is not yet implemented."); }, [&](const DerivedPath::Opaque & bo) -> GoalPtr { return makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? 
Repair : NoRepair); @@ -265,7 +268,10 @@ void Worker::run(const Goals & _topGoals) for (auto & i : _topGoals) { topGoals.insert(i); if (auto goal = dynamic_cast(i.get())) { - topPaths.push_back(DerivedPath::Built{goal->drvPath, goal->wantedOutputs}); + topPaths.push_back(DerivedPath::Built { + .drvPath = makeConstantStorePathRef(goal->drvPath), + .outputs = goal->wantedOutputs, + }); } else if (auto goal = dynamic_cast(i.get())) { topPaths.push_back(DerivedPath::Opaque{goal->storePath}); } @@ -516,10 +522,13 @@ void Worker::markContentsGood(const StorePath & path) } -GoalPtr upcast_goal(std::shared_ptr subGoal) { +GoalPtr upcast_goal(std::shared_ptr subGoal) +{ return subGoal; } -GoalPtr upcast_goal(std::shared_ptr subGoal) { + +GoalPtr upcast_goal(std::shared_ptr subGoal) +{ return subGoal; } diff --git a/src/libstore/build/worker.hh b/src/libstore/build/worker.hh index 5abceca0d..23ad87914 100644 --- a/src/libstore/build/worker.hh +++ b/src/libstore/build/worker.hh @@ -34,7 +34,6 @@ GoalPtr upcast_goal(std::shared_ptr subGoal); typedef std::chrono::time_point steady_time_point; - /** * A mapping used to remember for each child process to what goal it * belongs, and file descriptors for receiving log data and output diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index 7d7924d77..357800333 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -65,7 +65,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/'; std::optional ht = parseHashTypeOpt(getAttr("outputHashAlgo")); Hash h = newHashAllowEmpty(getAttr("outputHash"), ht); - fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false)); + fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(HashFormat::Base16, false)); return; } catch (Error & e) { debug(e.what()); diff --git a/src/libstore/common-protocol-impl.hh b/src/libstore/common-protocol-impl.hh new file mode 100644 index 000000000..079c182b8 --- /dev/null +++ b/src/libstore/common-protocol-impl.hh @@ -0,0 +1,41 @@ +#pragma once +/** + * @file + * + * Template implementations (as opposed to mere declarations). + * + * This file is an exmample of the "impl.hh" pattern. See the + * contributing guide. 
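`common-protocol-impl.hh` describes itself as an instance of the "impl.hh" pattern: template declarations stay in the ordinary header, while template definitions go into a companion `*-impl.hh` that only the translation units which actually instantiate them include. A generic sketch of the split with made-up names (the real instance follows below):

```cpp
#include <string>

// ---- codec.hh: declarations only, cheap to include everywhere ----
template<typename T>
struct Codec
{
    static T decode(const std::string & wire);
    static std::string encode(const T & value);
};

// ---- codec-impl.hh: definitions, included only where Codec<T> ----
// ---- is actually instantiated, keeping rebuilds cheap          ----
template<typename T>
T Codec<T>::decode(const std::string & wire)
{
    T value{};
    // ... parse `wire` into `value` ...
    return value;
}

template<typename T>
std::string Codec<T>::encode(const T & value)
{
    std::string wire;
    // ... render `value` into `wire` ...
    return wire;
}
```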
+ */ + +#include "common-protocol.hh" +#include "length-prefixed-protocol-helper.hh" + +namespace nix { + +/* protocol-agnostic templates */ + +#define COMMON_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \ + TEMPLATE T CommonProto::Serialise< T >::read(const Store & store, CommonProto::ReadConn conn) \ + { \ + return LengthPrefixedProtoHelper::read(store, conn); \ + } \ + TEMPLATE void CommonProto::Serialise< T >::write(const Store & store, CommonProto::WriteConn conn, const T & t) \ + { \ + LengthPrefixedProtoHelper::write(store, conn, t); \ + } + +COMMON_USE_LENGTH_PREFIX_SERIALISER(template, std::vector) +COMMON_USE_LENGTH_PREFIX_SERIALISER(template, std::set) +COMMON_USE_LENGTH_PREFIX_SERIALISER(template, std::tuple) + +#define COMMA_ , +COMMON_USE_LENGTH_PREFIX_SERIALISER( + template, + std::map) +#undef COMMA_ + + +/* protocol-specific templates */ + +} diff --git a/src/libstore/common-protocol.cc b/src/libstore/common-protocol.cc new file mode 100644 index 000000000..f906814bc --- /dev/null +++ b/src/libstore/common-protocol.cc @@ -0,0 +1,98 @@ +#include "serialise.hh" +#include "util.hh" +#include "path-with-outputs.hh" +#include "store-api.hh" +#include "build-result.hh" +#include "common-protocol.hh" +#include "common-protocol-impl.hh" +#include "archive.hh" +#include "derivations.hh" + +#include + +namespace nix { + +/* protocol-agnostic definitions */ + +std::string CommonProto::Serialise::read(const Store & store, CommonProto::ReadConn conn) +{ + return readString(conn.from); +} + +void CommonProto::Serialise::write(const Store & store, CommonProto::WriteConn conn, const std::string & str) +{ + conn.to << str; +} + + +StorePath CommonProto::Serialise::read(const Store & store, CommonProto::ReadConn conn) +{ + return store.parseStorePath(readString(conn.from)); +} + +void CommonProto::Serialise::write(const Store & store, CommonProto::WriteConn conn, const StorePath & storePath) +{ + conn.to << store.printStorePath(storePath); +} + + +ContentAddress CommonProto::Serialise::read(const Store & store, CommonProto::ReadConn conn) +{ + return ContentAddress::parse(readString(conn.from)); +} + +void CommonProto::Serialise::write(const Store & store, CommonProto::WriteConn conn, const ContentAddress & ca) +{ + conn.to << renderContentAddress(ca); +} + + +Realisation CommonProto::Serialise::read(const Store & store, CommonProto::ReadConn conn) +{ + std::string rawInput = readString(conn.from); + return Realisation::fromJSON( + nlohmann::json::parse(rawInput), + "remote-protocol" + ); +} + +void CommonProto::Serialise::write(const Store & store, CommonProto::WriteConn conn, const Realisation & realisation) +{ + conn.to << realisation.toJSON().dump(); +} + + +DrvOutput CommonProto::Serialise::read(const Store & store, CommonProto::ReadConn conn) +{ + return DrvOutput::parse(readString(conn.from)); +} + +void CommonProto::Serialise::write(const Store & store, CommonProto::WriteConn conn, const DrvOutput & drvOutput) +{ + conn.to << drvOutput.to_string(); +} + + +std::optional CommonProto::Serialise>::read(const Store & store, CommonProto::ReadConn conn) +{ + auto s = readString(conn.from); + return s == "" ? std::optional {} : store.parseStorePath(s); +} + +void CommonProto::Serialise>::write(const Store & store, CommonProto::WriteConn conn, const std::optional & storePathOpt) +{ + conn.to << (storePathOpt ? 
store.printStorePath(*storePathOpt) : ""); +} + + +std::optional CommonProto::Serialise>::read(const Store & store, CommonProto::ReadConn conn) +{ + return ContentAddress::parseOpt(readString(conn.from)); +} + +void CommonProto::Serialise>::write(const Store & store, CommonProto::WriteConn conn, const std::optional & caOpt) +{ + conn.to << (caOpt ? renderContentAddress(*caOpt) : ""); +} + +} diff --git a/src/libstore/common-protocol.hh b/src/libstore/common-protocol.hh new file mode 100644 index 000000000..f3f28972a --- /dev/null +++ b/src/libstore/common-protocol.hh @@ -0,0 +1,106 @@ +#pragma once +///@file + +#include "serialise.hh" + +namespace nix { + +class Store; +struct Source; + +// items being serialized +class StorePath; +struct ContentAddress; +struct DrvOutput; +struct Realisation; + + +/** + * Shared serializers between the worker protocol, serve protocol, and a + * few others. + * + * This `struct` is basically just a `namespace`; We use a type rather + * than a namespace just so we can use it as a template argument. + */ +struct CommonProto +{ + /** + * A unidirectional read connection, to be used by the read half of the + * canonical serializers below. + */ + struct ReadConn { + Source & from; + }; + + /** + * A unidirectional write connection, to be used by the write half of the + * canonical serializers below. + */ + struct WriteConn { + Sink & to; + }; + + template + struct Serialise; + + /** + * Wrapper function around `CommonProto::Serialise::write` that allows us to + * infer the type instead of having to write it down explicitly. + */ + template + static void write(const Store & store, WriteConn conn, const T & t) + { + CommonProto::Serialise::write(store, conn, t); + } +}; + +#define DECLARE_COMMON_SERIALISER(T) \ + struct CommonProto::Serialise< T > \ + { \ + static T read(const Store & store, CommonProto::ReadConn conn); \ + static void write(const Store & store, CommonProto::WriteConn conn, const T & str); \ + } + +template<> +DECLARE_COMMON_SERIALISER(std::string); +template<> +DECLARE_COMMON_SERIALISER(StorePath); +template<> +DECLARE_COMMON_SERIALISER(ContentAddress); +template<> +DECLARE_COMMON_SERIALISER(DrvOutput); +template<> +DECLARE_COMMON_SERIALISER(Realisation); + +template +DECLARE_COMMON_SERIALISER(std::vector); +template +DECLARE_COMMON_SERIALISER(std::set); +template +DECLARE_COMMON_SERIALISER(std::tuple); + +#define COMMA_ , +template +DECLARE_COMMON_SERIALISER(std::map); +#undef COMMA_ + +/** + * These use the empty string for the null case, relying on the fact + * that the underlying types never serialize to the empty string. + * + * We do this instead of a generic std::optional instance because + * ordinal tags (0 or 1, here) are a bit of a compatability hazard. For + * the same reason, we don't have a std::variant instances (ordinal + * tags 0...n). + * + * We could the generic instances and then these as specializations for + * compatability, but that's proven a bit finnicky, and also makes the + * worker protocol harder to implement in other languages where such + * specializations may not be allowed. 
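Given the declarations in the new `common-protocol.hh`, a round trip through the shared serialisers looks roughly like the sketch below; it additionally assumes Nix's `StringSink`/`StringSource` from `serialise.hh`. Per the comment above, an absent optional travels as the empty string.

```cpp
// Sketch: round-trip an optional store path through CommonProto.
#include "common-protocol.hh"
#include "serialise.hh"
#include "store-api.hh"

#include <cassert>
#include <optional>

using namespace nix;

void roundTrip(Store & store, const std::optional<StorePath> & original)
{
    StringSink sink;
    CommonProto::write(store,
        CommonProto::WriteConn { .to = sink },
        original); // serialised as "" when std::nullopt

    StringSource source { sink.s };
    auto copy = CommonProto::Serialise<std::optional<StorePath>>::read(
        store, CommonProto::ReadConn { .from = source });

    assert(copy == original);
}
```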
+ */ +template<> +DECLARE_COMMON_SERIALISER(std::optional); +template<> +DECLARE_COMMON_SERIALISER(std::optional); + +} diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc index 080456e18..52c60154c 100644 --- a/src/libstore/content-address.cc +++ b/src/libstore/content-address.cc @@ -60,7 +60,7 @@ std::string ContentAddress::render() const + makeFileIngestionPrefix(method); }, }, method.raw) - + this->hash.to_string(Base32, true); + + this->hash.to_string(HashFormat::Base32, true); } /** @@ -83,7 +83,7 @@ static std::pair parseContentAddressMethodPrefix if (!hashTypeRaw) throw UsageError("content address hash must be in form ':', but found: %s", wholeInput); HashType hashType = parseHashType(*hashTypeRaw); - return std::move(hashType); + return hashType; }; // Switch on prefix @@ -115,7 +115,7 @@ ContentAddress ContentAddress::parse(std::string_view rawCa) auto [caMethod, hashType] = parseContentAddressMethodPrefix(rest); return ContentAddress { - .method = std::move(caMethod).raw, + .method = std::move(caMethod), .hash = Hash::parseNonSRIUnprefixed(rest, hashType), }; } diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh index 01b771e52..c4d619bdc 100644 --- a/src/libstore/content-address.hh +++ b/src/libstore/content-address.hh @@ -5,6 +5,7 @@ #include "hash.hh" #include "path.hh" #include "comparator.hh" +#include "variant-wrapper.hh" namespace nix { @@ -71,11 +72,7 @@ struct ContentAddressMethod GENERATE_CMP(ContentAddressMethod, me->raw); - /* The moral equivalent of `using Raw::Raw;` */ - ContentAddressMethod(auto &&... arg) - : raw(std::forward(arg)...) - { } - + MAKE_WRAPPER_CONSTRUCTOR(ContentAddressMethod); /** * Parse the prefix tag which indicates how the files @@ -252,10 +249,7 @@ struct ContentAddressWithReferences GENERATE_CMP(ContentAddressWithReferences, me->raw); - /* The moral equivalent of `using Raw::Raw;` */ - ContentAddressWithReferences(auto &&... arg) - : raw(std::forward(arg)...) 
- { } + MAKE_WRAPPER_CONSTRUCTOR(ContentAddressWithReferences); /** * Create a `ContentAddressWithReferences` from a mere diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 8cbf6f044..007ffc05a 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -45,9 +45,9 @@ struct TunnelLogger : public Logger Sync state_; - unsigned int clientVersion; + WorkerProto::Version clientVersion; - TunnelLogger(FdSink & to, unsigned int clientVersion) + TunnelLogger(FdSink & to, WorkerProto::Version clientVersion) : to(to), clientVersion(clientVersion) { } void enqueueMsg(const std::string & s) @@ -261,24 +261,18 @@ struct ClientSettings } }; -static std::vector readDerivedPaths(Store & store, unsigned int clientVersion, WorkerProto::ReadConn conn) -{ - std::vector reqs; - if (GET_PROTOCOL_MINOR(clientVersion) >= 30) { - reqs = WorkerProto::Serialise>::read(store, conn); - } else { - for (auto & s : readStrings(conn.from)) - reqs.push_back(parsePathWithOutputs(store, s).toDerivedPath()); - } - return reqs; -} - static void performOp(TunnelLogger * logger, ref store, - TrustedFlag trusted, RecursiveFlag recursive, unsigned int clientVersion, + TrustedFlag trusted, RecursiveFlag recursive, WorkerProto::Version clientVersion, Source & from, BufferedSink & to, WorkerProto::Op op) { - WorkerProto::ReadConn rconn { .from = from }; - WorkerProto::WriteConn wconn { .to = to }; + WorkerProto::ReadConn rconn { + .from = from, + .version = clientVersion, + }; + WorkerProto::WriteConn wconn { + .to = to, + .version = clientVersion, + }; switch (op) { @@ -334,7 +328,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto hash = store->queryPathInfo(path)->narHash; logger->stopWork(); - to << hash.to_string(Base16, false); + to << hash.to_string(HashFormat::Base16, false); break; } @@ -428,7 +422,7 @@ static void performOp(TunnelLogger * logger, ref store, }(); logger->stopWork(); - pathInfo->write(to, *store, GET_PROTOCOL_MINOR(clientVersion)); + WorkerProto::Serialise::write(*store, wconn, *pathInfo); } else { HashType hashAlgo; std::string baseName; @@ -532,7 +526,7 @@ static void performOp(TunnelLogger * logger, ref store, } case WorkerProto::Op::BuildPaths: { - auto drvs = readDerivedPaths(*store, clientVersion, rconn); + auto drvs = WorkerProto::Serialise::read(*store, rconn); BuildMode mode = bmNormal; if (GET_PROTOCOL_MINOR(clientVersion) >= 15) { mode = (BuildMode) readInt(from); @@ -557,7 +551,7 @@ static void performOp(TunnelLogger * logger, ref store, } case WorkerProto::Op::BuildPathsWithResults: { - auto drvs = readDerivedPaths(*store, clientVersion, rconn); + auto drvs = WorkerProto::Serialise::read(*store, rconn); BuildMode mode = bmNormal; mode = (BuildMode) readInt(from); @@ -641,16 +635,7 @@ static void performOp(TunnelLogger * logger, ref store, auto res = store->buildDerivation(drvPath, drv, buildMode); logger->stopWork(); - to << res.status << res.errorMsg; - if (GET_PROTOCOL_MINOR(clientVersion) >= 29) { - to << res.timesBuilt << res.isNonDeterministic << res.startTime << res.stopTime; - } - if (GET_PROTOCOL_MINOR(clientVersion) >= 28) { - DrvOutputs builtOutputs; - for (auto & [output, realisation] : res.builtOutputs) - builtOutputs.insert_or_assign(realisation.id, realisation); - WorkerProto::write(*store, wconn, builtOutputs); - } + WorkerProto::write(*store, wconn, res); break; } @@ -834,7 +819,7 @@ static void performOp(TunnelLogger * logger, ref store, if (info) { if (GET_PROTOCOL_MINOR(clientVersion) >= 17) to << 1; - info->write(to, 
*store, GET_PROTOCOL_MINOR(clientVersion), false); + WorkerProto::write(*store, wconn, static_cast(*info)); } else { assert(GET_PROTOCOL_MINOR(clientVersion) >= 17); to << 0; @@ -932,7 +917,7 @@ static void performOp(TunnelLogger * logger, ref store, } case WorkerProto::Op::QueryMissing: { - auto targets = readDerivedPaths(*store, clientVersion, rconn); + auto targets = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); StorePathSet willBuild, willSubstitute, unknown; uint64_t downloadSize, narSize; @@ -1017,7 +1002,7 @@ void processConnection( if (magic != WORKER_MAGIC_1) throw Error("protocol mismatch"); to << WORKER_MAGIC_2 << PROTOCOL_VERSION; to.flush(); - unsigned int clientVersion = readInt(from); + WorkerProto::Version clientVersion = readInt(from); if (clientVersion < 0x10a) throw Error("the Nix client version is too old"); @@ -1052,7 +1037,10 @@ void processConnection( auto temp = trusted ? store->isTrustedClient() : std::optional { NotTrusted }; - WorkerProto::WriteConn wconn { .to = to }; + WorkerProto::WriteConn wconn { + .to = to, + .version = clientVersion, + }; WorkerProto::write(*store, wconn, temp); } diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index f4e4980c2..a5ceb29dc 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -4,15 +4,15 @@ #include "globals.hh" #include "util.hh" #include "split.hh" -#include "worker-protocol.hh" -#include "worker-protocol-impl.hh" +#include "common-protocol.hh" +#include "common-protocol-impl.hh" #include "fs-accessor.hh" #include #include namespace nix { -std::optional DerivationOutput::path(const Store & store, std::string_view drvName, std::string_view outputName) const +std::optional DerivationOutput::path(const Store & store, std::string_view drvName, OutputNameView outputName) const { return std::visit(overloaded { [](const DerivationOutput::InputAddressed & doi) -> std::optional { @@ -32,11 +32,11 @@ std::optional DerivationOutput::path(const Store & store, std::string [](const DerivationOutput::Impure &) -> std::optional { return std::nullopt; }, - }, raw()); + }, raw); } -StorePath DerivationOutput::CAFixed::path(const Store & store, std::string_view drvName, std::string_view outputName) const +StorePath DerivationOutput::CAFixed::path(const Store & store, std::string_view drvName, OutputNameView outputName) const { return store.makeFixedOutputPathFromCA( outputPathName(drvName, outputName), @@ -60,7 +60,7 @@ bool DerivationType::isCA() const [](const Impure &) { return true; }, - }, raw()); + }, raw); } bool DerivationType::isFixed() const @@ -75,7 +75,7 @@ bool DerivationType::isFixed() const [](const Impure &) { return false; }, - }, raw()); + }, raw); } bool DerivationType::hasKnownOutputPaths() const @@ -90,7 +90,7 @@ bool DerivationType::hasKnownOutputPaths() const [](const Impure &) { return false; }, - }, raw()); + }, raw); } @@ -106,7 +106,7 @@ bool DerivationType::isSandboxed() const [](const Impure &) { return false; }, - }, raw()); + }, raw); } @@ -122,7 +122,7 @@ bool DerivationType::isPure() const [](const Impure &) { return false; }, - }, raw()); + }, raw); } @@ -136,7 +136,7 @@ StorePath writeDerivation(Store & store, const Derivation & drv, RepairFlag repair, bool readOnly) { auto references = drv.inputSrcs; - for (auto & i : drv.inputDrvs) + for (auto & i : drv.inputDrvs.map) references.insert(i.first); /* Note that the outputs of a derivation are *not* references (that can be missing (of course) and should not necessarily be @@ -154,8 +154,9 @@ 
static void expect(std::istream & str, std::string_view s) { char s2[s.size()]; str.read(s2, s.size()); - if (std::string(s2, s.size()) != s) - throw FormatError("expected string '%1%'", s); + std::string_view s2View { s2, s.size() }; + if (s2View != s) + throw FormatError("expected string '%s', got '%s'", s, s2View); } @@ -207,23 +208,27 @@ static bool endOfList(std::istream & str) static StringSet parseStrings(std::istream & str, bool arePaths) { StringSet res; + expect(str, "["); while (!endOfList(str)) res.insert(arePaths ? parsePath(str) : parseString(str)); return res; } -static DerivationOutput parseDerivationOutput(const Store & store, - std::string_view pathS, std::string_view hashAlgo, std::string_view hashS) +static DerivationOutput parseDerivationOutput( + const Store & store, + std::string_view pathS, std::string_view hashAlgo, std::string_view hashS, + const ExperimentalFeatureSettings & xpSettings) { if (hashAlgo != "") { ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgo); if (method == TextIngestionMethod {}) - experimentalFeatureSettings.require(Xp::DynamicDerivations); + xpSettings.require(Xp::DynamicDerivations); const auto hashType = parseHashType(hashAlgo); if (hashS == "impure") { - experimentalFeatureSettings.require(Xp::ImpureDerivations); - assert(pathS == ""); + xpSettings.require(Xp::ImpureDerivations); + if (pathS != "") + throw FormatError("impure derivation output should not specify output path"); return DerivationOutput::Impure { .method = std::move(method), .hashType = std::move(hashType), @@ -238,8 +243,9 @@ static DerivationOutput parseDerivationOutput(const Store & store, }, }; } else { - experimentalFeatureSettings.require(Xp::CaDerivations); - assert(pathS == ""); + xpSettings.require(Xp::CaDerivations); + if (pathS != "") + throw FormatError("content-addressed derivation output should not specify output path"); return DerivationOutput::CAFloating { .method = std::move(method), .hashType = std::move(hashType), @@ -256,29 +262,116 @@ static DerivationOutput parseDerivationOutput(const Store & store, } } -static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str) +static DerivationOutput parseDerivationOutput( + const Store & store, std::istringstream & str, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings) { expect(str, ","); const auto pathS = parseString(str); expect(str, ","); const auto hashAlgo = parseString(str); expect(str, ","); const auto hash = parseString(str); expect(str, ")"); - return parseDerivationOutput(store, pathS, hashAlgo, hash); + return parseDerivationOutput(store, pathS, hashAlgo, hash, xpSettings); +} + +/** + * All ATerm Derivation format versions currently known. + * + * Unknown versions are rejected at the parsing stage. + */ +enum struct DerivationATermVersion { + /** + * Older unversioned form + */ + Traditional, + + /** + * Newer versioned form; only this version so far. + */ + DynamicDerivations, +}; + +static DerivedPathMap::ChildNode parseDerivedPathMapNode( + const Store & store, + std::istringstream & str, + DerivationATermVersion version) +{ + DerivedPathMap::ChildNode node; + + auto parseNonDynamic = [&]() { + node.value = parseStrings(str, false); + }; + + // Older derivation should never use new form, but newer + // derivaiton can use old form. 
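For reference, the two on-disk shapes that the parser and unparser in this file now handle look roughly as follows; the strings are hand-written illustrations of the grammar, not taken from real `.drv` files.

```cpp
// Traditional, unversioned form: each inputDrvs entry pairs a store path
// with a plain list of wanted output names.
const char * traditionalForm =
    R"(Derive([...outputs...],[("/nix/store/...-dep.drv",["out"])],...))";

// Versioned form, emitted only when an input is the output of a dynamic
// derivation (experimental feature `dynamic-derivations`): the entry
// becomes (outputs, [(outputName, child node), ...]), nesting recursively.
const char * dynamicForm =
    R"(DrvWithVersion("xp-dyn-drv",[...outputs...],)"
    R"([("/nix/store/...-dep.drv",(["out"],[("out",["dev"])]))],...))";
```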
+ switch (version) { + case DerivationATermVersion::Traditional: + parseNonDynamic(); + break; + case DerivationATermVersion::DynamicDerivations: + switch (str.peek()) { + case '[': + parseNonDynamic(); + break; + case '(': + expect(str, "("); + node.value = parseStrings(str, false); + expect(str, ",["); + while (!endOfList(str)) { + expect(str, "("); + auto outputName = parseString(str); + expect(str, ","); + node.childMap.insert_or_assign(outputName, parseDerivedPathMapNode(store, str, version)); + expect(str, ")"); + } + expect(str, ")"); + break; + default: + throw FormatError("invalid inputDrvs entry in derivation"); + } + break; + default: + // invalid format, not a parse error but internal error + assert(false); + } + return node; } -Derivation parseDerivation(const Store & store, std::string && s, std::string_view name) +Derivation parseDerivation( + const Store & store, std::string && s, std::string_view name, + const ExperimentalFeatureSettings & xpSettings) { Derivation drv; drv.name = name; std::istringstream str(std::move(s)); - expect(str, "Derive(["); + expect(str, "D"); + DerivationATermVersion version; + switch (str.peek()) { + case 'e': + expect(str, "erive("); + version = DerivationATermVersion::Traditional; + break; + case 'r': + expect(str, "rvWithVersion("); + auto versionS = parseString(str); + if (versionS == "xp-dyn-drv") { + // Only verison we have so far + version = DerivationATermVersion::DynamicDerivations; + xpSettings.require(Xp::DynamicDerivations); + } else { + throw FormatError("Unknown derivation ATerm format version '%s'", versionS); + } + expect(str, ","); + break; + } /* Parse the list of outputs. */ + expect(str, "["); while (!endOfList(str)) { expect(str, "("); std::string id = parseString(str); - auto output = parseDerivationOutput(store, str); + auto output = parseDerivationOutput(store, str, xpSettings); drv.outputs.emplace(std::move(id), std::move(output)); } @@ -287,12 +380,12 @@ Derivation parseDerivation(const Store & store, std::string && s, std::string_vi while (!endOfList(str)) { expect(str, "("); Path drvPath = parsePath(str); - expect(str, ",["); - drv.inputDrvs.insert_or_assign(store.parseStorePath(drvPath), parseStrings(str, false)); + expect(str, ","); + drv.inputDrvs.map.insert_or_assign(store.parseStorePath(drvPath), parseDerivedPathMapNode(store, str, version)); expect(str, ")"); } - expect(str, ",["); drv.inputSrcs = store.parseStorePathSet(parseStrings(str, true)); + expect(str, ","); drv.inputSrcs = store.parseStorePathSet(parseStrings(str, true)); expect(str, ","); drv.platform = parseString(str); expect(str, ","); drv.builder = parseString(str); @@ -376,14 +469,67 @@ static void printUnquotedStrings(std::string & res, ForwardIterator i, ForwardIt } +static void unparseDerivedPathMapNode(const Store & store, std::string & s, const DerivedPathMap::ChildNode & node) +{ + s += ','; + if (node.childMap.empty()) { + printUnquotedStrings(s, node.value.begin(), node.value.end()); + } else { + s += "("; + printUnquotedStrings(s, node.value.begin(), node.value.end()); + s += ",["; + bool first = true; + for (auto & [outputName, childNode] : node.childMap) { + if (first) first = false; else s += ','; + s += '('; printUnquotedString(s, outputName); + unparseDerivedPathMapNode(store, s, childNode); + s += ')'; + } + s += "])"; + } +} + + +/** + * Does the derivation have a dependency on the output of a dynamic + * derivation? + * + * In other words, does it on the output of derivation that is itself an + * ouput of a derivation? 
This corresponds to a dependency that is an + * inductive derived path with more than one layer of + * `DerivedPath::Built`. + */ +static bool hasDynamicDrvDep(const Derivation & drv) +{ + return + std::find_if( + drv.inputDrvs.map.begin(), + drv.inputDrvs.map.end(), + [](auto & kv) { return !kv.second.childMap.empty(); }) + != drv.inputDrvs.map.end(); +} + + std::string Derivation::unparse(const Store & store, bool maskOutputs, - std::map * actualInputs) const + DerivedPathMap::ChildNode::Map * actualInputs) const { std::string s; s.reserve(65536); - s += "Derive(["; + + /* Use older unversioned form if possible, for wider compat. Use + newer form only if we need it, which we do for + `Xp::DynamicDerivations`. */ + if (hasDynamicDrvDep(*this)) { + s += "DrvWithVersion("; + // Only version we have so far + printUnquotedString(s, "xp-dyn-drv"); + s += ","; + } else { + s += "Derive("; + } bool first = true; + s += "["; for (auto & i : outputs) { if (first) first = false; else s += ','; s += '('; printUnquotedString(s, i.first); @@ -396,7 +542,7 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs, [&](const DerivationOutput::CAFixed & dof) { s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first))); s += ','; printUnquotedString(s, dof.ca.printMethodAlgo()); - s += ','; printUnquotedString(s, dof.ca.hash.to_string(Base16, false)); + s += ','; printUnquotedString(s, dof.ca.hash.to_string(HashFormat::Base16, false)); }, [&](const DerivationOutput::CAFloating & dof) { s += ','; printUnquotedString(s, ""); @@ -408,30 +554,30 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs, s += ','; printUnquotedString(s, ""); s += ','; printUnquotedString(s, ""); }, - [&](const DerivationOutputImpure & doi) { + [&](const DerivationOutput::Impure & doi) { // FIXME s += ','; printUnquotedString(s, ""); s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashType(doi.hashType)); s += ','; printUnquotedString(s, "impure"); } - }, i.second.raw()); + }, i.second.raw); s += ')'; } s += "],["; first = true; if (actualInputs) { - for (auto & i : *actualInputs) { + for (auto & [drvHashModulo, childMap] : *actualInputs) { if (first) first = false; else s += ','; - s += '('; printUnquotedString(s, i.first); - s += ','; printUnquotedStrings(s, i.second.begin(), i.second.end()); + s += '('; printUnquotedString(s, drvHashModulo); + unparseDerivedPathMapNode(store, s, childMap); s += ')'; } } else { - for (auto & i : inputDrvs) { + for (auto & [drvPath, childMap] : inputDrvs.map) { if (first) first = false; else s += ','; - s += '('; printUnquotedString(s, store.printStorePath(i.first)); - s += ','; printUnquotedStrings(s, i.second.begin(), i.second.end()); + s += '('; printUnquotedString(s, store.printStorePath(drvPath)); + unparseDerivedPathMapNode(store, s, childMap); s += ')'; } } @@ -466,7 +612,7 @@ bool isDerivation(std::string_view fileName) } -std::string outputPathName(std::string_view drvName, std::string_view outputName) { +std::string outputPathName(std::string_view drvName, OutputNameView outputName) { std::string res { drvName }; if (outputName != "out") { res += "-"; @@ -509,7 +655,7 @@ DerivationType BasicDerivation::type() const [&](const DerivationOutput::Impure &) { impureOutputs.insert(i.first); }, - }, i.second.raw()); + }, i.second.raw); } if (inputAddressedOutputs.empty() @@ -626,10 +772,10 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut if (type.isFixed()) { 
std::map outputHashes; for (const auto & i : drv.outputs) { - auto & dof = std::get(i.second.raw()); + auto & dof = std::get(i.second.raw); auto hash = hashString(htSHA256, "fixed:out:" + dof.ca.printMethodAlgo() + ":" - + dof.ca.hash.to_string(Base16, false) + ":" + + dof.ca.hash.to_string(HashFormat::Base16, false) + ":" + store.printStorePath(dof.path(store, drv.name, i.first))); outputHashes.insert_or_assign(i.first, std::move(hash)); } @@ -663,20 +809,18 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut [](const DerivationType::Impure &) -> DrvHash::Kind { assert(false); } - }, drv.type().raw()); + }, drv.type().raw); - std::map inputs2; - for (auto & [drvPath, inputOutputs0] : drv.inputDrvs) { - // Avoid lambda capture restriction with standard / Clang - auto & inputOutputs = inputOutputs0; + DerivedPathMap::ChildNode::Map inputs2; + for (auto & [drvPath, node] : drv.inputDrvs.map) { const auto & res = pathDerivationModulo(store, drvPath); if (res.kind == DrvHash::Kind::Deferred) kind = DrvHash::Kind::Deferred; - for (auto & outputName : inputOutputs) { + for (auto & outputName : node.value) { const auto h = get(res.hashes, outputName); if (!h) throw Error("no hash for output '%s' of derivation '%s'", outputName, drv.name); - inputs2[h->to_string(Base16, false)].insert(outputName); + inputs2[h->to_string(HashFormat::Base16, false)].value.insert(outputName); } } @@ -706,7 +850,7 @@ static DerivationOutput readDerivationOutput(Source & in, const Store & store) const auto hashAlgo = readString(in); const auto hash = readString(in); - return parseDerivationOutput(store, pathS, hashAlgo, hash); + return parseDerivationOutput(store, pathS, hashAlgo, hash, experimentalFeatureSettings); } StringSet BasicDerivation::outputNames() const @@ -720,10 +864,10 @@ StringSet BasicDerivation::outputNames() const DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const { DerivationOutputsAndOptPaths outsAndOptPaths; - for (auto output : outputs) + for (auto & [outputName, output] : outputs) outsAndOptPaths.insert(std::make_pair( - output.first, - std::make_pair(output.second, output.second.path(store, name, output.first)) + outputName, + std::make_pair(output, output.path(store, name, outputName)) ) ); return outsAndOptPaths; @@ -751,8 +895,8 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, drv.outputs.emplace(std::move(name), std::move(output)); } - drv.inputSrcs = WorkerProto::Serialise::read(store, - WorkerProto::ReadConn { .from = in }); + drv.inputSrcs = CommonProto::Serialise::read(store, + CommonProto::ReadConn { .from = in }); in >> drv.platform >> drv.builder; drv.args = readStrings(in); @@ -781,7 +925,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr [&](const DerivationOutput::CAFixed & dof) { out << store.printStorePath(dof.path(store, drv.name, i.first)) << dof.ca.printMethodAlgo() - << dof.ca.hash.to_string(Base16, false); + << dof.ca.hash.to_string(HashFormat::Base16, false); }, [&](const DerivationOutput::CAFloating & dof) { out << "" @@ -798,10 +942,10 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr << (doi.method.renderPrefix() + printHashType(doi.hashType)) << "impure"; }, - }, i.second.raw()); + }, i.second.raw); } - WorkerProto::write(store, - WorkerProto::WriteConn { .to = out }, + CommonProto::write(store, + CommonProto::WriteConn { .to = out }, drv.inputSrcs); out << drv.platform << drv.builder << 
drv.args; out << drv.env.size(); @@ -810,10 +954,10 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr } -std::string hashPlaceholder(const std::string_view outputName) +std::string hashPlaceholder(const OutputNameView outputName) { // FIXME: memoize? - return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(Base32, false); + return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Base32, false); } @@ -821,6 +965,8 @@ std::string hashPlaceholder(const std::string_view outputName) static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites) { + debug("Rewriting the derivation"); + for (auto & rewrite : rewrites) { debug("rewriting %s as %s", rewrite.first, rewrite.second); } @@ -840,7 +986,7 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String auto hashModulo = hashDerivationModulo(store, Derivation(drv), true); for (auto & [outputName, output] : drv.outputs) { - if (std::holds_alternative(output.raw())) { + if (std::holds_alternative(output.raw)) { auto h = get(hashModulo.hashes, outputName); if (!h) throw Error("derivation '%s' output '%s' has no hash (derivations.cc/rewriteDerivation)", @@ -859,14 +1005,70 @@ std::optional Derivation::tryResolve(Store & store) const { std::map, StorePath> inputDrvOutputs; - for (auto & input : inputDrvs) - for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(input.first)) - if (outputPath) - inputDrvOutputs.insert_or_assign({input.first, outputName}, *outputPath); + std::function::ChildNode &)> accum; + accum = [&](auto & inputDrv, auto & node) { + for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(inputDrv)) { + if (outputPath) { + inputDrvOutputs.insert_or_assign({inputDrv, outputName}, *outputPath); + if (auto p = get(node.childMap, outputName)) + accum(*outputPath, *p); + } + } + }; + + for (auto & [inputDrv, node] : inputDrvs.map) + accum(inputDrv, node); return tryResolve(store, inputDrvOutputs); } +static bool tryResolveInput( + Store & store, StorePathSet & inputSrcs, StringMap & inputRewrites, + const DownstreamPlaceholder * placeholderOpt, + const StorePath & inputDrv, const DerivedPathMap::ChildNode & inputNode, + const std::map, StorePath> & inputDrvOutputs) +{ + auto getOutput = [&](const std::string & outputName) { + auto * actualPathOpt = get(inputDrvOutputs, { inputDrv, outputName }); + if (!actualPathOpt) + warn("output %s of input %s missing, aborting the resolving", + outputName, + store.printStorePath(inputDrv) + ); + return actualPathOpt; + }; + + auto getPlaceholder = [&](const std::string & outputName) { + return placeholderOpt + ? 
DownstreamPlaceholder::unknownDerivation(*placeholderOpt, outputName) + : DownstreamPlaceholder::unknownCaOutput(inputDrv, outputName); + }; + + for (auto & outputName : inputNode.value) { + auto actualPathOpt = getOutput(outputName); + if (!actualPathOpt) return false; + auto actualPath = *actualPathOpt; + if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { + inputRewrites.emplace( + getPlaceholder(outputName).render(), + store.printStorePath(actualPath)); + } + inputSrcs.insert(std::move(actualPath)); + } + + for (auto & [outputName, childNode] : inputNode.childMap) { + auto actualPathOpt = getOutput(outputName); + if (!actualPathOpt) return false; + auto actualPath = *actualPathOpt; + auto nextPlaceholder = getPlaceholder(outputName); + if (!tryResolveInput(store, inputSrcs, inputRewrites, + &nextPlaceholder, actualPath, childNode, + inputDrvOutputs)) + return false; + } + return true; +} + std::optional Derivation::tryResolve( Store & store, const std::map, StorePath> & inputDrvOutputs) const @@ -876,23 +1078,10 @@ std::optional Derivation::tryResolve( // Input paths that we'll want to rewrite in the derivation StringMap inputRewrites; - for (auto & [inputDrv, inputOutputs] : inputDrvs) { - for (auto & outputName : inputOutputs) { - if (auto actualPath = get(inputDrvOutputs, { inputDrv, outputName })) { - if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { - inputRewrites.emplace( - DownstreamPlaceholder::unknownCaOutput(inputDrv, outputName).render(), - store.printStorePath(*actualPath)); - } - resolved.inputSrcs.insert(*actualPath); - } else { - warn("output '%s' of input '%s' missing, aborting the resolving", - outputName, - store.printStorePath(inputDrv)); - return {}; - } - } - } + for (auto & [inputDrv, inputNode] : inputDrvs.map) + if (!tryResolveInput(store, resolved.inputSrcs, inputRewrites, + nullptr, inputDrv, inputNode, inputDrvOutputs)) + return std::nullopt; rewriteDerivation(store, resolved, inputRewrites); @@ -955,7 +1144,7 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const [&](const DerivationOutput::Impure &) { /* Nothing to check */ }, - }, i.second.raw()); + }, i.second.raw); } } @@ -963,7 +1152,7 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const const Hash impureOutputHash = hashString(htSHA256, "impure"); nlohmann::json DerivationOutput::toJSON( - const Store & store, std::string_view drvName, std::string_view outputName) const + const Store & store, std::string_view drvName, OutputNameView outputName) const { nlohmann::json res = nlohmann::json::object(); std::visit(overloaded { @@ -973,7 +1162,7 @@ nlohmann::json DerivationOutput::toJSON( [&](const DerivationOutput::CAFixed & dof) { res["path"] = store.printStorePath(dof.path(store, drvName, outputName)); res["hashAlgo"] = dof.ca.printMethodAlgo(); - res["hash"] = dof.ca.hash.to_string(Base16, false); + res["hash"] = dof.ca.hash.to_string(HashFormat::Base16, false); // FIXME print refs? 
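`tryResolveInput` above chains placeholders: a direct input gets `unknownCaOutput`, while a nested (dynamic) input derives its placeholder from its parent's via `unknownDerivation`. The sketch below just wraps those two calls; the header name is assumed, and at run time they require the corresponding experimental features to be enabled.

```cpp
#include "downstream-placeholder.hh"
#include "path.hh"

#include <string>
#include <string_view>

using namespace nix;

std::string nestedOutputPlaceholder(
    const StorePath & drvPath,      // an entry in inputDrvs
    std::string_view outerOutput,   // output that is itself a derivation
    std::string_view innerOutput)   // output of that inner derivation
{
    // Level 1: an output of a derivation we know only by its store path.
    auto outer = DownstreamPlaceholder::unknownCaOutput(drvPath, outerOutput);
    // Level 2: an output of a derivation we know only as `outer`.
    auto inner = DownstreamPlaceholder::unknownDerivation(outer, innerOutput);
    return inner.render();
}
```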
}, [&](const DerivationOutput::CAFloating & dof) { @@ -984,13 +1173,13 @@ nlohmann::json DerivationOutput::toJSON( res["hashAlgo"] = doi.method.renderPrefix() + printHashType(doi.hashType); res["impure"] = true; }, - }, raw()); + }, raw); return res; } DerivationOutput DerivationOutput::fromJSON( - const Store & store, std::string_view drvName, std::string_view outputName, + const Store & store, std::string_view drvName, OutputNameView outputName, const nlohmann::json & _json, const ExperimentalFeatureSettings & xpSettings) { @@ -1081,10 +1270,25 @@ nlohmann::json Derivation::toJSON(const Store & store) const } { - auto& inputDrvsObj = res["inputDrvs"]; - inputDrvsObj = nlohmann::json ::object(); - for (auto & input : inputDrvs) - inputDrvsObj[store.printStorePath(input.first)] = input.second; + std::function::ChildNode &)> doInput; + doInput = [&](const auto & inputNode) { + auto value = nlohmann::json::object(); + value["outputs"] = inputNode.value; + { + auto next = nlohmann::json::object(); + for (auto & [outputId, childNode] : inputNode.childMap) + next[outputId] = doInput(childNode); + value["dynamicOutputs"] = std::move(next); + } + return value; + }; + { + auto& inputDrvsObj = res["inputDrvs"]; + inputDrvsObj = nlohmann::json::object(); + for (auto & [inputDrv, inputNode] : inputDrvs.map) { + inputDrvsObj[store.printStorePath(inputDrv)] = doInput(inputNode); + } + } } res["system"] = platform; @@ -1098,7 +1302,8 @@ nlohmann::json Derivation::toJSON(const Store & store) const Derivation Derivation::fromJSON( const Store & store, - const nlohmann::json & json) + const nlohmann::json & json, + const ExperimentalFeatureSettings & xpSettings) { using nlohmann::detail::value_t; @@ -1130,12 +1335,21 @@ Derivation Derivation::fromJSON( } try { + std::function::ChildNode(const nlohmann::json &)> doInput; + doInput = [&](const auto & json) { + DerivedPathMap::ChildNode node; + node.value = static_cast( + ensureType(valueAt(json, "outputs"), value_t::array)); + for (auto & [outputId, childNode] : ensureType(valueAt(json, "dynamicOutputs"), value_t::object).items()) { + xpSettings.require(Xp::DynamicDerivations); + node.childMap[outputId] = doInput(childNode); + } + return node; + }; auto & inputDrvsObj = ensureType(valueAt(json, "inputDrvs"), value_t::object); - for (auto & [inputDrvPath, inputOutputs] : inputDrvsObj.items()) { - ensureType(inputOutputs, value_t::array); - res.inputDrvs[store.parseStorePath(inputDrvPath)] = - static_cast(inputOutputs); - } + for (auto & [inputDrvPath, inputOutputs] : inputDrvsObj.items()) + res.inputDrvs.map[store.parseStorePath(inputDrvPath)] = + doInput(inputOutputs); } catch (Error & e) { e.addTrace({}, "while reading key 'inputDrvs'"); throw; diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index fa79f77fd..fa14e7536 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -6,9 +6,10 @@ #include "hash.hh" #include "content-address.hh" #include "repair-flag.hh" -#include "derived-path.hh" +#include "derived-path-map.hh" #include "sync.hh" #include "comparator.hh" +#include "variant-wrapper.hh" #include #include @@ -20,108 +21,110 @@ class Store; /* Abstract syntax of derivations. */ -/** - * The traditional non-fixed-output derivation type. - */ -struct DerivationOutputInputAddressed -{ - StorePath path; - - GENERATE_CMP(DerivationOutputInputAddressed, me->path); -}; - -/** - * Fixed-output derivations, whose output paths are content - * addressed according to that fixed output. 
- */ -struct DerivationOutputCAFixed -{ - /** - * Method and hash used for expected hash computation. - * - * References are not allowed by fiat. - */ - ContentAddress ca; - - /** - * Return the \ref StorePath "store path" corresponding to this output - * - * @param drvName The name of the derivation this is an output of, without the `.drv`. - * @param outputName The name of this output. - */ - StorePath path(const Store & store, std::string_view drvName, std::string_view outputName) const; - - GENERATE_CMP(DerivationOutputCAFixed, me->ca); -}; - -/** - * Floating-output derivations, whose output paths are content - * addressed, but not fixed, and so are dynamically calculated from - * whatever the output ends up being. - * */ -struct DerivationOutputCAFloating -{ - /** - * How the file system objects will be serialized for hashing - */ - ContentAddressMethod method; - - /** - * How the serialization will be hashed - */ - HashType hashType; - - GENERATE_CMP(DerivationOutputCAFloating, me->method, me->hashType); -}; - -/** - * Input-addressed output which depends on a (CA) derivation whose hash - * isn't known yet. - */ -struct DerivationOutputDeferred { - GENERATE_CMP(DerivationOutputDeferred); -}; - -/** - * Impure output which is moved to a content-addressed location (like - * CAFloating) but isn't registered as a realization. - */ -struct DerivationOutputImpure -{ - /** - * How the file system objects will be serialized for hashing - */ - ContentAddressMethod method; - - /** - * How the serialization will be hashed - */ - HashType hashType; - - GENERATE_CMP(DerivationOutputImpure, me->method, me->hashType); -}; - -typedef std::variant< - DerivationOutputInputAddressed, - DerivationOutputCAFixed, - DerivationOutputCAFloating, - DerivationOutputDeferred, - DerivationOutputImpure -> _DerivationOutputRaw; - /** * A single output of a BasicDerivation (and Derivation). */ -struct DerivationOutput : _DerivationOutputRaw +struct DerivationOutput { - using Raw = _DerivationOutputRaw; - using Raw::Raw; + /** + * The traditional non-fixed-output derivation type. + */ + struct InputAddressed + { + StorePath path; - using InputAddressed = DerivationOutputInputAddressed; - using CAFixed = DerivationOutputCAFixed; - using CAFloating = DerivationOutputCAFloating; - using Deferred = DerivationOutputDeferred; - using Impure = DerivationOutputImpure; + GENERATE_CMP(InputAddressed, me->path); + }; + + /** + * Fixed-output derivations, whose output paths are content + * addressed according to that fixed output. + */ + struct CAFixed + { + /** + * Method and hash used for expected hash computation. + * + * References are not allowed by fiat. + */ + ContentAddress ca; + + /** + * Return the \ref StorePath "store path" corresponding to this output + * + * @param drvName The name of the derivation this is an output of, without the `.drv`. + * @param outputName The name of this output. + */ + StorePath path(const Store & store, std::string_view drvName, OutputNameView outputName) const; + + GENERATE_CMP(CAFixed, me->ca); + }; + + /** + * Floating-output derivations, whose output paths are content + * addressed, but not fixed, and so are dynamically calculated from + * whatever the output ends up being. 
+ * */ + struct CAFloating + { + /** + * How the file system objects will be serialized for hashing + */ + ContentAddressMethod method; + + /** + * How the serialization will be hashed + */ + HashType hashType; + + GENERATE_CMP(CAFloating, me->method, me->hashType); + }; + + /** + * Input-addressed output which depends on a (CA) derivation whose hash + * isn't known yet. + */ + struct Deferred { + GENERATE_CMP(Deferred); + }; + + /** + * Impure output which is moved to a content-addressed location (like + * CAFloating) but isn't registered as a realization. + */ + struct Impure + { + /** + * How the file system objects will be serialized for hashing + */ + ContentAddressMethod method; + + /** + * How the serialization will be hashed + */ + HashType hashType; + + GENERATE_CMP(Impure, me->method, me->hashType); + }; + + typedef std::variant< + InputAddressed, + CAFixed, + CAFloating, + Deferred, + Impure + > Raw; + + Raw raw; + + GENERATE_CMP(DerivationOutput, me->raw); + + MAKE_WRAPPER_CONSTRUCTOR(DerivationOutput); + + /** + * Force choosing a variant + */ + DerivationOutput() = delete; /** * \note when you use this function you should make sure that you're @@ -129,23 +132,19 @@ struct DerivationOutput : _DerivationOutputRaw * the safer interface provided by * BasicDerivation::outputsAndOptPaths */ - std::optional path(const Store & store, std::string_view drvName, std::string_view outputName) const; - - inline const Raw & raw() const { - return static_cast(*this); - } + std::optional path(const Store & store, std::string_view drvName, OutputNameView outputName) const; nlohmann::json toJSON( const Store & store, std::string_view drvName, - std::string_view outputName) const; + OutputNameView outputName) const; /** * @param xpSettings Stop-gap to avoid globals during unit tests. */ static DerivationOutput fromJSON( const Store & store, std::string_view drvName, - std::string_view outputName, + OutputNameView outputName, const nlohmann::json & json, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); }; @@ -167,61 +166,71 @@ typedef std::map DerivationInputs; -/** - * Input-addressed derivation types - */ -struct DerivationType_InputAddressed { +struct DerivationType { /** - * True iff the derivation type can't be determined statically, - * for instance because it (transitively) depends on a content-addressed - * derivation. - */ - bool deferred; -}; - -/** - * Content-addressed derivation types - */ -struct DerivationType_ContentAddressed { - /** - * Whether the derivation should be built safely inside a sandbox. + * Input-addressed derivation types */ - bool sandboxed; + struct InputAddressed { + /** + * True iff the derivation type can't be determined statically, + * for instance because it (transitively) depends on a content-addressed + * derivation. + */ + bool deferred; + + GENERATE_CMP(InputAddressed, me->deferred); + }; + /** - * Whether the derivation's outputs' content-addresses are "fixed" - * or "floating. - * - * - Fixed: content-addresses are written down as part of the - * derivation itself. If the outputs don't end up matching the - * build fails. - * - * - Floating: content-addresses are not written down, we do not - * know them until we perform the build. + * Content-addressed derivation types */ - bool fixed; -}; + struct ContentAddressed { + /** + * Whether the derivation should be built safely inside a sandbox. + */ + bool sandboxed; + /** + * Whether the derivation's outputs' content-addresses are "fixed" + * or "floating". 
+ * + * - Fixed: content-addresses are written down as part of the + * derivation itself. If the outputs don't end up matching the + * build fails. + * + * - Floating: content-addresses are not written down, we do not + * know them until we perform the build. + */ + bool fixed; -/** - * Impure derivation type - * - * This is similar at buil-time to the content addressed, not standboxed, not fixed - * type, but has some restrictions on its usage. - */ -struct DerivationType_Impure { -}; + GENERATE_CMP(ContentAddressed, me->sandboxed, me->fixed); + }; -typedef std::variant< - DerivationType_InputAddressed, - DerivationType_ContentAddressed, - DerivationType_Impure -> _DerivationTypeRaw; + /** + * Impure derivation type + * + * This is similar at buil-time to the content addressed, not standboxed, not fixed + * type, but has some restrictions on its usage. + */ + struct Impure { + GENERATE_CMP(Impure); + }; -struct DerivationType : _DerivationTypeRaw { - using Raw = _DerivationTypeRaw; - using Raw::Raw; - using InputAddressed = DerivationType_InputAddressed; - using ContentAddressed = DerivationType_ContentAddressed; - using Impure = DerivationType_Impure; + typedef std::variant< + InputAddressed, + ContentAddressed, + Impure + > Raw; + + Raw raw; + + GENERATE_CMP(DerivationType, me->raw); + + MAKE_WRAPPER_CONSTRUCTOR(DerivationType); + + /** + * Force choosing a variant + */ + DerivationType() = delete; /** * Do the outputs of the derivation have paths calculated from their @@ -257,10 +266,6 @@ struct DerivationType : _DerivationTypeRaw { * closure, or if fixed output. */ bool hasKnownOutputPaths() const; - - inline const Raw & raw() const { - return static_cast(*this); - } }; struct BasicDerivation @@ -318,13 +323,13 @@ struct Derivation : BasicDerivation /** * inputs that are sub-derivations */ - DerivationInputs inputDrvs; + DerivedPathMap> inputDrvs; /** * Print a derivation. */ std::string unparse(const Store & store, bool maskOutputs, - std::map * actualInputs = nullptr) const; + DerivedPathMap::ChildNode::Map * actualInputs = nullptr) const; /** * Return the underlying basic derivation but with these changes: @@ -363,7 +368,8 @@ struct Derivation : BasicDerivation nlohmann::json toJSON(const Store & store) const; static Derivation fromJSON( const Store & store, - const nlohmann::json & json); + const nlohmann::json & json, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); GENERATE_CMP(Derivation, static_cast(*me), @@ -384,7 +390,11 @@ StorePath writeDerivation(Store & store, /** * Read a derivation from a file. */ -Derivation parseDerivation(const Store & store, std::string && s, std::string_view name); +Derivation parseDerivation( + const Store & store, + std::string && s, + std::string_view name, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); /** * \todo Remove. @@ -400,7 +410,7 @@ bool isDerivation(std::string_view fileName); * This is usually -, but is just when * the output name is "out". */ -std::string outputPathName(std::string_view drvName, std::string_view outputName); +std::string outputPathName(std::string_view drvName, OutputNameView outputName); /** @@ -494,7 +504,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr * own outputs without needing to use the hash of a derivation in * itself, making the hash near-impossible to calculate. 
*/ -std::string hashPlaceholder(const std::string_view outputName); +std::string hashPlaceholder(const OutputNameView outputName); extern const Hash impureOutputHash; diff --git a/src/libstore/derived-path-map.cc b/src/libstore/derived-path-map.cc new file mode 100644 index 000000000..5982c04b3 --- /dev/null +++ b/src/libstore/derived-path-map.cc @@ -0,0 +1,69 @@ +#include "derived-path-map.hh" + +namespace nix { + +template +typename DerivedPathMap::ChildNode & DerivedPathMap::ensureSlot(const SingleDerivedPath & k) +{ + std::function initIter; + initIter = [&](const auto & k) -> auto & { + return std::visit(overloaded { + [&](const SingleDerivedPath::Opaque & bo) -> auto & { + // will not overwrite if already there + return map[bo.path]; + }, + [&](const SingleDerivedPath::Built & bfd) -> auto & { + auto & n = initIter(*bfd.drvPath); + return n.childMap[bfd.output]; + }, + }, k.raw()); + }; + return initIter(k); +} + +template +typename DerivedPathMap::ChildNode * DerivedPathMap::findSlot(const SingleDerivedPath & k) +{ + std::function initIter; + initIter = [&](const auto & k) { + return std::visit(overloaded { + [&](const SingleDerivedPath::Opaque & bo) { + auto it = map.find(bo.path); + return it != map.end() + ? &it->second + : nullptr; + }, + [&](const SingleDerivedPath::Built & bfd) { + auto * n = initIter(*bfd.drvPath); + if (!n) return (ChildNode *)nullptr; + + auto it = n->childMap.find(bfd.output); + return it != n->childMap.end() + ? &it->second + : nullptr; + }, + }, k.raw()); + }; + return initIter(k); +} + +} + +// instantiations + +namespace nix { + +GENERATE_CMP_EXT( + template<>, + DerivedPathMap>::ChildNode, + me->value, + me->childMap); + +GENERATE_CMP_EXT( + template<>, + DerivedPathMap>, + me->map); + +template struct DerivedPathMap>; + +}; diff --git a/src/libstore/derived-path-map.hh b/src/libstore/derived-path-map.hh new file mode 100644 index 000000000..4d72b301e --- /dev/null +++ b/src/libstore/derived-path-map.hh @@ -0,0 +1,95 @@ +#pragma once + +#include "types.hh" +#include "derived-path.hh" + +namespace nix { + +/** + * A simple Trie, of sorts. Conceptually a map of `SingleDerivedPath` to + * values. + * + * Concretely, an n-ary tree, as described below. A + * `SingleDerivedPath::Opaque` maps to the value of an immediate child + * of the root node. A `SingleDerivedPath::Built` maps to a deeper child + * node: the `SingleDerivedPath::Built::drvPath` is first mapped to a a + * child node (inductively), and then the + * `SingleDerivedPath::Built::output` is used to look up that child's + * child via its map. In this manner, every `SingleDerivedPath` is + * mapped to a child node. + * + * @param V A type to instantiate for each output. It should probably + * should be an "optional" type so not every interior node has to have a + * value. `* const Something` or `std::optional` would be + * good choices for "optional" types. + */ +template +struct DerivedPathMap { + /** + * A child node (non-root node). + */ + struct ChildNode { + /** + * Value of this child node. + * + * @see DerivedPathMap for what `V` should be. + */ + V value; + + /** + * The map type for the root node. + */ + using Map = std::map; + + /** + * The map of the root node. + */ + Map childMap; + + DECLARE_CMP(ChildNode); + }; + + /** + * The map type for the root node. + */ + using Map = std::map; + + /** + * The map of root node. + */ + Map map; + + DECLARE_CMP(DerivedPathMap); + + /** + * Find the node for `k`, creating it if needed. 
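+     *
+     * As a rough illustrative sketch (hypothetical usage, not part of
+     * this patch): given a `DerivedPathMap<StringSet> m;` and a parsed
+     * `SingleDerivedPath p`, calling
+     * `m.ensureSlot(p).value.insert("out")` would record the output
+     * name "out" against the key `p`, creating any intermediate child
+     * nodes along the way.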
+ * + * The node is referred to as a "slot" on the assumption that `V` is + * some sort of optional type, so the given key can be set or unset + * by changing this node. + */ + ChildNode & ensureSlot(const SingleDerivedPath & k); + + /** + * Like `ensureSlot` but does not create the slot if it doesn't exist. + * + * Read the entire description of `ensureSlot` to understand an + * important caveat here that "have slot" does *not* imply "key is + * set in map". To ensure a key is set one would need to get the + * child node (with `findSlot` or `ensureSlot`) *and* check the + * `ChildNode::value`. + */ + ChildNode * findSlot(const SingleDerivedPath & k); +}; + + +DECLARE_CMP_EXT( + template<>, + DerivedPathMap>::, + DerivedPathMap>); +DECLARE_CMP_EXT( + template<>, + DerivedPathMap>::ChildNode::, + DerivedPathMap>::ChildNode); + +} diff --git a/src/libstore/derived-path.cc b/src/libstore/derived-path.cc index 52d073f81..47d784deb 100644 --- a/src/libstore/derived-path.cc +++ b/src/libstore/derived-path.cc @@ -7,52 +7,133 @@ namespace nix { -nlohmann::json DerivedPath::Opaque::toJSON(ref store) const { +#define CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, COMPARATOR) \ + bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \ + { \ + const MY_TYPE* me = this; \ + auto fields1 = std::make_tuple(*me->drvPath, me->FIELD); \ + me = &other; \ + auto fields2 = std::make_tuple(*me->drvPath, me->FIELD); \ + return fields1 COMPARATOR fields2; \ + } +#define CMP(CHILD_TYPE, MY_TYPE, FIELD) \ + CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, ==) \ + CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, !=) \ + CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, <) + +#define FIELD_TYPE std::string +CMP(SingleDerivedPath, SingleDerivedPathBuilt, output) +#undef FIELD_TYPE + +#define FIELD_TYPE OutputsSpec +CMP(SingleDerivedPath, DerivedPathBuilt, outputs) +#undef FIELD_TYPE + +#undef CMP +#undef CMP_ONE + +nlohmann::json DerivedPath::Opaque::toJSON(const Store & store) const +{ + return store.printStorePath(path); +} + +nlohmann::json SingleDerivedPath::Built::toJSON(Store & store) const { nlohmann::json res; - res["path"] = store->printStorePath(path); + res["drvPath"] = drvPath->toJSON(store); + // Fallback for the input-addressed derivation case: We expect to always be + // able to print the output paths, so let’s do it + // FIXME try-resolve on drvPath + const auto outputMap = store.queryPartialDerivationOutputMap(resolveDerivedPath(store, *drvPath)); + res["output"] = output; + auto outputPathIter = outputMap.find(output); + if (outputPathIter == outputMap.end()) + res["outputPath"] = nullptr; + else if (std::optional p = outputPathIter->second) + res["outputPath"] = store.printStorePath(*p); + else + res["outputPath"] = nullptr; return res; } -nlohmann::json DerivedPath::Built::toJSON(ref store) const { +nlohmann::json DerivedPath::Built::toJSON(Store & store) const { nlohmann::json res; - res["drvPath"] = store->printStorePath(drvPath); + res["drvPath"] = drvPath->toJSON(store); // Fallback for the input-addressed derivation case: We expect to always be // able to print the output paths, so let’s do it - const auto outputMap = store->queryPartialDerivationOutputMap(drvPath); + // FIXME try-resolve on drvPath + const auto outputMap = store.queryPartialDerivationOutputMap(resolveDerivedPath(store, *drvPath)); for (const auto & [output, outputPathOpt] : outputMap) { if (!outputs.contains(output)) continue; if (outputPathOpt) - res["outputs"][output] = store->printStorePath(*outputPathOpt); + res["outputs"][output] = 
store.printStorePath(*outputPathOpt); else res["outputs"][output] = nullptr; } return res; } +nlohmann::json SingleDerivedPath::toJSON(Store & store) const +{ + return std::visit([&](const auto & buildable) { + return buildable.toJSON(store); + }, raw()); +} + +nlohmann::json DerivedPath::toJSON(Store & store) const +{ + return std::visit([&](const auto & buildable) { + return buildable.toJSON(store); + }, raw()); +} + std::string DerivedPath::Opaque::to_string(const Store & store) const { return store.printStorePath(path); } +std::string SingleDerivedPath::Built::to_string(const Store & store) const +{ + return drvPath->to_string(store) + "^" + output; +} + +std::string SingleDerivedPath::Built::to_string_legacy(const Store & store) const +{ + return drvPath->to_string(store) + "!" + output; +} + std::string DerivedPath::Built::to_string(const Store & store) const { - return store.printStorePath(drvPath) + return drvPath->to_string(store) + '^' + outputs.to_string(); } std::string DerivedPath::Built::to_string_legacy(const Store & store) const { - return store.printStorePath(drvPath) - + '!' + return drvPath->to_string_legacy(store) + + "!" + outputs.to_string(); } +std::string SingleDerivedPath::to_string(const Store & store) const +{ + return std::visit( + [&](const auto & req) { return req.to_string(store); }, + raw()); +} + std::string DerivedPath::to_string(const Store & store) const +{ + return std::visit( + [&](const auto & req) { return req.to_string(store); }, + raw()); +} + +std::string SingleDerivedPath::to_string_legacy(const Store & store) const { return std::visit(overloaded { - [&](const DerivedPath::Built & req) { return req.to_string(store); }, - [&](const DerivedPath::Opaque & req) { return req.to_string(store); }, + [&](const SingleDerivedPath::Built & req) { return req.to_string_legacy(store); }, + [&](const SingleDerivedPath::Opaque & req) { return req.to_string(store); }, }, this->raw()); } @@ -70,30 +151,156 @@ DerivedPath::Opaque DerivedPath::Opaque::parse(const Store & store, std::string_ return {store.parseStorePath(s)}; } -DerivedPath::Built DerivedPath::Built::parse(const Store & store, std::string_view drvS, std::string_view outputsS) +void drvRequireExperiment( + const SingleDerivedPath & drv, + const ExperimentalFeatureSettings & xpSettings) { + std::visit(overloaded { + [&](const SingleDerivedPath::Opaque &) { + // plain drv path; no experimental features required. + }, + [&](const SingleDerivedPath::Built &) { + xpSettings.require(Xp::DynamicDerivations); + }, + }, drv.raw()); +} + +SingleDerivedPath::Built SingleDerivedPath::Built::parse( + const Store & store, ref drv, + OutputNameView output, + const ExperimentalFeatureSettings & xpSettings) +{ + drvRequireExperiment(*drv, xpSettings); return { - .drvPath = store.parseStorePath(drvS), + .drvPath = drv, + .output = std::string { output }, + }; +} + +DerivedPath::Built DerivedPath::Built::parse( + const Store & store, ref drv, + OutputNameView outputsS, + const ExperimentalFeatureSettings & xpSettings) +{ + drvRequireExperiment(*drv, xpSettings); + return { + .drvPath = drv, .outputs = OutputsSpec::parse(outputsS), }; } -static inline DerivedPath parseWith(const Store & store, std::string_view s, std::string_view separator) +static SingleDerivedPath parseWithSingle( + const Store & store, std::string_view s, std::string_view separator, + const ExperimentalFeatureSettings & xpSettings) { - size_t n = s.find(separator); + size_t n = s.rfind(separator); + return n == s.npos + ? 
(SingleDerivedPath) SingleDerivedPath::Opaque::parse(store, s) + : (SingleDerivedPath) SingleDerivedPath::Built::parse(store, + make_ref(parseWithSingle( + store, + s.substr(0, n), + separator, + xpSettings)), + s.substr(n + 1), + xpSettings); +} + +SingleDerivedPath SingleDerivedPath::parse( + const Store & store, + std::string_view s, + const ExperimentalFeatureSettings & xpSettings) +{ + return parseWithSingle(store, s, "^", xpSettings); +} + +SingleDerivedPath SingleDerivedPath::parseLegacy( + const Store & store, + std::string_view s, + const ExperimentalFeatureSettings & xpSettings) +{ + return parseWithSingle(store, s, "!", xpSettings); +} + +static DerivedPath parseWith( + const Store & store, std::string_view s, std::string_view separator, + const ExperimentalFeatureSettings & xpSettings) +{ + size_t n = s.rfind(separator); return n == s.npos ? (DerivedPath) DerivedPath::Opaque::parse(store, s) - : (DerivedPath) DerivedPath::Built::parse(store, s.substr(0, n), s.substr(n + 1)); + : (DerivedPath) DerivedPath::Built::parse(store, + make_ref(parseWithSingle( + store, + s.substr(0, n), + separator, + xpSettings)), + s.substr(n + 1), + xpSettings); } -DerivedPath DerivedPath::parse(const Store & store, std::string_view s) +DerivedPath DerivedPath::parse( + const Store & store, + std::string_view s, + const ExperimentalFeatureSettings & xpSettings) { - return parseWith(store, s, "^"); + return parseWith(store, s, "^", xpSettings); } -DerivedPath DerivedPath::parseLegacy(const Store & store, std::string_view s) +DerivedPath DerivedPath::parseLegacy( + const Store & store, + std::string_view s, + const ExperimentalFeatureSettings & xpSettings) { - return parseWith(store, s, "!"); + return parseWith(store, s, "!", xpSettings); +} + +DerivedPath DerivedPath::fromSingle(const SingleDerivedPath & req) +{ + return std::visit(overloaded { + [&](const SingleDerivedPath::Opaque & o) -> DerivedPath { + return o; + }, + [&](const SingleDerivedPath::Built & b) -> DerivedPath { + return DerivedPath::Built { + .drvPath = b.drvPath, + .outputs = OutputsSpec::Names { b.output }, + }; + }, + }, req.raw()); +} + +const StorePath & SingleDerivedPath::Built::getBaseStorePath() const +{ + return drvPath->getBaseStorePath(); +} + +const StorePath & DerivedPath::Built::getBaseStorePath() const +{ + return drvPath->getBaseStorePath(); +} + +template +static inline const StorePath & getBaseStorePath_(const DP & derivedPath) +{ + return std::visit(overloaded { + [&](const typename DP::Built & bfd) -> auto & { + return bfd.drvPath->getBaseStorePath(); + }, + [&](const typename DP::Opaque & bo) -> auto & { + return bo.path; + }, + }, derivedPath.raw()); +} + +const StorePath & SingleDerivedPath::getBaseStorePath() const +{ + return getBaseStorePath_(*this); +} + +const StorePath & DerivedPath::getBaseStorePath() const +{ + return getBaseStorePath_(*this); } } diff --git a/src/libstore/derived-path.hh b/src/libstore/derived-path.hh index 7a4261ce0..4d7033df2 100644 --- a/src/libstore/derived-path.hh +++ b/src/libstore/derived-path.hh @@ -24,28 +24,37 @@ class Store; struct DerivedPathOpaque { StorePath path; - nlohmann::json toJSON(ref store) const; std::string to_string(const Store & store) const; static DerivedPathOpaque parse(const Store & store, std::string_view); + nlohmann::json toJSON(const Store & store) const; GENERATE_CMP(DerivedPathOpaque, me->path); }; +struct SingleDerivedPath; + /** - * A derived path that is built from a derivation + * A single derived path that is built from a derivation * - * Built 
derived paths are pair of a derivation and some output names.
- They are evaluated by building the derivation, and then replacing the
- output names with the resulting outputs.
-
- Note that does mean a derived store paths evaluates to multiple
- opaque paths, which is sort of icky as expressions are supposed to
- evaluate to single values. Perhaps this should have just a single
- output name.
+ * Built derived paths are a pair of a derivation and an output name. They are
+ * evaluated by building the derivation, and then taking the resulting output
+ * path of the given output name.
  */
-struct DerivedPathBuilt {
-    StorePath drvPath;
-    OutputsSpec outputs;
+struct SingleDerivedPathBuilt {
+    ref drvPath;
+    OutputName output;
+
+    /**
+     * Get the store path this is ultimately derived from (by realising
+     * and projecting outputs).
+     *
+     * Note that this is *not* a property of the store object being
+     * referred to, but just of this path --- how we happened to be
+     * referring to that store object. In other words, this means this
+     * function breaks "referential transparency". It should therefore
+     * be used only with great care.
+     */
+    const StorePath & getBaseStorePath() const;

     /**
      * Uses `^` as the separator
@@ -57,11 +66,139 @@ struct DerivedPathBuilt {
     std::string to_string_legacy(const Store & store) const;
     /**
      * The caller splits on the separator, so it works for both variants.
+     *
+     * @param xpSettings Stop-gap to avoid globals during unit tests.
      */
-    static DerivedPathBuilt parse(const Store & store, std::string_view drvPath, std::string_view outputs);
-    nlohmann::json toJSON(ref store) const;
+    static SingleDerivedPathBuilt parse(
+        const Store & store, ref drvPath,
+        OutputNameView outputs,
+        const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+    nlohmann::json toJSON(Store & store) const;

-    GENERATE_CMP(DerivedPathBuilt, me->drvPath, me->outputs);
+    DECLARE_CMP(SingleDerivedPathBuilt);
+};
+
+using _SingleDerivedPathRaw = std::variant<
+    DerivedPathOpaque,
+    SingleDerivedPathBuilt
+>;
+
+/**
+ * A "derived path" is a very simple sort of expression (not a Nix
+ * language expression! But an expression in the general sense) that
+ * evaluates to a (concrete) store path. It is either:
+ *
+ * - opaque, in which case it is just a concrete store path with
+ *   possibly no known derivation
+ *
+ * - built, in which case it is a pair of a derivation path and an
+ *   output name.
+ */
+struct SingleDerivedPath : _SingleDerivedPathRaw {
+    using Raw = _SingleDerivedPathRaw;
+    using Raw::Raw;
+
+    using Opaque = DerivedPathOpaque;
+    using Built = SingleDerivedPathBuilt;
+
+    inline const Raw & raw() const {
+        return static_cast(*this);
+    }
+
+    /**
+     * Get the store path this is ultimately derived from (by realising
+     * and projecting outputs).
+     *
+     * Note that this is *not* a property of the store object being
+     * referred to, but just of this path --- how we happened to be
+     * referring to that store object. In other words, this means this
+     * function breaks "referential transparency". It should therefore
+     * be used only with great care.
+     */
+    const StorePath & getBaseStorePath() const;
+
+    /**
+     * Uses `^` as the separator
+     */
+    std::string to_string(const Store & store) const;
+    /**
+     * Uses `!` as the separator
+     */
+    std::string to_string_legacy(const Store & store) const;
+    /**
+     * Uses `^` as the separator
+     *
+     * @param xpSettings Stop-gap to avoid globals during unit tests.
+ */ + static SingleDerivedPath parse( + const Store & store, + std::string_view, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + /** + * Uses `!` as the separator + * + * @param xpSettings Stop-gap to avoid globals during unit tests. + */ + static SingleDerivedPath parseLegacy( + const Store & store, + std::string_view, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + nlohmann::json toJSON(Store & store) const; +}; + +static inline ref makeConstantStorePathRef(StorePath drvPath) +{ + return make_ref(SingleDerivedPath::Opaque { drvPath }); +} + +/** + * A set of derived paths that are built from a derivation + * + * Built derived paths are pair of a derivation and some output names. + * They are evaluated by building the derivation, and then replacing the + * output names with the resulting outputs. + * + * Note that does mean a derived store paths evaluates to multiple + * opaque paths, which is sort of icky as expressions are supposed to + * evaluate to single values. Perhaps this should have just a single + * output name. + */ +struct DerivedPathBuilt { + ref drvPath; + OutputsSpec outputs; + + /** + * Get the store path this is ultimately derived from (by realising + * and projecting outputs). + * + * Note that this is *not* a property of the store object being + * referred to, but just of this path --- how we happened to be + * referring to that store object. In other words, this means this + * function breaks "referential transparency". It should therefore + * be used only with great care. + */ + const StorePath & getBaseStorePath() const; + + /** + * Uses `^` as the separator + */ + std::string to_string(const Store & store) const; + /** + * Uses `!` as the separator + */ + std::string to_string_legacy(const Store & store) const; + /** + * The caller splits on the separator, so it works for both variants. + * + * @param xpSettings Stop-gap to avoid globals during unit tests. + */ + static DerivedPathBuilt parse( + const Store & store, ref, + std::string_view, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + nlohmann::json toJSON(Store & store) const; + + DECLARE_CMP(DerivedPathBuilt); }; using _DerivedPathRaw = std::variant< @@ -71,13 +208,13 @@ using _DerivedPathRaw = std::variant< /** * A "derived path" is a very simple sort of expression that evaluates - * to (concrete) store path. It is either: + * to one or more (concrete) store paths. It is either: * - * - opaque, in which case it is just a concrete store path with + * - opaque, in which case it is just a single concrete store path with * possibly no known derivation * - * - built, in which case it is a pair of a derivation path and an - * output name. + * - built, in which case it is a pair of a derivation path and some + * output names. */ struct DerivedPath : _DerivedPathRaw { using Raw = _DerivedPathRaw; @@ -90,6 +227,18 @@ struct DerivedPath : _DerivedPathRaw { return static_cast(*this); } + /** + * Get the store path this is ultimately derived from (by realising + * and projecting outputs). + * + * Note that this is *not* a property of the store object being + * referred to, but just of this path --- how we happened to be + * referring to that store object. In other words, this means this + * function breaks "referential transparency". It should therefore + * be used only with great care. 
+ */ + const StorePath & getBaseStorePath() const; + /** * Uses `^` as the separator */ @@ -100,14 +249,43 @@ struct DerivedPath : _DerivedPathRaw { std::string to_string_legacy(const Store & store) const; /** * Uses `^` as the separator + * + * @param xpSettings Stop-gap to avoid globals during unit tests. */ - static DerivedPath parse(const Store & store, std::string_view); + static DerivedPath parse( + const Store & store, + std::string_view, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); /** * Uses `!` as the separator + * + * @param xpSettings Stop-gap to avoid globals during unit tests. */ - static DerivedPath parseLegacy(const Store & store, std::string_view); + static DerivedPath parseLegacy( + const Store & store, + std::string_view, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + + /** + * Convert a `SingleDerivedPath` to a `DerivedPath`. + */ + static DerivedPath fromSingle(const SingleDerivedPath &); + + nlohmann::json toJSON(Store & store) const; }; typedef std::vector DerivedPaths; +/** + * Used by various parser functions to require experimental features as + * needed. + * + * Somewhat unfortunate this cannot just be an implementation detail for + * this module. + * + * @param xpSettings Stop-gap to avoid globals during unit tests. + */ +void drvRequireExperiment( + const SingleDerivedPath & drv, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); } diff --git a/src/libstore/downstream-placeholder.cc b/src/libstore/downstream-placeholder.cc index d623c05e2..ca9f7476e 100644 --- a/src/libstore/downstream-placeholder.cc +++ b/src/libstore/downstream-placeholder.cc @@ -5,13 +5,13 @@ namespace nix { std::string DownstreamPlaceholder::render() const { - return "/" + hash.to_string(Base32, false); + return "/" + hash.to_string(HashFormat::Base32, false); } DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput( const StorePath & drvPath, - std::string_view outputName, + OutputNameView outputName, const ExperimentalFeatureSettings & xpSettings) { xpSettings.require(Xp::CaDerivations); @@ -25,17 +25,34 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput( DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation( const DownstreamPlaceholder & placeholder, - std::string_view outputName, + OutputNameView outputName, const ExperimentalFeatureSettings & xpSettings) { xpSettings.require(Xp::DynamicDerivations); auto compressed = compressHash(placeholder.hash, 20); auto clearText = "nix-computed-output:" - + compressed.to_string(Base32, false) + + compressed.to_string(HashFormat::Base32, false) + ":" + std::string { outputName }; return DownstreamPlaceholder { hashString(htSHA256, clearText) }; } +DownstreamPlaceholder DownstreamPlaceholder::fromSingleDerivedPathBuilt( + const SingleDerivedPath::Built & b, + const ExperimentalFeatureSettings & xpSettings) +{ + return std::visit(overloaded { + [&](const SingleDerivedPath::Opaque & o) { + return DownstreamPlaceholder::unknownCaOutput(o.path, b.output, xpSettings); + }, + [&](const SingleDerivedPath::Built & b2) { + return DownstreamPlaceholder::unknownDerivation( + DownstreamPlaceholder::fromSingleDerivedPathBuilt(b2, xpSettings), + b.output, + xpSettings); + }, + }, b.drvPath->raw()); +} + } diff --git a/src/libstore/downstream-placeholder.hh b/src/libstore/downstream-placeholder.hh index 97f77e6b8..c911ecea2 100644 --- a/src/libstore/downstream-placeholder.hh +++ b/src/libstore/downstream-placeholder.hh @@ -3,6 +3,7 @@ 
#include "hash.hh" #include "path.hh" +#include "derived-path.hh" namespace nix { @@ -57,7 +58,7 @@ public: */ static DownstreamPlaceholder unknownCaOutput( const StorePath & drvPath, - std::string_view outputName, + OutputNameView outputName, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); /** @@ -71,7 +72,19 @@ public: */ static DownstreamPlaceholder unknownDerivation( const DownstreamPlaceholder & drvPlaceholder, - std::string_view outputName, + OutputNameView outputName, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + + /** + * Convenience constructor that handles both cases (unknown + * content-addressed output and unknown derivation), delegating as + * needed to `unknownCaOutput` and `unknownDerivation`. + * + * Recursively builds up a placeholder from a + * `SingleDerivedPath::Built.drvPath` chain. + */ + static DownstreamPlaceholder fromSingleDerivedPathBuilt( + const SingleDerivedPath::Built & built, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); }; diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index e866aeb42..91b7e30db 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -1,8 +1,8 @@ #include "serialise.hh" #include "store-api.hh" #include "archive.hh" -#include "worker-protocol.hh" -#include "worker-protocol-impl.hh" +#include "common-protocol.hh" +#include "common-protocol-impl.hh" #include @@ -41,13 +41,13 @@ void Store::exportPath(const StorePath & path, Sink & sink) Hash hash = hashSink.currentHash().first; if (hash != info->narHash && info->narHash != Hash(info->narHash.type)) throw Error("hash of path '%s' has changed from '%s' to '%s'!", - printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true)); + printStorePath(path), info->narHash.to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true)); teeSink << exportMagic << printStorePath(path); - WorkerProto::write(*this, - WorkerProto::WriteConn { .to = teeSink }, + CommonProto::write(*this, + CommonProto::WriteConn { .to = teeSink }, info->references); teeSink << (info->deriver ? 
printStorePath(*info->deriver) : "") @@ -76,8 +76,8 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) //Activity act(*logger, lvlInfo, "importing path '%s'", info.path); - auto references = WorkerProto::Serialise::read(*this, - WorkerProto::ReadConn { .from = source }); + auto references = CommonProto::Serialise::read(*this, + CommonProto::ReadConn { .from = source }); auto deriver = readString(source); auto narHash = hashString(htSHA256, saved.s); diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 38b691279..a283af5a2 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -863,6 +863,8 @@ void FileTransfer::download(FileTransferRequest && request, Sink & sink) } chunk = std::move(state->data); + /* Reset state->data after the move, since we check data.empty() */ + state->data = ""; state->request.notify_one(); } diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index e4c7d072d..ddfbac521 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -43,7 +43,7 @@ static void makeSymlink(const Path & link, const Path & target) void LocalStore::addIndirectRoot(const Path & path) { - std::string hash = hashString(htSHA1, path).to_string(Base32, false); + std::string hash = hashString(htSHA1, path).to_string(HashFormat::Base32, false); Path realRoot = canonPath(fmt("%1%/%2%/auto/%3%", stateDir, gcRootsDir, hash)); makeSymlink(realRoot, path); } @@ -777,7 +777,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) } }; - /* Synchronisation point for testing, see tests/gc-concurrent.sh. */ + /* Synchronisation point for testing, see tests/functional/gc-concurrent.sh. */ if (auto p = getEnv("_NIX_TEST_GC_SYNC")) readFile(*p); diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 5a4cb1824..9c25d9868 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -24,6 +24,9 @@ #include "config-impl.hh" +#ifdef __APPLE__ +#include +#endif namespace nix { @@ -154,6 +157,29 @@ unsigned int Settings::getDefaultCores() return concurrency; } +#if __APPLE__ +static bool hasVirt() { + + int hasVMM; + int hvSupport; + size_t size; + + size = sizeof(hasVMM); + if (sysctlbyname("kern.hv_vmm_present", &hasVMM, &size, NULL, 0) == 0) { + if (hasVMM) + return false; + } + + // whether the kernel and hardware supports virt + size = sizeof(hvSupport); + if (sysctlbyname("kern.hv_support", &hvSupport, &size, NULL, 0) == 0) { + return hvSupport == 1; + } else { + return false; + } +} +#endif + StringSet Settings::getDefaultSystemFeatures() { /* For backwards compatibility, accept some "features" that are @@ -170,6 +196,11 @@ StringSet Settings::getDefaultSystemFeatures() features.insert("kvm"); #endif + #if __APPLE__ + if (hasVirt()) + features.insert("apple-virt"); + #endif + return features; } diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 7009f6bb8..e90f70f5f 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -4,6 +4,7 @@ #include "types.hh" #include "config.hh" #include "util.hh" +#include "experimental-features.hh" #include #include @@ -261,6 +262,14 @@ public: For the exact format and examples, see [the manual chapter on remote builds](../advanced-topics/distributed-builds.md) )"}; + Setting alwaysAllowSubstitutes{ + this, false, "always-allow-substitutes", + R"( + If set to `true`, Nix will ignore the `allowSubstitutes` attribute in + derivations and always attempt to use available substituters. 
+ For more information on `allowSubstitutes`, see [the manual chapter on advanced attributes](../language/advanced-attributes.md). + )"}; + Setting buildersUseSubstitutes{ this, false, "builders-use-substitutes", R"( @@ -343,7 +352,7 @@ public: users in `build-users-group`. UIDs are allocated starting at 872415232 (0x34000000) on Linux and 56930 on macOS. - )"}; + )", {}, true, Xp::AutoAllocateUids}; Setting startId{this, #if __linux__ @@ -554,7 +563,7 @@ public: R"( This option determines the maximum size of the `tmpfs` filesystem mounted on `/dev/shm` in Linux sandboxes. For the format, see the - description of the `size` option of `tmpfs` in mount8. The default + description of the `size` option of `tmpfs` in mount(8). The default is `50%`. )"}; @@ -697,19 +706,44 @@ public: getDefaultSystemFeatures(), "system-features", R"( - A set of system “features” supported by this machine, e.g. `kvm`. - Derivations can express a dependency on such features through the - derivation attribute `requiredSystemFeatures`. For example, the - attribute + A set of system “features” supported by this machine. - requiredSystemFeatures = [ "kvm" ]; + This complements the [`system`](#conf-system) and [`extra-platforms`](#conf-extra-platforms) configuration options and the corresponding [`system`](@docroot@/language/derivations.md#attr-system) attribute on derivations. - ensures that the derivation can only be built on a machine with the - `kvm` feature. + A derivation can require system features in the [`requiredSystemFeatures` attribute](@docroot@/language/advanced-attributes.md#adv-attr-requiredSystemFeatures), and the machine to build the derivation must have them. - This setting by default includes `kvm` if `/dev/kvm` is accessible, - and the pseudo-features `nixos-test`, `benchmark` and `big-parallel` - that are used in Nixpkgs to route builds to specific machines. + System features are user-defined, but Nix sets the following defaults: + + - `apple-virt` + + Included on darwin if virtualization is available. + + - `kvm` + + Included on Linux if `/dev/kvm` is accessible. + + - `nixos-test`, `benchmark`, `big-parallel` + + These historical pseudo-features are always enabled for backwards compatibility, as they are used in Nixpkgs to route Hydra builds to specific machines. + + - `ca-derivations` + + Included by default if the [`ca-derivations` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-ca-derivations) is enabled. + + This system feature is implicitly required by derivations with the [`__contentAddressed` attribute](@docroot@/language/advanced-attributes.md#adv-attr-__contentAddressed). + + - `recursive-nix` + + Included by default if the [`recursive-nix` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-recursive-nix) is enabled. + + - `uid-range` + + On Linux, Nix can run builds in a user namespace where they run as root (UID 0) and have 65,536 UIDs available. + This is primarily useful for running containers such as `systemd-nspawn` inside a Nix build. For an example, see [`tests/systemd-nspawn/nix`][nspawn]. + + [nspawn]: https://github.com/NixOS/nix/blob/67bcb99700a0da1395fa063d7c6586740b304598/tests/systemd-nspawn.nix. + + Included by default on Linux if the [`auto-allocate-uids`](#conf-auto-allocate-uids) setting is enabled. 
)", {}, false}; Setting substituters{ @@ -1031,6 +1065,25 @@ public: ``` )" }; + + Setting impureEnv {this, {}, "impure-env", + R"( + A list of items, each in the format of: + + - `name=value`: Set environment variable `name` to `value`. + + If the user is trusted (see `trusted-users` option), when building + a fixed-output derivation, environment variables set in this option + will be passed to the builder if they are listed in [`impureEnvVars`](@docroot@/language/advanced-attributes.md##adv-attr-impureEnvVars). + + This option is useful for, e.g., setting `https_proxy` for + fixed-output derivations and in a multi-user Nix installation, or + setting private access tokens when fetching a private repository. + )", + {}, // aliases + true, // document default + Xp::ConfigurableImpureEnv + }; }; diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index fa17d606d..c712f7eb1 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -3,11 +3,10 @@ #include "pool.hh" #include "remote-store.hh" #include "serve-protocol.hh" +#include "serve-protocol-impl.hh" #include "build-result.hh" #include "store-api.hh" #include "path-with-outputs.hh" -#include "worker-protocol.hh" -#include "worker-protocol-impl.hh" #include "ssh.hh" #include "derivations.hh" #include "callback.hh" @@ -46,42 +45,38 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor std::unique_ptr sshConn; FdSink to; FdSource from; - int remoteVersion; + ServeProto::Version remoteVersion; bool good = true; /** - * Coercion to `WorkerProto::ReadConn`. This makes it easy to use the - * factored out worker protocol searlizers with a + * Coercion to `ServeProto::ReadConn`. This makes it easy to use the + * factored out serve protocol searlizers with a * `LegacySSHStore::Connection`. * - * The worker protocol connection types are unidirectional, unlike + * The serve protocol connection types are unidirectional, unlike * this type. - * - * @todo Use server protocol serializers, not worker protocol - * serializers, once we have made that distiction. */ - operator WorkerProto::ReadConn () + operator ServeProto::ReadConn () { - return WorkerProto::ReadConn { + return ServeProto::ReadConn { .from = from, + .version = remoteVersion, }; } /* - * Coercion to `WorkerProto::WriteConn`. This makes it easy to use the - * factored out worker protocol searlizers with a + * Coercion to `ServeProto::WriteConn`. This makes it easy to use the + * factored out serve protocol searlizers with a * `LegacySSHStore::Connection`. * - * The worker protocol connection types are unidirectional, unlike + * The serve protocol connection types are unidirectional, unlike * this type. - * - * @todo Use server protocol serializers, not worker protocol - * serializers, once we have made that distiction. 
*/ - operator WorkerProto::WriteConn () + operator ServeProto::WriteConn () { - return WorkerProto::WriteConn { + return ServeProto::WriteConn { .to = to, + .version = remoteVersion, }; } }; @@ -183,7 +178,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor auto deriver = readString(conn->from); if (deriver != "") info->deriver = parseStorePath(deriver); - info->references = WorkerProto::Serialise::read(*this, *conn); + info->references = ServeProto::Serialise::read(*this, *conn); readLongLong(conn->from); // download size info->narSize = readLongLong(conn->from); @@ -216,8 +211,8 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor << ServeProto::Command::AddToStoreNar << printStorePath(info.path) << (info.deriver ? printStorePath(*info.deriver) : "") - << info.narHash.to_string(Base16, false); - WorkerProto::write(*this, *conn, info.references); + << info.narHash.to_string(HashFormat::Base16, false); + ServeProto::write(*this, *conn, info.references); conn->to << info.registrationTime << info.narSize @@ -246,7 +241,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor conn->to << exportMagic << printStorePath(info.path); - WorkerProto::write(*this, *conn, info.references); + ServeProto::write(*this, *conn, info.references); conn->to << (info.deriver ? printStorePath(*info.deriver) : "") << 0 @@ -324,20 +319,7 @@ public: conn->to.flush(); - BuildResult status; - status.status = (BuildResult::Status) readInt(conn->from); - conn->from >> status.errorMsg; - - if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3) - conn->from >> status.timesBuilt >> status.isNonDeterministic >> status.startTime >> status.stopTime; - if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 6) { - auto builtOutputs = WorkerProto::Serialise::read(*this, *conn); - for (auto && [output, realisation] : builtOutputs) - status.builtOutputs.insert_or_assign( - std::move(output.outputName), - std::move(realisation)); - } - return status; + return ServeProto::Serialise::read(*this, *conn); } void buildPaths(const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) override @@ -358,6 +340,9 @@ public: [&](const StorePath & drvPath) { throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath)); }, + [&](std::monostate) { + throw Error("wanted build derivation that is itself a build product, but the legacy ssh protocol doesn't support that. 
Try using ssh-ng://");
+                },
             }, sOrDrvPath);
         }
         conn->to << ss;
@@ -406,10 +391,10 @@ public:
         conn->to
             << ServeProto::Command::QueryClosure
             << includeOutputs;
-        WorkerProto::write(*this, *conn, paths);
+        ServeProto::write(*this, *conn, paths);
         conn->to.flush();

-        for (auto & i : WorkerProto::Serialise::read(*this, *conn))
+        for (auto & i : ServeProto::Serialise::read(*this, *conn))
             out.insert(i);
     }

@@ -422,10 +407,10 @@ public:
             << ServeProto::Command::QueryValidPaths
             << false // lock
             << maybeSubstitute;
-        WorkerProto::write(*this, *conn, paths);
+        ServeProto::write(*this, *conn, paths);
         conn->to.flush();

-        return WorkerProto::Serialise::read(*this, *conn);
+        return ServeProto::Serialise::read(*this, *conn);
     }

     void connect() override
diff --git a/src/libstore/length-prefixed-protocol-helper.hh b/src/libstore/length-prefixed-protocol-helper.hh
new file mode 100644
index 000000000..4061b0cd6
--- /dev/null
+++ b/src/libstore/length-prefixed-protocol-helper.hh
@@ -0,0 +1,162 @@
+#pragma once
+/**
+ * @file Reusable serialisers for serialising container types in a
+ * length-prefixed manner.
+ *
+ * Used by both the Worker and Serve protocols.
+ */
+
+#include "types.hh"
+
+namespace nix {
+
+class Store;
+
+/**
+ * Reusable serialisers for serialising container types in a
+ * length-prefixed manner.
+ *
+ * @param T The type of the collection being serialised
+ *
+ * @param Inner This is the most important parameter; this is the "inner"
+ * protocol. The user of this will substitute `MyProtocol` or similar
+ * when making a `MyProtocol::Serialiser>`. Note that the
+ * inside is allowed to call `Inner::Serialiser` on different
+ * types. This is especially important for `std::map` which doesn't have
+ * a single `T` but one `K` and one `V`.
+ */
+template
+struct LengthPrefixedProtoHelper;
+
+/*!
+ * \typedef LengthPrefixedProtoHelper::S
+ *
+ * Read this as simply `using S = Inner::Serialise;`.
+ *
+ * It would be nice to use that directly, but C++ doesn't seem to allow
+ * it. The `typename` keyword needed to refer to `Inner` seems too greedy
+ * (low precedence), and then C++ complains that `Serialise` is not a
+ * type parameter but a real type.
+ *
+ * Making this `S` alias seems to be the only way to avoid these issues.
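+ *
+ * As a rough illustration (not part of this patch): with `ServeProto`
+ * as `Inner`, reading a `std::set<StorePath>` through this helper
+ * amounts to reading a length prefix and then calling
+ * `S<StorePath>::read(store, conn)` once per element, i.e.
+ * `ServeProto::Serialise<StorePath>::read(store, conn)`.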
+ */ + +#define LENGTH_PREFIXED_PROTO_HELPER(Inner, T) \ + struct LengthPrefixedProtoHelper< Inner, T > \ + { \ + static T read(const Store & store, typename Inner::ReadConn conn); \ + static void write(const Store & store, typename Inner::WriteConn conn, const T & str); \ + private: \ + template using S = typename Inner::template Serialise; \ + } + +template +LENGTH_PREFIXED_PROTO_HELPER(Inner, std::vector); + +template +LENGTH_PREFIXED_PROTO_HELPER(Inner, std::set); + +template +LENGTH_PREFIXED_PROTO_HELPER(Inner, std::tuple); + +template +#define _X std::map +LENGTH_PREFIXED_PROTO_HELPER(Inner, _X); +#undef _X + +template +std::vector +LengthPrefixedProtoHelper>::read( + const Store & store, typename Inner::ReadConn conn) +{ + std::vector resSet; + auto size = readNum(conn.from); + while (size--) { + resSet.push_back(S::read(store, conn)); + } + return resSet; +} + +template +void +LengthPrefixedProtoHelper>::write( + const Store & store, typename Inner::WriteConn conn, const std::vector & resSet) +{ + conn.to << resSet.size(); + for (auto & key : resSet) { + S::write(store, conn, key); + } +} + +template +std::set +LengthPrefixedProtoHelper>::read( + const Store & store, typename Inner::ReadConn conn) +{ + std::set resSet; + auto size = readNum(conn.from); + while (size--) { + resSet.insert(S::read(store, conn)); + } + return resSet; +} + +template +void +LengthPrefixedProtoHelper>::write( + const Store & store, typename Inner::WriteConn conn, const std::set & resSet) +{ + conn.to << resSet.size(); + for (auto & key : resSet) { + S::write(store, conn, key); + } +} + +template +std::map +LengthPrefixedProtoHelper>::read( + const Store & store, typename Inner::ReadConn conn) +{ + std::map resMap; + auto size = readNum(conn.from); + while (size--) { + auto k = S::read(store, conn); + auto v = S::read(store, conn); + resMap.insert_or_assign(std::move(k), std::move(v)); + } + return resMap; +} + +template +void +LengthPrefixedProtoHelper>::write( + const Store & store, typename Inner::WriteConn conn, const std::map & resMap) +{ + conn.to << resMap.size(); + for (auto & i : resMap) { + S::write(store, conn, i.first); + S::write(store, conn, i.second); + } +} + +template +std::tuple +LengthPrefixedProtoHelper>::read( + const Store & store, typename Inner::ReadConn conn) +{ + return std::tuple { + S::read(store, conn)..., + }; +} + +template +void +LengthPrefixedProtoHelper>::write( + const Store & store, typename Inner::WriteConn conn, const std::tuple & res) +{ + std::apply([&](const Us &... args) { + (S::write(store, conn, args), ...); + }, res); +} + +} diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 42347d80a..8fb2a5dba 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -832,7 +832,7 @@ uint64_t LocalStore::addValidPath(State & state, state.stmts->RegisterValidPath.use() (printStorePath(info.path)) - (info.narHash.to_string(Base16, true)) + (info.narHash.to_string(HashFormat::Base16, true)) (info.registrationTime == 0 ? time(0) : info.registrationTime) (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver) (info.narSize, info.narSize != 0) @@ -939,7 +939,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info) { state.stmts->UpdatePathInfo.use() (info.narSize, info.narSize != 0) - (info.narHash.to_string(Base16, true)) + (info.narHash.to_string(HashFormat::Base16, true)) (info.ultimate ? 
1 : 0, info.ultimate) (concatStringsSep(" ", info.sigs), !info.sigs.empty()) (renderContentAddress(info.ca), (bool) info.ca) @@ -1202,6 +1202,15 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, if (checkSigs && pathInfoIsUntrusted(info)) throw Error("cannot add path '%s' because it lacks a signature by a trusted key", printStorePath(info.path)); + /* In case we are not interested in reading the NAR: discard it. */ + bool narRead = false; + Finally cleanup = [&]() { + if (!narRead) { + ParseSink sink; + parseDump(sink, source); + } + }; + addTempRoot(info.path); if (repair || !isValidPath(info.path)) { @@ -1226,13 +1235,14 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, TeeSource wrapperSource { source, hashSink }; + narRead = true; restorePath(realPath, wrapperSource); auto hashResult = hashSink.finish(); if (hashResult.first != info.narHash) throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s", - printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true)); + printStorePath(info.path), info.narHash.to_string(HashFormat::Base32, true), hashResult.first.to_string(HashFormat::Base32, true)); if (hashResult.second != info.narSize) throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s", @@ -1248,8 +1258,8 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, if (specified.hash != actualHash.hash) { throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s", printStorePath(info.path), - specified.hash.to_string(Base32, true), - actualHash.hash.to_string(Base32, true)); + specified.hash.to_string(HashFormat::Base32, true), + actualHash.hash.to_string(HashFormat::Base32, true)); } } @@ -1513,7 +1523,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) for (auto & link : readDirectory(linksDir)) { printMsg(lvlTalkative, "checking contents of '%s'", link.name); Path linkPath = linksDir + "/" + link.name; - std::string hash = hashPath(htSHA256, linkPath).first.to_string(Base32, false); + std::string hash = hashPath(htSHA256, linkPath).first.to_string(HashFormat::Base32, false); if (hash != link.name) { printError("link '%s' was modified! expected hash '%s', got '%s'", linkPath, link.name, hash); @@ -1546,7 +1556,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) if (info->narHash != nullHash && info->narHash != current.first) { printError("path '%s' was modified! 
expected hash '%s', got '%s'", - printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true)); + printStorePath(i), info->narHash.to_string(HashFormat::Base32, true), current.first.to_string(HashFormat::Base32, true)); if (repair) repairPath(i); else errors = true; } else { diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index 14160dc8b..1035691c7 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -88,7 +88,7 @@ const ContentAddress * getDerivationCA(const BasicDerivation & drv) auto out = drv.outputs.find("out"); if (out == drv.outputs.end()) return nullptr; - if (auto dof = std::get_if(&out->second)) { + if (auto dof = std::get_if(&out->second.raw)) { return &dof->ca; } return nullptr; @@ -125,14 +125,26 @@ void Store::queryMissing(const std::vector & targets, std::function doPath; + std::function, const DerivedPathMap::ChildNode &)> enqueueDerivedPaths; + + enqueueDerivedPaths = [&](ref inputDrv, const DerivedPathMap::ChildNode & inputNode) { + if (!inputNode.value.empty()) + pool.enqueue(std::bind(doPath, DerivedPath::Built { inputDrv, inputNode.value })); + for (const auto & [outputName, childNode] : inputNode.childMap) + enqueueDerivedPaths( + make_ref(SingleDerivedPath::Built { inputDrv, outputName }), + childNode); + }; + auto mustBuildDrv = [&](const StorePath & drvPath, const Derivation & drv) { { auto state(state_.lock()); state->willBuild.insert(drvPath); } - for (auto & i : drv.inputDrvs) - pool.enqueue(std::bind(doPath, DerivedPath::Built { i.first, i.second })); + for (const auto & [inputDrv, inputNode] : drv.inputDrvs.map) { + enqueueDerivedPaths(makeConstantStorePathRef(inputDrv), inputNode); + } }; auto checkOutput = [&]( @@ -176,10 +188,18 @@ void Store::queryMissing(const std::vector & targets, std::visit(overloaded { [&](const DerivedPath::Built & bfd) { - if (!isValidPath(bfd.drvPath)) { + auto drvPathP = std::get_if(&*bfd.drvPath); + if (!drvPathP) { + // TODO make work in this case. + warn("Ignoring dynamic derivation %s while querying missing paths; not yet implemented", bfd.drvPath->to_string(*this)); + return; + } + auto & drvPath = drvPathP->path; + + if (!isValidPath(drvPath)) { // FIXME: we could try to substitute the derivation. auto state(state_.lock()); - state->unknown.insert(bfd.drvPath); + state->unknown.insert(drvPath); return; } @@ -187,7 +207,7 @@ void Store::queryMissing(const std::vector & targets, /* true for regular derivations, and CA derivations for which we have a trust mapping for all wanted outputs. */ auto knownOutputPaths = true; - for (auto & [outputName, pathOpt] : queryPartialDerivationOutputMap(bfd.drvPath)) { + for (auto & [outputName, pathOpt] : queryPartialDerivationOutputMap(drvPath)) { if (!pathOpt) { knownOutputPaths = false; break; @@ -197,15 +217,45 @@ void Store::queryMissing(const std::vector & targets, } if (knownOutputPaths && invalid.empty()) return; - auto drv = make_ref(derivationFromPath(bfd.drvPath)); - ParsedDerivation parsedDrv(StorePath(bfd.drvPath), *drv); + auto drv = make_ref(derivationFromPath(drvPath)); + ParsedDerivation parsedDrv(StorePath(drvPath), *drv); + + if (!knownOutputPaths && settings.useSubstitutes && parsedDrv.substitutesAllowed()) { + experimentalFeatureSettings.require(Xp::CaDerivations); + + // If there are unknown output paths, attempt to find if the + // paths are known to substituters through a realisation. 
+ auto outputHashes = staticOutputHashes(*this, *drv); + knownOutputPaths = true; + + for (auto [outputName, hash] : outputHashes) { + if (!bfd.outputs.contains(outputName)) + continue; + + bool found = false; + for (auto &sub : getDefaultSubstituters()) { + auto realisation = sub->queryRealisation({hash, outputName}); + if (!realisation) + continue; + found = true; + if (!isValidPath(realisation->outPath)) + invalid.insert(realisation->outPath); + break; + } + if (!found) { + // Some paths did not have a realisation, this must be built. + knownOutputPaths = false; + break; + } + } + } if (knownOutputPaths && settings.useSubstitutes && parsedDrv.substitutesAllowed()) { auto drvState = make_ref>(DrvState(invalid.size())); for (auto & output : invalid) - pool.enqueue(std::bind(checkOutput, bfd.drvPath, drv, output, drvState)); + pool.enqueue(std::bind(checkOutput, drvPath, drv, output, drvState)); } else - mustBuildDrv(bfd.drvPath, *drv); + mustBuildDrv(drvPath, *drv); }, [&](const DerivedPath::Opaque & bo) { @@ -284,24 +334,41 @@ std::map drvOutputReferences( { std::set inputRealisations; - for (const auto & [inputDrv, outputNames] : drv.inputDrvs) { - const auto outputHashes = - staticOutputHashes(store, store.readDerivation(inputDrv)); - for (const auto & outputName : outputNames) { - auto outputHash = get(outputHashes, outputName); - if (!outputHash) - throw Error( - "output '%s' of derivation '%s' isn't realised", outputName, - store.printStorePath(inputDrv)); - auto thisRealisation = store.queryRealisation( - DrvOutput{*outputHash, outputName}); - if (!thisRealisation) - throw Error( - "output '%s' of derivation '%s' isn't built", outputName, - store.printStorePath(inputDrv)); - inputRealisations.insert(*thisRealisation); + std::function::ChildNode &)> accumRealisations; + + accumRealisations = [&](const StorePath & inputDrv, const DerivedPathMap::ChildNode & inputNode) { + if (!inputNode.value.empty()) { + auto outputHashes = + staticOutputHashes(store, store.readDerivation(inputDrv)); + for (const auto & outputName : inputNode.value) { + auto outputHash = get(outputHashes, outputName); + if (!outputHash) + throw Error( + "output '%s' of derivation '%s' isn't realised", outputName, + store.printStorePath(inputDrv)); + auto thisRealisation = store.queryRealisation( + DrvOutput{*outputHash, outputName}); + if (!thisRealisation) + throw Error( + "output '%s' of derivation '%s' isn’t built", outputName, + store.printStorePath(inputDrv)); + inputRealisations.insert(*thisRealisation); + } } - } + if (!inputNode.value.empty()) { + auto d = makeConstantStorePathRef(inputDrv); + for (const auto & [outputName, childNode] : inputNode.childMap) { + SingleDerivedPath next = SingleDerivedPath::Built { d, outputName }; + accumRealisations( + // TODO deep resolutions for dynamic derivations, issue #8947, would go here. 
+ resolveDerivedPath(store, next), + childNode); + } + } + }; + + for (const auto & [inputDrv, inputNode] : drv.inputDrvs.map) + accumRealisations(inputDrv, inputNode); auto info = store.queryPathInfo(outputPath); @@ -310,7 +377,9 @@ std::map drvOutputReferences( OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd, Store * evalStore_) { - auto outputsOpt_ = store.queryPartialDerivationOutputMap(bfd.drvPath, evalStore_); + auto drvPath = resolveDerivedPath(store, *bfd.drvPath, evalStore_); + + auto outputsOpt_ = store.queryPartialDerivationOutputMap(drvPath, evalStore_); auto outputsOpt = std::visit(overloaded { [&](const OutputsSpec::All &) { @@ -325,21 +394,73 @@ OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd, if (!pOutputPathOpt) throw Error( "the derivation '%s' doesn't have an output named '%s'", - store.printStorePath(bfd.drvPath), output); + bfd.drvPath->to_string(store), output); outputsOpt.insert_or_assign(output, std::move(*pOutputPathOpt)); } return outputsOpt; }, - }, bfd.outputs.raw()); + }, bfd.outputs.raw); OutputPathMap outputs; for (auto & [outputName, outputPathOpt] : outputsOpt) { if (!outputPathOpt) - throw MissingRealisation(store.printStorePath(bfd.drvPath), outputName); + throw MissingRealisation(bfd.drvPath->to_string(store), outputName); auto & outputPath = *outputPathOpt; outputs.insert_or_assign(outputName, outputPath); } return outputs; } + +StorePath resolveDerivedPath(Store & store, const SingleDerivedPath & req, Store * evalStore_) +{ + auto & evalStore = evalStore_ ? *evalStore_ : store; + + return std::visit(overloaded { + [&](const SingleDerivedPath::Opaque & bo) { + return bo.path; + }, + [&](const SingleDerivedPath::Built & bfd) { + auto drvPath = resolveDerivedPath(store, *bfd.drvPath, evalStore_); + auto outputPaths = evalStore.queryPartialDerivationOutputMap(drvPath, evalStore_); + if (outputPaths.count(bfd.output) == 0) + throw Error("derivation '%s' does not have an output named '%s'", + store.printStorePath(drvPath), bfd.output); + auto & optPath = outputPaths.at(bfd.output); + if (!optPath) + throw MissingRealisation(bfd.drvPath->to_string(store), bfd.output); + return *optPath; + }, + }, req.raw()); +} + + +OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd) +{ + auto drvPath = resolveDerivedPath(store, *bfd.drvPath); + auto outputMap = store.queryDerivationOutputMap(drvPath); + auto outputsLeft = std::visit(overloaded { + [&](const OutputsSpec::All &) { + return StringSet {}; + }, + [&](const OutputsSpec::Names & names) { + return static_cast(names); + }, + }, bfd.outputs.raw); + for (auto iter = outputMap.begin(); iter != outputMap.end();) { + auto & outputName = iter->first; + if (bfd.outputs.contains(outputName)) { + outputsLeft.erase(outputName); + ++iter; + } else { + iter = outputMap.erase(iter); + } + } + if (!outputsLeft.empty()) + throw Error("derivation '%s' does not have an outputs %s", + store.printStorePath(drvPath), + concatStringsSep(", ", quoteStrings(std::get(bfd.outputs.raw)))); + return outputMap; +} + } diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index c7176d30f..cdbcf7e74 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -332,9 +332,9 @@ public: (std::string(info->path.name())) (narInfo ? narInfo->url : "", narInfo != 0) (narInfo ? narInfo->compression : "", narInfo != 0) - (narInfo && narInfo->fileHash ? 
narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash) + (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Base32, true) : "", narInfo && narInfo->fileHash) (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize) - (info->narHash.to_string(Base32, true)) + (info->narHash.to_string(HashFormat::Base32, true)) (info->narSize) (concatStringsSep(" ", info->shortRefs())) (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver) diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index d17253741..ee2ddfd81 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -105,10 +105,10 @@ std::string NarInfo::to_string(const Store & store) const assert(compression != ""); res += "Compression: " + compression + "\n"; assert(fileHash && fileHash->type == htSHA256); - res += "FileHash: " + fileHash->to_string(Base32, true) + "\n"; + res += "FileHash: " + fileHash->to_string(HashFormat::Base32, true) + "\n"; res += "FileSize: " + std::to_string(fileSize) + "\n"; assert(narHash.type == htSHA256); - res += "NarHash: " + narHash.to_string(Base32, true) + "\n"; + res += "NarHash: " + narHash.to_string(HashFormat::Base32, true) + "\n"; res += "NarSize: " + std::to_string(narSize) + "\n"; res += "References: " + concatStringsSep(" ", shortRefs()) + "\n"; diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index 4a79cf4a1..23c6a41e4 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -146,10 +146,10 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, contents of the symlink (i.e. the result of readlink()), not the contents of the target (which may not even exist). */ Hash hash = hashPath(htSHA256, path).first; - debug("'%1%' has hash '%2%'", path, hash.to_string(Base32, true)); + debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Base32, true)); /* Check if this is a known hash. */ - Path linkPath = linksDir + "/" + hash.to_string(Base32, false); + Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Base32, false); /* Maybe delete the link, if it has been corrupted. 
*/ if (pathExists(linkPath)) { diff --git a/src/libstore/outputs-spec.cc b/src/libstore/outputs-spec.cc index e26c38138..21c069223 100644 --- a/src/libstore/outputs-spec.cc +++ b/src/libstore/outputs-spec.cc @@ -17,7 +17,7 @@ bool OutputsSpec::contains(const std::string & outputName) const [&](const OutputsSpec::Names & outputNames) { return outputNames.count(outputName) > 0; }, - }, raw()); + }, raw); } static std::string outputSpecRegexStr = @@ -49,7 +49,7 @@ OutputsSpec OutputsSpec::parse(std::string_view s) std::optional spec = parseOpt(s); if (!spec) throw Error("invalid outputs specifier '%s'", s); - return *spec; + return std::move(*spec); } @@ -63,7 +63,7 @@ std::optional> ExtendedOutputsS auto specOpt = OutputsSpec::parseOpt(s.substr(found + 1)); if (!specOpt) return std::nullopt; - return std::pair { s.substr(0, found), ExtendedOutputsSpec::Explicit { *std::move(specOpt) } }; + return std::pair { s.substr(0, found), ExtendedOutputsSpec::Explicit { std::move(*specOpt) } }; } @@ -85,7 +85,7 @@ std::string OutputsSpec::to_string() const [&](const OutputsSpec::Names & outputNames) -> std::string { return concatStringsSep(",", outputNames); }, - }, raw()); + }, raw); } @@ -98,7 +98,7 @@ std::string ExtendedOutputsSpec::to_string() const [&](const ExtendedOutputsSpec::Explicit & outputSpec) -> std::string { return "^" + outputSpec.to_string(); }, - }, raw()); + }, raw); } @@ -118,9 +118,9 @@ OutputsSpec OutputsSpec::union_(const OutputsSpec & that) const ret.insert(thoseNames.begin(), thoseNames.end()); return ret; }, - }, that.raw()); + }, that.raw); }, - }, raw()); + }, raw); } @@ -142,9 +142,9 @@ bool OutputsSpec::isSubsetOf(const OutputsSpec & that) const ret = false; return ret; }, - }, raw()); + }, raw); }, - }, that.raw()); + }, that.raw); } } @@ -169,7 +169,7 @@ void adl_serializer::to_json(json & json, OutputsSpec t) { [&](const OutputsSpec::Names & names) { json = names; }, - }, t.raw()); + }, t.raw); } @@ -189,7 +189,7 @@ void adl_serializer::to_json(json & json, ExtendedOutputsSp [&](const ExtendedOutputsSpec::Explicit & e) { adl_serializer::to_json(json, e); }, - }, t.raw()); + }, t.raw); } } diff --git a/src/libstore/outputs-spec.hh b/src/libstore/outputs-spec.hh index 5a726fe90..1ef99a5fc 100644 --- a/src/libstore/outputs-spec.hh +++ b/src/libstore/outputs-spec.hh @@ -6,63 +6,70 @@ #include #include +#include "comparator.hh" #include "json-impls.hh" +#include "comparator.hh" +#include "variant-wrapper.hh" namespace nix { /** - * A non-empty set of outputs, specified by name + * An (owned) output name. Just a type alias used to make code more + * readible. */ -struct OutputNames : std::set { - using std::set::set; - - /* These need to be "inherited manually" */ - - OutputNames(const std::set & s) - : std::set(s) - { assert(!empty()); } - - /** - * Needs to be "inherited manually" - */ - OutputNames(std::set && s) - : std::set(s) - { assert(!empty()); } - - /* This set should always be non-empty, so we delete this - constructor in order make creating empty ones by mistake harder. - */ - OutputNames() = delete; -}; +typedef std::string OutputName; /** - * The set of all outputs, without needing to name them explicitly + * A borrowed output name. Just a type alias used to make code more + * readible. 
*/ -struct AllOutputs : std::monostate { }; +typedef std::string_view OutputNameView; -typedef std::variant _OutputsSpecRaw; +struct OutputsSpec { + /** + * A non-empty set of outputs, specified by name + */ + struct Names : std::set { + using std::set::set; -struct OutputsSpec : _OutputsSpecRaw { - using Raw = _OutputsSpecRaw; - using Raw::Raw; + /* These need to be "inherited manually" */ + + Names(const std::set & s) + : std::set(s) + { assert(!empty()); } + + /** + * Needs to be "inherited manually" + */ + Names(std::set && s) + : std::set(s) + { assert(!empty()); } + + /* This set should always be non-empty, so we delete this + constructor in order make creating empty ones by mistake harder. + */ + Names() = delete; + }; + + /** + * The set of all outputs, without needing to name them explicitly + */ + struct All : std::monostate { }; + + typedef std::variant Raw; + + Raw raw; + + GENERATE_CMP(OutputsSpec, me->raw); + + MAKE_WRAPPER_CONSTRUCTOR(OutputsSpec); /** * Force choosing a variant */ OutputsSpec() = delete; - using Names = OutputNames; - using All = AllOutputs; - - inline const Raw & raw() const { - return static_cast(*this); - } - - inline Raw & raw() { - return static_cast(*this); - } - - bool contains(const std::string & output) const; + bool contains(const OutputName & output) const; /** * Create a new OutputsSpec which is the union of this and that. @@ -84,20 +91,22 @@ struct OutputsSpec : _OutputsSpecRaw { std::string to_string() const; }; -struct DefaultOutputs : std::monostate { }; - -typedef std::variant _ExtendedOutputsSpecRaw; - -struct ExtendedOutputsSpec : _ExtendedOutputsSpecRaw { - using Raw = _ExtendedOutputsSpecRaw; - using Raw::Raw; - - using Default = DefaultOutputs; +struct ExtendedOutputsSpec { + struct Default : std::monostate { }; using Explicit = OutputsSpec; - inline const Raw & raw() const { - return static_cast(*this); - } + typedef std::variant Raw; + + Raw raw; + + GENERATE_CMP(ExtendedOutputsSpec, me->raw); + + MAKE_WRAPPER_CONSTRUCTOR(ExtendedOutputsSpec); + + /** + * Force choosing a variant + */ + ExtendedOutputsSpec() = delete; /** * Parse a string of the form 'prefix^output1,...outputN' or diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc index cc4a94fab..1d900c272 100644 --- a/src/libstore/parsed-derivations.cc +++ b/src/libstore/parsed-derivations.cc @@ -122,7 +122,7 @@ bool ParsedDerivation::willBuildLocally(Store & localStore) const bool ParsedDerivation::substitutesAllowed() const { - return getBoolAttr("allowSubstitutes", true); + return settings.alwaysAllowSubstitutes ? 
true : getBoolAttr("allowSubstitutes", true); } bool ParsedDerivation::useUidRange() const diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index ccb57104f..ab39e71f4 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -1,10 +1,27 @@ #include "path-info.hh" -#include "worker-protocol.hh" -#include "worker-protocol-impl.hh" #include "store-api.hh" namespace nix { +GENERATE_CMP_EXT( + , + UnkeyedValidPathInfo, + me->deriver, + me->narHash, + me->references, + me->registrationTime, + me->narSize, + //me->id, + me->ultimate, + me->sigs, + me->ca); + +GENERATE_CMP_EXT( + , + ValidPathInfo, + me->path, + static_cast(*me)); + std::string ValidPathInfo::fingerprint(const Store & store) const { if (narSize == 0) @@ -12,7 +29,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const store.printStorePath(path)); return "1;" + store.printStorePath(path) + ";" - + narHash.to_string(Base32, true) + ";" + + narHash.to_string(HashFormat::Base32, true) + ";" + std::to_string(narSize) + ";" + concatStringsSep(",", store.printStorePathSet(references)); } @@ -99,14 +116,13 @@ Strings ValidPathInfo::shortRefs() const return refs; } - ValidPathInfo::ValidPathInfo( const Store & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash) - : path(store.makeFixedOutputPathFromCA(name, ca)) - , narHash(narHash) + : UnkeyedValidPathInfo(narHash) + , path(store.makeFixedOutputPathFromCA(name, ca)) { std::visit(overloaded { [this](TextInfo && ti) { @@ -128,49 +144,4 @@ ValidPathInfo::ValidPathInfo( }, std::move(ca).raw); } - -ValidPathInfo ValidPathInfo::read(Source & source, const Store & store, unsigned int format) -{ - return read(source, store, format, store.parseStorePath(readString(source))); -} - -ValidPathInfo ValidPathInfo::read(Source & source, const Store & store, unsigned int format, StorePath && path) -{ - auto deriver = readString(source); - auto narHash = Hash::parseAny(readString(source), htSHA256); - ValidPathInfo info(path, narHash); - if (deriver != "") info.deriver = store.parseStorePath(deriver); - info.references = WorkerProto::Serialise::read(store, - WorkerProto::ReadConn { .from = source }); - source >> info.registrationTime >> info.narSize; - if (format >= 16) { - source >> info.ultimate; - info.sigs = readStrings(source); - info.ca = ContentAddress::parseOpt(readString(source)); - } - return info; -} - - -void ValidPathInfo::write( - Sink & sink, - const Store & store, - unsigned int format, - bool includePath) const -{ - if (includePath) - sink << store.printStorePath(path); - sink << (deriver ? 
store.printStorePath(*deriver) : "") - << narHash.to_string(Base16, false); - WorkerProto::write(store, - WorkerProto::WriteConn { .to = sink }, - references); - sink << registrationTime << narSize; - if (format >= 16) { - sink << ultimate - << sigs - << renderContentAddress(ca); - } -} - } diff --git a/src/libstore/path-info.hh b/src/libstore/path-info.hh index 221523622..a82e643ae 100644 --- a/src/libstore/path-info.hh +++ b/src/libstore/path-info.hh @@ -32,9 +32,8 @@ struct SubstitutablePathInfo typedef std::map SubstitutablePathInfos; -struct ValidPathInfo +struct UnkeyedValidPathInfo { - StorePath path; std::optional deriver; /** * \todo document this @@ -72,13 +71,19 @@ struct ValidPathInfo */ std::optional ca; - bool operator == (const ValidPathInfo & i) const - { - return - path == i.path - && narHash == i.narHash - && references == i.references; - } + UnkeyedValidPathInfo(const UnkeyedValidPathInfo & other) = default; + + UnkeyedValidPathInfo(Hash narHash) : narHash(narHash) { }; + + DECLARE_CMP(UnkeyedValidPathInfo); + + virtual ~UnkeyedValidPathInfo() { } +}; + +struct ValidPathInfo : UnkeyedValidPathInfo { + StorePath path; + + DECLARE_CMP(ValidPathInfo); /** * Return a fingerprint of the store path to be used in binary @@ -92,11 +97,11 @@ struct ValidPathInfo void sign(const Store & store, const SecretKey & secretKey); - /** - * @return The `ContentAddressWithReferences` that determines the - * store path for a content-addressed store object, `std::nullopt` - * for an input-addressed store object. - */ + /** + * @return The `ContentAddressWithReferences` that determines the + * store path for a content-addressed store object, `std::nullopt` + * for an input-addressed store object. + */ std::optional contentAddressWithReferences() const; /** @@ -122,18 +127,13 @@ struct ValidPathInfo ValidPathInfo(const ValidPathInfo & other) = default; - ValidPathInfo(StorePath && path, Hash narHash) : path(std::move(path)), narHash(narHash) { }; - ValidPathInfo(const StorePath & path, Hash narHash) : path(path), narHash(narHash) { }; + ValidPathInfo(StorePath && path, UnkeyedValidPathInfo info) : UnkeyedValidPathInfo(info), path(std::move(path)) { }; + ValidPathInfo(const StorePath & path, UnkeyedValidPathInfo info) : UnkeyedValidPathInfo(info), path(path) { }; ValidPathInfo(const Store & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash); virtual ~ValidPathInfo() { } - - static ValidPathInfo read(Source & source, const Store & store, unsigned int format); - static ValidPathInfo read(Source & source, const Store & store, unsigned int format, StorePath && path); - - void write(Sink & sink, const Store & store, unsigned int format, bool includePath = true) const; }; typedef std::map ValidPathInfos; diff --git a/src/libstore/path-regex.hh b/src/libstore/path-regex.hh index 4f8dc4c1f..a44e6a2eb 100644 --- a/src/libstore/path-regex.hh +++ b/src/libstore/path-regex.hh @@ -3,6 +3,6 @@ namespace nix { -static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-\._\?=]+)"; +static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-_\?=][0-9a-zA-Z\+\-\._\?=]*)"; } diff --git a/src/libstore/path-with-outputs.cc b/src/libstore/path-with-outputs.cc index 869b490ad..af6837370 100644 --- a/src/libstore/path-with-outputs.cc +++ b/src/libstore/path-with-outputs.cc @@ -16,10 +16,16 @@ std::string StorePathWithOutputs::to_string(const Store & store) const DerivedPath StorePathWithOutputs::toDerivedPath() const { if (!outputs.empty()) { - return DerivedPath::Built { 
path, OutputsSpec::Names { outputs } }; + return DerivedPath::Built { + .drvPath = makeConstantStorePathRef(path), + .outputs = OutputsSpec::Names { outputs }, + }; } else if (path.isDerivation()) { assert(outputs.empty()); - return DerivedPath::Built { path, OutputsSpec::All { } }; + return DerivedPath::Built { + .drvPath = makeConstantStorePathRef(path), + .outputs = OutputsSpec::All { }, + }; } else { return DerivedPath::Opaque { path }; } @@ -34,29 +40,36 @@ std::vector toDerivedPaths(const std::vector } -std::variant StorePathWithOutputs::tryFromDerivedPath(const DerivedPath & p) +StorePathWithOutputs::ParseResult StorePathWithOutputs::tryFromDerivedPath(const DerivedPath & p) { return std::visit(overloaded { - [&](const DerivedPath::Opaque & bo) -> std::variant { + [&](const DerivedPath::Opaque & bo) -> StorePathWithOutputs::ParseResult { if (bo.path.isDerivation()) { // drv path gets interpreted as "build", not "get drv file itself" return bo.path; } return StorePathWithOutputs { bo.path }; }, - [&](const DerivedPath::Built & bfd) -> std::variant { - return StorePathWithOutputs { - .path = bfd.drvPath, - // Use legacy encoding of wildcard as empty set - .outputs = std::visit(overloaded { - [&](const OutputsSpec::All &) -> StringSet { - return {}; - }, - [&](const OutputsSpec::Names & outputs) { - return static_cast(outputs); - }, - }, bfd.outputs.raw()), - }; + [&](const DerivedPath::Built & bfd) -> StorePathWithOutputs::ParseResult { + return std::visit(overloaded { + [&](const SingleDerivedPath::Opaque & bo) -> StorePathWithOutputs::ParseResult { + return StorePathWithOutputs { + .path = bo.path, + // Use legacy encoding of wildcard as empty set + .outputs = std::visit(overloaded { + [&](const OutputsSpec::All &) -> StringSet { + return {}; + }, + [&](const OutputsSpec::Names & outputs) { + return static_cast(outputs); + }, + }, bfd.outputs.raw), + }; + }, + [&](const SingleDerivedPath::Built &) -> StorePathWithOutputs::ParseResult { + return std::monostate {}; + }, + }, bfd.drvPath->raw()); }, }, p.raw()); } diff --git a/src/libstore/path-with-outputs.hh b/src/libstore/path-with-outputs.hh index d75850868..57e03252d 100644 --- a/src/libstore/path-with-outputs.hh +++ b/src/libstore/path-with-outputs.hh @@ -23,7 +23,9 @@ struct StorePathWithOutputs DerivedPath toDerivedPath() const; - static std::variant tryFromDerivedPath(const DerivedPath &); + typedef std::variant ParseResult; + + static StorePathWithOutputs::ParseResult tryFromDerivedPath(const DerivedPath &); }; std::vector toDerivedPaths(const std::vector); diff --git a/src/libstore/path.cc b/src/libstore/path.cc index 552e83114..ec3e53232 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -11,6 +11,8 @@ static void checkName(std::string_view path, std::string_view name) if (name.size() > StorePath::MaxPathLen) throw BadStorePath("store path '%s' has a name longer than %d characters", path, StorePath::MaxPathLen); + if (name[0] == '.') + throw BadStorePath("store path '%s' starts with illegal character '.'", path); // See nameRegexStr for the definition for (auto c : name) if (!((c >= '0' && c <= '9') @@ -33,7 +35,7 @@ StorePath::StorePath(std::string_view _baseName) } StorePath::StorePath(const Hash & hash, std::string_view _name) - : baseName((hash.to_string(Base32, false) + "-").append(std::string(_name))) + : baseName((hash.to_string(HashFormat::Base32, false) + "-").append(std::string(_name))) { checkName(baseName, name()); } diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc index 
4e9955948..239047dd6 100644 --- a/src/libstore/profiles.cc +++ b/src/libstore/profiles.cc @@ -183,7 +183,7 @@ void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bo iterDropUntil(gens, i, [&](auto & g) { return g.number == curGen; }); // Skip over `max` generations, preserving them - for (auto keep = 0; i != gens.rend() && keep < max; ++i, ++keep); + for (GenerationNumber keep = 0; i != gens.rend() && keep < max; ++i, ++keep); // Delete the rest for (; i != gens.rend(); ++i) diff --git a/src/libstore/realisation.hh b/src/libstore/realisation.hh index 0548b30c1..4ba2123d8 100644 --- a/src/libstore/realisation.hh +++ b/src/libstore/realisation.hh @@ -34,12 +34,12 @@ struct DrvOutput { /** * The name of the output. */ - std::string outputName; + OutputName outputName; std::string to_string() const; std::string strHash() const - { return drvHash.to_string(Base16, true); } + { return drvHash.to_string(HashFormat::Base16, true); } static DrvOutput parse(const std::string &); @@ -84,7 +84,7 @@ struct Realisation { * Since these are the outputs of a single derivation, we know the * output names are unique so we can use them as the map key. */ -typedef std::map SingleDrvOutputs; +typedef std::map SingleDrvOutputs; /** * Collection type for multiple derivations' outputs' `Realisation`s. @@ -146,7 +146,7 @@ public: MissingRealisation(DrvOutput & outputId) : MissingRealisation(outputId.outputName, outputId.strHash()) {} - MissingRealisation(std::string_view drv, std::string outputName) + MissingRealisation(std::string_view drv, OutputName outputName) : Error( "cannot operate on output '%s' of the " "unbuilt derivation '%s'", outputName, diff --git a/src/libstore/remote-store-connection.hh b/src/libstore/remote-store-connection.hh index ce4740a9c..e4a9cacb9 100644 --- a/src/libstore/remote-store-connection.hh +++ b/src/libstore/remote-store-connection.hh @@ -30,7 +30,7 @@ struct RemoteStore::Connection * sides support. (If the maximum doesn't exist, we would fail to * establish a connection and produce a value of this type.) */ - unsigned int daemonVersion; + WorkerProto::Version daemonVersion; /** * Whether the remote side trusts us or not. @@ -70,6 +70,7 @@ struct RemoteStore::Connection { return WorkerProto::ReadConn { .from = from, + .version = daemonVersion, }; } @@ -85,6 +86,7 @@ struct RemoteStore::Connection { return WorkerProto::WriteConn { .to = to, + .version = daemonVersion, }; } diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 21258daec..7bdc25433 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -172,7 +172,24 @@ void RemoteStore::ConnectionHandle::processStderr(Sink * sink, Source * source, auto ex = handle->processStderr(sink, source, flush); if (ex) { daemonException = true; - std::rethrow_exception(ex); + try { + std::rethrow_exception(ex); + } catch (const Error & e) { + // Nix versions before #4628 did not have an adequate behavior for reporting that the derivation format was upgraded. + // To avoid having to add compatibility logic in many places, we expect to catch almost all occurrences of the + // old incomprehensible error here, so that we can explain to users what's going on when their daemon is + // older than #4628 (2023). 
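
The compatibility shim wrapped around processStderr rethrows the daemon's error with a hint when an old daemon rejects the new derivation format. A rough standalone model of that catch-augment-rethrow pattern, using std::runtime_error in place of Nix's Error type and a simplified message check:

    #include <exception>
    #include <iostream>
    #include <stdexcept>
    #include <string>

    // If the peer is old and the message looks like the known failure mode,
    // wrap it with a hint; anything else propagates unchanged.
    static void rethrowWithHint(std::exception_ptr ex, unsigned daemonMinor)
    {
        try {
            std::rethrow_exception(ex);
        } catch (const std::runtime_error & e) {
            std::string m = e.what();
            if (daemonMinor <= 35 && m.find("parsing derivation") != std::string::npos)
                throw std::runtime_error(
                    m + " (hint: the daemon may be too old for dynamic derivations)");
            throw;
        }
    }

    int main()
    {
        try {
            try {
                throw std::runtime_error("error: parsing derivation: expected string 'Derive(['");
            } catch (...) {
                rethrowWithHint(std::current_exception(), 35);
            }
        } catch (const std::exception & e) {
            std::cout << e.what() << '\n';
        }
    }
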
+ if (experimentalFeatureSettings.isEnabled(Xp::DynamicDerivations) && + GET_PROTOCOL_MINOR(handle->daemonVersion) <= 35) + { + auto m = e.msg(); + if (m.find("parsing derivation") != std::string::npos && + m.find("expected string") != std::string::npos && + m.find("Derive([") != std::string::npos) + throw Error("%s, this might be because the daemon is too old to understand dependencies on dynamic derivations. Check to see if the raw dervation is in the form '%s'", std::move(m), "DrvWithVersion(..)"); + } + throw; + } } } @@ -315,7 +332,8 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path, if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path)); } info = std::make_shared( - ValidPathInfo::read(conn->from, *this, GET_PROTOCOL_MINOR(conn->daemonVersion), StorePath{path})); + StorePath{path}, + WorkerProto::Serialise::read(*this, *conn)); } callback(std::move(info)); } catch (...) { callback.rethrow(); } @@ -428,7 +446,7 @@ ref RemoteStore::addCAToStore( } return make_ref( - ValidPathInfo::read(conn->from, *this, GET_PROTOCOL_MINOR(conn->daemonVersion))); + WorkerProto::Serialise::read(*this, *conn)); } else { if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25"); @@ -524,7 +542,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, conn->to << WorkerProto::Op::AddToStoreNar << printStorePath(info.path) << (info.deriver ? printStorePath(*info.deriver) : "") - << info.narHash.to_string(Base16, false); + << info.narHash.to_string(HashFormat::Base16, false); WorkerProto::write(*this, *conn, info.references); conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs << renderContentAddress(info.ca) @@ -553,7 +571,12 @@ void RemoteStore::addMultipleToStore( auto source = sinkToSource([&](Sink & sink) { sink << pathsToCopy.size(); for (auto & [pathInfo, pathSource] : pathsToCopy) { - pathInfo.write(sink, *this, 16); + WorkerProto::Serialise::write(*this, + WorkerProto::WriteConn { + .to = sink, + .version = 16, + }, + pathInfo); pathSource->drainInto(sink); } }); @@ -638,30 +661,6 @@ void RemoteStore::queryRealisationUncached(const DrvOutput & id, } catch (...) { return callback.rethrow(); } } -static void writeDerivedPaths(RemoteStore & store, RemoteStore::Connection & conn, const std::vector & reqs) -{ - if (GET_PROTOCOL_MINOR(conn.daemonVersion) >= 30) { - WorkerProto::write(store, conn, reqs); - } else { - Strings ss; - for (auto & p : reqs) { - auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p); - std::visit(overloaded { - [&](const StorePathWithOutputs & s) { - ss.push_back(s.to_string(store)); - }, - [&](const StorePath & drvPath) { - throw Error("trying to request '%s', but daemon protocol %d.%d is too old (< 1.29) to request a derivation file", - store.printStorePath(drvPath), - GET_PROTOCOL_MAJOR(conn.daemonVersion), - GET_PROTOCOL_MINOR(conn.daemonVersion)); - }, - }, sOrDrvPath); - } - conn.to << ss; - } -} - void RemoteStore::copyDrvsFromEvalStore( const std::vector & paths, std::shared_ptr evalStore) @@ -670,9 +669,16 @@ void RemoteStore::copyDrvsFromEvalStore( /* The remote doesn't have a way to access evalStore, so copy the .drvs. 
*/ RealisedPath::Set drvPaths2; - for (auto & i : paths) - if (auto p = std::get_if(&i)) - drvPaths2.insert(p->drvPath); + for (const auto & i : paths) { + std::visit(overloaded { + [&](const DerivedPath::Opaque & bp) { + // Do nothing, path is hopefully there already + }, + [&](const DerivedPath::Built & bp) { + drvPaths2.insert(bp.drvPath->getBaseStorePath()); + }, + }, i.raw()); + } copyClosure(*evalStore, *this, drvPaths2); } } @@ -684,7 +690,7 @@ void RemoteStore::buildPaths(const std::vector & drvPaths, BuildMod auto conn(getConnection()); conn->to << WorkerProto::Op::BuildPaths; assert(GET_PROTOCOL_MINOR(conn->daemonVersion) >= 13); - writeDerivedPaths(*this, *conn, drvPaths); + WorkerProto::write(*this, *conn, drvPaths); if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 15) conn->to << buildMode; else @@ -708,7 +714,7 @@ std::vector RemoteStore::buildPathsWithResults( if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 34) { conn->to << WorkerProto::Op::BuildPathsWithResults; - writeDerivedPaths(*this, *conn, paths); + WorkerProto::write(*this, *conn, paths); conn->to << buildMode; conn.processStderr(); return WorkerProto::Serialise>::read(*this, *conn); @@ -742,7 +748,8 @@ std::vector RemoteStore::buildPathsWithResults( }; OutputPathMap outputs; - auto drv = evalStore->readDerivation(bfd.drvPath); + auto drvPath = resolveDerivedPath(*evalStore, *bfd.drvPath); + auto drv = evalStore->readDerivation(drvPath); const auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive auto built = resolveDerivedPath(*this, bfd, &*evalStore); for (auto & [output, outputPath] : built) { @@ -750,7 +757,7 @@ std::vector RemoteStore::buildPathsWithResults( if (!outputHash) throw Error( "the derivation '%s' doesn't have an output named '%s'", - printStorePath(bfd.drvPath), output); + printStorePath(drvPath), output); auto outputId = DrvOutput{ *outputHash, output }; if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { auto realisation = @@ -787,20 +794,7 @@ BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicD writeDerivation(conn->to, *this, drv); conn->to << buildMode; conn.processStderr(); - BuildResult res; - res.status = (BuildResult::Status) readInt(conn->from); - conn->from >> res.errorMsg; - if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 29) { - conn->from >> res.timesBuilt >> res.isNonDeterministic >> res.startTime >> res.stopTime; - } - if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 28) { - auto builtOutputs = WorkerProto::Serialise::read(*this, *conn); - for (auto && [output, realisation] : builtOutputs) - res.builtOutputs.insert_or_assign( - std::move(output.outputName), - std::move(realisation)); - } - return res; + return WorkerProto::Serialise::read(*this, *conn); } @@ -901,7 +895,7 @@ void RemoteStore::queryMissing(const std::vector & targets, // to prevent a deadlock. goto fallback; conn->to << WorkerProto::Op::QueryMissing; - writeDerivedPaths(*this, *conn, targets); + WorkerProto::write(*this, *conn, targets); conn.processStderr(); willBuild = WorkerProto::Serialise::read(*this, *conn); willSubstitute = WorkerProto::Serialise::read(*this, *conn); diff --git a/src/libstore/serve-protocol-impl.hh b/src/libstore/serve-protocol-impl.hh new file mode 100644 index 000000000..a3ce81026 --- /dev/null +++ b/src/libstore/serve-protocol-impl.hh @@ -0,0 +1,59 @@ +#pragma once +/** + * @file + * + * Template implementations (as opposed to mere declarations). + * + * This file is an exmample of the "impl.hh" pattern. See the + * contributing guide. 
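
serve-protocol-impl.hh follows the declaration/definition split described here: the protocol header only declares Serialise<T>, the -impl.hh header supplies the generic template bodies, and the .cc file holds the protocol-specific ones. A compressed single-file sketch of that layout, with purely illustrative names and a textual length-prefixed encoding standing in for the real wire format:

    #include <iostream>
    #include <string>
    #include <vector>

    // --- what would live in proto.hh: declaration only ---
    struct Proto {
        template<typename T> struct Serialise;   // no definition here

        template<typename T>
        static void write(std::ostream & to, const T & t)
        {
            Serialise<T>::write(to, t);
        }
    };

    // --- what would live in proto-impl.hh: generic template definitions ---
    template<typename T>
    struct Proto::Serialise<std::vector<T>> {
        static void write(std::ostream & to, const std::vector<T> & ts)
        {
            to << ts.size();                      // length prefix, then each element
            for (auto & t : ts) {
                to << ' ';
                Proto::Serialise<T>::write(to, t);
            }
        }
    };

    // --- what would live in proto.cc: protocol-specific definitions ---
    template<>
    struct Proto::Serialise<int> {
        static void write(std::ostream & to, const int & i) { to << i; }
    };

    int main()
    {
        Proto::write(std::cout, std::vector<int>{ 1, 2, 3 });  // prints "3 1 2 3"
        std::cout << '\n';
    }
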
+ */ + +#include "serve-protocol.hh" +#include "length-prefixed-protocol-helper.hh" + +namespace nix { + +/* protocol-agnostic templates */ + +#define SERVE_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \ + TEMPLATE T ServeProto::Serialise< T >::read(const Store & store, ServeProto::ReadConn conn) \ + { \ + return LengthPrefixedProtoHelper::read(store, conn); \ + } \ + TEMPLATE void ServeProto::Serialise< T >::write(const Store & store, ServeProto::WriteConn conn, const T & t) \ + { \ + LengthPrefixedProtoHelper::write(store, conn, t); \ + } + +SERVE_USE_LENGTH_PREFIX_SERIALISER(template, std::vector) +SERVE_USE_LENGTH_PREFIX_SERIALISER(template, std::set) +SERVE_USE_LENGTH_PREFIX_SERIALISER(template, std::tuple) + +#define COMMA_ , +SERVE_USE_LENGTH_PREFIX_SERIALISER( + template, + std::map) +#undef COMMA_ + +/** + * Use `CommonProto` where possible. + */ +template +struct ServeProto::Serialise +{ + static T read(const Store & store, ServeProto::ReadConn conn) + { + return CommonProto::Serialise::read(store, + CommonProto::ReadConn { .from = conn.from }); + } + static void write(const Store & store, ServeProto::WriteConn conn, const T & t) + { + CommonProto::Serialise::write(store, + CommonProto::WriteConn { .to = conn.to }, + t); + } +}; + +/* protocol-specific templates */ + +} diff --git a/src/libstore/serve-protocol.cc b/src/libstore/serve-protocol.cc new file mode 100644 index 000000000..97a0ddf0e --- /dev/null +++ b/src/libstore/serve-protocol.cc @@ -0,0 +1,58 @@ +#include "serialise.hh" +#include "util.hh" +#include "path-with-outputs.hh" +#include "store-api.hh" +#include "build-result.hh" +#include "serve-protocol.hh" +#include "serve-protocol-impl.hh" +#include "archive.hh" + +#include + +namespace nix { + +/* protocol-specific definitions */ + +BuildResult ServeProto::Serialise::read(const Store & store, ServeProto::ReadConn conn) +{ + BuildResult status; + status.status = (BuildResult::Status) readInt(conn.from); + conn.from >> status.errorMsg; + + if (GET_PROTOCOL_MINOR(conn.version) >= 3) + conn.from + >> status.timesBuilt + >> status.isNonDeterministic + >> status.startTime + >> status.stopTime; + if (GET_PROTOCOL_MINOR(conn.version) >= 6) { + auto builtOutputs = ServeProto::Serialise::read(store, conn); + for (auto && [output, realisation] : builtOutputs) + status.builtOutputs.insert_or_assign( + std::move(output.outputName), + std::move(realisation)); + } + return status; +} + +void ServeProto::Serialise::write(const Store & store, ServeProto::WriteConn conn, const BuildResult & status) +{ + conn.to + << status.status + << status.errorMsg; + + if (GET_PROTOCOL_MINOR(conn.version) >= 3) + conn.to + << status.timesBuilt + << status.isNonDeterministic + << status.startTime + << status.stopTime; + if (GET_PROTOCOL_MINOR(conn.version) >= 6) { + DrvOutputs builtOutputs; + for (auto & [output, realisation] : status.builtOutputs) + builtOutputs.insert_or_assign(realisation.id, realisation); + ServeProto::write(store, conn, builtOutputs); + } +} + +} diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh index 7e43b3969..ba159f6e9 100644 --- a/src/libstore/serve-protocol.hh +++ b/src/libstore/serve-protocol.hh @@ -1,6 +1,8 @@ #pragma once ///@file +#include "common-protocol.hh" + namespace nix { #define SERVE_MAGIC_1 0x390c9deb @@ -10,6 +12,14 @@ namespace nix { #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) + +class Store; +struct Source; + +// items being serialised +struct BuildResult; + + /** * The "serve protocol", 
used by ssh:// stores. * @@ -22,6 +32,60 @@ struct ServeProto * Enumeration of all the request types for the protocol. */ enum struct Command : uint64_t; + + /** + * Version type for the protocol. + * + * @todo Convert to struct with separate major vs minor fields. + */ + using Version = unsigned int; + + /** + * A unidirectional read connection, to be used by the read half of the + * canonical serializers below. + */ + struct ReadConn { + Source & from; + Version version; + }; + + /** + * A unidirectional write connection, to be used by the write half of the + * canonical serializers below. + */ + struct WriteConn { + Sink & to; + Version version; + }; + + /** + * Data type for canonical pairs of serialisers for the serve protocol. + * + * See https://en.cppreference.com/w/cpp/language/adl for the broader + * concept of what is going on here. + */ + template + struct Serialise; + // This is the definition of `Serialise` we *want* to put here, but + // do not do so. + // + // See `worker-protocol.hh` for a longer explanation. +#if 0 + { + static T read(const Store & store, ReadConn conn); + static void write(const Store & store, WriteConn conn, const T & t); + }; +#endif + + /** + * Wrapper function around `ServeProto::Serialise::write` that allows us to + * infer the type instead of having to write it down explicitly. + */ + template + static void write(const Store & store, WriteConn conn, const T & t) + { + ServeProto::Serialise::write(store, conn, t); + } }; enum struct ServeProto::Command : uint64_t @@ -58,4 +122,36 @@ inline std::ostream & operator << (std::ostream & s, ServeProto::Command op) return s << (uint64_t) op; } +/** + * Declare a canonical serialiser pair for the worker protocol. + * + * We specialise the struct merely to indicate that we are implementing + * the function for the given type. + * + * Some sort of `template<...>` must be used with the caller for this to + * be legal specialization syntax. See below for what that looks like in + * practice. + */ +#define DECLARE_SERVE_SERIALISER(T) \ + struct ServeProto::Serialise< T > \ + { \ + static T read(const Store & store, ServeProto::ReadConn conn); \ + static void write(const Store & store, ServeProto::WriteConn conn, const T & t); \ + }; + +template<> +DECLARE_SERVE_SERIALISER(BuildResult); + +template +DECLARE_SERVE_SERIALISER(std::vector); +template +DECLARE_SERVE_SERIALISER(std::set); +template +DECLARE_SERVE_SERIALISER(std::tuple); + +#define COMMA_ , +template +DECLARE_SERVE_SERIALISER(std::map); +#undef COMMA_ + } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 1863a0876..55cdd71a9 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -12,6 +12,9 @@ #include "callback.hh" #include "remote-store.hh" #include "local-overlay-store.hh" +// FIXME this should not be here, see TODO below on +// `addMultipleToStore`. 
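
ReadConn and WriteConn now carry the negotiated protocol version, so a serialiser can include or skip fields per peer, as the BuildResult code above does with GET_PROTOCOL_MINOR. A self-contained sketch of that version gating, using an invented Result struct and whitespace-separated text instead of the real binary framing:

    #include <cstdint>
    #include <iostream>
    #include <sstream>

    constexpr unsigned protoMinor(unsigned v) { return v & 0x00ff; }

    struct Result { int status = 0; uint64_t startTime = 0, stopTime = 0; };

    // Newer fields are only written when the peer understands them...
    static void writeResult(std::ostream & to, unsigned version, const Result & r)
    {
        to << r.status;
        if (protoMinor(version) >= 3) to << ' ' << r.startTime << ' ' << r.stopTime;
    }

    // ...and only read when the peer can have sent them.
    static Result readResult(std::istream & from, unsigned version)
    {
        Result r;
        from >> r.status;
        if (protoMinor(version) >= 3) from >> r.startTime >> r.stopTime;
        return r;
    }

    int main()
    {
        std::stringstream oldWire, newWire;
        writeResult(oldWire, 0x200, Result{ 1, 10, 20 });  // minor 0: times dropped
        writeResult(newWire, 0x205, Result{ 1, 10, 20 });  // minor 5: times kept
        std::cout << readResult(oldWire, 0x200).stopTime << ' '
                  << readResult(newWire, 0x205).stopTime << '\n';  // prints "0 20"
    }
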
+#include "worker-protocol.hh" #include #include @@ -155,7 +158,7 @@ StorePath Store::makeStorePath(std::string_view type, StorePath Store::makeStorePath(std::string_view type, const Hash & hash, std::string_view name) const { - return makeStorePath(type, hash.to_string(Base16, true), name); + return makeStorePath(type, hash.to_string(HashFormat::Base16, true), name); } @@ -193,7 +196,7 @@ StorePath Store::makeFixedOutputPath(std::string_view name, const FixedOutputInf hashString(htSHA256, "fixed:out:" + makeFileIngestionPrefix(info.method) - + info.hash.to_string(Base16, true) + ":"), + + info.hash.to_string(HashFormat::Base16, true) + ":"), name); } } @@ -226,12 +229,16 @@ StorePath Store::makeFixedOutputPathFromCA(std::string_view name, const ContentA } -std::pair Store::computeStorePathForPath(std::string_view name, - const Path & srcPath, FileIngestionMethod method, HashType hashAlgo, PathFilter & filter) const +std::pair Store::computeStorePathFromDump( + Source & dump, + std::string_view name, + FileIngestionMethod method, + HashType hashAlgo, + const StorePathSet & references) const { - Hash h = method == FileIngestionMethod::Recursive - ? hashPath(hashAlgo, srcPath, filter).first - : hashFile(hashAlgo, srcPath); + HashSink sink(hashAlgo); + dump.drainInto(sink); + auto h = sink.finish().first; FixedOutputInfo caInfo { .method = method, .hash = h, @@ -358,7 +365,13 @@ void Store::addMultipleToStore( { auto expected = readNum(source); for (uint64_t i = 0; i < expected; ++i) { - auto info = ValidPathInfo::read(source, *this, 16); + // FIXME we should not be using the worker protocol here, let + // alone the worker protocol with a hard-coded version! + auto info = WorkerProto::Serialise::read(*this, + WorkerProto::ReadConn { + .from = source, + .version = 16, + }); info.ultimate = false; addToStore(info, source, repair, checkSigs); } @@ -885,7 +898,7 @@ std::string Store::makeValidityRegistration(const StorePathSet & paths, auto info = queryPathInfo(i); if (showHash) { - s += info->narHash.to_string(Base16, false) + "\n"; + s += info->narHash.to_string(HashFormat::Base16, false) + "\n"; s += fmt("%1%\n", info->narSize); } @@ -939,7 +952,7 @@ StorePathSet Store::exportReferences(const StorePathSet & storePaths, const Stor json Store::pathInfoToJSON(const StorePathSet & storePaths, bool includeImpureInfo, bool showClosureSize, - Base hashBase, + HashFormat hashFormat, AllowInvalidFlag allowInvalid) { json::array_t jsonList = json::array(); @@ -952,7 +965,7 @@ json Store::pathInfoToJSON(const StorePathSet & storePaths, jsonPath["path"] = printStorePath(info->path); jsonPath["valid"] = true; - jsonPath["narHash"] = info->narHash.to_string(hashBase, true); + jsonPath["narHash"] = info->narHash.to_string(hashFormat, true); jsonPath["narSize"] = info->narSize; { @@ -994,7 +1007,7 @@ json Store::pathInfoToJSON(const StorePathSet & storePaths, if (!narInfo->url.empty()) jsonPath["url"] = narInfo->url; if (narInfo->fileHash) - jsonPath["downloadHash"] = narInfo->fileHash->to_string(hashBase, true); + jsonPath["downloadHash"] = narInfo->fileHash->to_string(hashFormat, true); if (narInfo->fileSize) jsonPath["downloadSize"] = narInfo->fileSize; if (showClosureSize) diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index f9029ade1..e123fccc5 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -153,19 +153,22 @@ struct StoreConfig : public Config Setting priority{this, 0, "priority", R"( - Priority of this store when used as a substituter. 
A lower value means a higher priority. + Priority of this store when used as a [substituter](@docroot@/command-ref/conf-file.md#conf-substituters). + A lower value means a higher priority. )"}; Setting wantMassQuery{this, false, "want-mass-query", R"( - Whether this store (when used as a substituter) can be - queried efficiently for path validity. + Whether this store can be queried efficiently for path validity when used as a [substituter](@docroot@/command-ref/conf-file.md#conf-substituters). )"}; Setting systemFeatures{this, getDefaultSystemFeatures(), "system-features", - "Optional features that the system this store builds on implements (like \"kvm\")."}; + R"( + Optional [system features](@docroot@/command-ref/conf-file.md#conf-system-features) available on the system this store uses to build derivations. + Example: `"kvm"` + )" }; }; class Store : public std::enable_shared_from_this, public virtual StoreConfig @@ -289,14 +292,15 @@ public: StorePath makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const; /** - * Preparatory part of addToStore(). - * - * @return the store path to which srcPath is to be copied - * and the cryptographic hash of the contents of srcPath. + * Read-only variant of addToStoreFromDump(). It returns the store + * path to which a NAR or flat file would be written. */ - std::pair computeStorePathForPath(std::string_view name, - const Path & srcPath, FileIngestionMethod method = FileIngestionMethod::Recursive, - HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const; + std::pair computeStorePathFromDump( + Source & dump, + std::string_view name, + FileIngestionMethod method = FileIngestionMethod::Recursive, + HashType hashAlgo = htSHA256, + const StorePathSet & references = {}) const; /** * Preparatory part of addTextToStore(). @@ -673,7 +677,7 @@ public: */ nlohmann::json pathInfoToJSON(const StorePathSet & storePaths, bool includeImpureInfo, bool showClosureSize, - Base hashBase = Base32, + HashFormat hashFormat = HashFormat::Base32, AllowInvalidFlag allowInvalid = DisallowInvalid); /** @@ -945,6 +949,7 @@ void removeTempRoots(); * Resolve the derived path completely, failing if any derivation output * is unknown. */ +StorePath resolveDerivedPath(Store &, const SingleDerivedPath &, Store * evalStore = nullptr); OutputPathMap resolveDerivedPath(Store &, const DerivedPath::Built &, Store * evalStore = nullptr); diff --git a/src/libstore/tests/characterization.hh b/src/libstore/tests/characterization.hh new file mode 100644 index 000000000..5f366cb42 --- /dev/null +++ b/src/libstore/tests/characterization.hh @@ -0,0 +1,23 @@ +#pragma once +///@file + +namespace nix { + +/** + * The path to the `unit-test-data` directory. See the contributing + * guide in the manual for further details. + */ +static Path getUnitTestData() { + return getEnv("_NIX_TEST_UNIT_DATA").value(); +} + +/** + * Whether we should update "golden masters" instead of running tests + * against them. See the contributing guide in the manual for further + * details. 
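
testAccept and _NIX_TEST_UNIT_DATA drive the golden-master ("characterization") tests that follow: in accept mode the expected bytes are rewritten, otherwise the freshly produced bytes are compared against the stored file. A minimal standalone sketch of that flow, with a made-up file name and payload:

    #include <cstdlib>
    #include <fstream>
    #include <iostream>
    #include <sstream>
    #include <string>

    static bool testAccept()
    {
        const char * v = std::getenv("_NIX_TEST_ACCEPT");
        return v && std::string(v) == "1";
    }

    int main()
    {
        const std::string file = "string.bin";   // illustrative golden file
        const std::string got = "white rabbit";  // what the serialiser produced

        if (testAccept()) {
            std::ofstream(file) << got;          // update the golden master
            std::cout << "updated " << file << '\n';
        } else {
            std::stringstream expected;
            expected << std::ifstream(file).rdbuf();  // read the stored master
            std::cout << (expected.str() == got ? "ok" : "mismatch") << '\n';
        }
    }
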
+ */ +static bool testAccept() { + return getEnv("_NIX_TEST_ACCEPT") == "1"; +} + +} diff --git a/src/libstore/tests/common-protocol.cc b/src/libstore/tests/common-protocol.cc new file mode 100644 index 000000000..61c2cb70c --- /dev/null +++ b/src/libstore/tests/common-protocol.cc @@ -0,0 +1,205 @@ +#include + +#include +#include + +#include "common-protocol.hh" +#include "common-protocol-impl.hh" +#include "build-result.hh" +#include "tests/protocol.hh" +#include "tests/characterization.hh" + +namespace nix { + +const char commonProtoDir[] = "common-protocol"; + +class CommonProtoTest : public ProtoTest +{ +public: + /** + * Golden test for `T` reading + */ + template + void readTest(PathView testStem, T value) + { + if (testAccept()) + { + GTEST_SKIP() << "Cannot read golden master because another test is also updating it"; + } + else + { + auto expected = readFile(goldenMaster(testStem)); + + T got = ({ + StringSource from { expected }; + CommonProto::Serialise::read( + *store, + CommonProto::ReadConn { .from = from }); + }); + + ASSERT_EQ(got, value); + } + } + + /** + * Golden test for `T` write + */ + template + void writeTest(PathView testStem, const T & value) + { + auto file = goldenMaster(testStem); + + StringSink to; + CommonProto::write( + *store, + CommonProto::WriteConn { .to = to }, + value); + + if (testAccept()) + { + createDirs(dirOf(file)); + writeFile(file, to.s); + GTEST_SKIP() << "Updating golden master"; + } + else + { + auto expected = readFile(file); + ASSERT_EQ(to.s, expected); + } + } +}; + +#define CHARACTERIZATION_TEST(NAME, STEM, VALUE) \ + TEST_F(CommonProtoTest, NAME ## _read) { \ + readTest(STEM, VALUE); \ + } \ + TEST_F(CommonProtoTest, NAME ## _write) { \ + writeTest(STEM, VALUE); \ + } + +CHARACTERIZATION_TEST( + string, + "string", + (std::tuple { + "", + "hi", + "white rabbit", + "大白兔", + "oh no \0\0\0 what was that!", + })) + +CHARACTERIZATION_TEST( + storePath, + "store-path", + (std::tuple { + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" }, + })) + +CHARACTERIZATION_TEST( + contentAddress, + "content-address", + (std::tuple { + ContentAddress { + .method = TextIngestionMethod {}, + .hash = hashString(HashType::htSHA256, "Derive(...)"), + }, + ContentAddress { + .method = FileIngestionMethod::Flat, + .hash = hashString(HashType::htSHA1, "blob blob..."), + }, + ContentAddress { + .method = FileIngestionMethod::Recursive, + .hash = hashString(HashType::htSHA256, "(...)"), + }, + })) + +CHARACTERIZATION_TEST( + drvOutput, + "drv-output", + (std::tuple { + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "quux", + }, + })) + +CHARACTERIZATION_TEST( + realisation, + "realisation", + (std::tuple { + Realisation { + .id = DrvOutput { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + .signatures = { "asdf", "qwer" }, + }, + Realisation { + .id = { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + .signatures = { "asdf", "qwer" }, + .dependentRealisations = { + { + DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + 
.outputName = "quux", + }, + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + }, + }, + }, + })) + +CHARACTERIZATION_TEST( + vector, + "vector", + (std::tuple, std::vector, std::vector, std::vector>> { + { }, + { "" }, + { "", "foo", "bar" }, + { {}, { "" }, { "", "1", "2" } }, + })) + +CHARACTERIZATION_TEST( + set, + "set", + (std::tuple, std::set, std::set, std::set>> { + { }, + { "" }, + { "", "foo", "bar" }, + { {}, { "" }, { "", "1", "2" } }, + })) + +CHARACTERIZATION_TEST( + optionalStorePath, + "optional-store-path", + (std::tuple, std::optional> { + std::nullopt, + std::optional { + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" }, + }, + })) + +CHARACTERIZATION_TEST( + optionalContentAddress, + "optional-content-address", + (std::tuple, std::optional> { + std::nullopt, + std::optional { + ContentAddress { + .method = FileIngestionMethod::Flat, + .hash = hashString(HashType::htSHA1, "blob blob..."), + }, + }, + })) + +} diff --git a/src/libstore/tests/derivation.cc b/src/libstore/tests/derivation.cc index 0e28c1f08..c360c9707 100644 --- a/src/libstore/tests/derivation.cc +++ b/src/libstore/tests/derivation.cc @@ -42,6 +42,26 @@ class ImpureDerivationTest : public DerivationTest } }; +TEST_F(DerivationTest, BadATerm_version) { + ASSERT_THROW( + parseDerivation( + *store, + R"(DrvWithVersion("invalid-version",[],[("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv",["cat","dog"])],["/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"],"wasm-sel4","foo",["bar","baz"],[("BIG_BAD","WOLF")]))", + "whatever", + mockXpSettings), + FormatError); +} + +TEST_F(DynDerivationTest, BadATerm_oldVersionDynDeps) { + ASSERT_THROW( + parseDerivation( + *store, + R"(Derive([],[("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv",(["cat","dog"],[("cat",["kitten"]),("goose",["gosling"])]))],["/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"],"wasm-sel4","foo",["bar","baz"],[("BIG_BAD","WOLF")]))", + "dyn-dep-derivation", + mockXpSettings), + FormatError); +} + #define TEST_JSON(FIXTURE, NAME, STR, VAL, DRV_NAME, OUTPUT_NAME) \ TEST_F(FIXTURE, DerivationOutput_ ## NAME ## _to_json) { \ using nlohmann::literals::operator "" _json; \ @@ -143,34 +163,93 @@ TEST_JSON(ImpureDerivationTest, impure, #undef TEST_JSON -#define TEST_JSON(NAME, STR, VAL, DRV_NAME) \ - TEST_F(DerivationTest, Derivation_ ## NAME ## _to_json) { \ - using nlohmann::literals::operator "" _json; \ - ASSERT_EQ( \ - STR ## _json, \ - (Derivation { VAL }).toJSON(*store)); \ - } \ - \ - TEST_F(DerivationTest, Derivation_ ## NAME ## _from_json) { \ - using nlohmann::literals::operator "" _json; \ - ASSERT_EQ( \ - Derivation { VAL }, \ - Derivation::fromJSON( \ - *store, \ - STR ## _json)); \ +#define TEST_JSON(FIXTURE, NAME, STR, VAL) \ + TEST_F(FIXTURE, Derivation_ ## NAME ## _to_json) { \ + using nlohmann::literals::operator "" _json; \ + ASSERT_EQ( \ + STR ## _json, \ + (VAL).toJSON(*store)); \ + } \ + \ + TEST_F(FIXTURE, Derivation_ ## NAME ## _from_json) { \ + using nlohmann::literals::operator "" _json; \ + ASSERT_EQ( \ + (VAL), \ + Derivation::fromJSON( \ + *store, \ + STR ## _json, \ + mockXpSettings)); \ } -TEST_JSON(simple, +#define TEST_ATERM(FIXTURE, NAME, STR, VAL, DRV_NAME) \ + TEST_F(FIXTURE, Derivation_ ## NAME ## _to_aterm) { \ + ASSERT_EQ( \ + STR, \ + (VAL).unparse(*store, false)); \ + } \ + \ + TEST_F(FIXTURE, Derivation_ ## NAME ## _from_aterm) { \ + auto parsed = parseDerivation( \ + *store, \ + STR, \ + DRV_NAME, \ + mockXpSettings); \ + ASSERT_EQ( \ + (VAL).toJSON(*store), \ + 
parsed.toJSON(*store)); \ + ASSERT_EQ( \ + (VAL), \ + parsed); \ + } + +Derivation makeSimpleDrv(const Store & store) { + Derivation drv; + drv.name = "simple-derivation"; + drv.inputSrcs = { + store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"), + }; + drv.inputDrvs = { + .map = { + { + store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv"), + { + .value = { + "cat", + "dog", + }, + }, + }, + }, + }; + drv.platform = "wasm-sel4"; + drv.builder = "foo"; + drv.args = { + "bar", + "baz", + }; + drv.env = { + { + "BIG_BAD", + "WOLF", + }, + }; + return drv; +} + +TEST_JSON(DerivationTest, simple, R"({ - "name": "my-derivation", + "name": "simple-derivation", "inputSrcs": [ "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" ], "inputDrvs": { - "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": [ - "cat", - "dog" - ] + "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { + "dynamicOutputs": {}, + "outputs": [ + "cat", + "dog" + ] + } }, "system": "wasm-sel4", "builder": "foo", @@ -183,37 +262,108 @@ TEST_JSON(simple, }, "outputs": {} })", - ({ - Derivation drv; - drv.name = "my-derivation"; - drv.inputSrcs = { - store->parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"), - }; - drv.inputDrvs = { + makeSimpleDrv(*store)) + +TEST_ATERM(DerivationTest, simple, + R"(Derive([],[("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv",["cat","dog"])],["/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"],"wasm-sel4","foo",["bar","baz"],[("BIG_BAD","WOLF")]))", + makeSimpleDrv(*store), + "simple-derivation") + +Derivation makeDynDepDerivation(const Store & store) { + Derivation drv; + drv.name = "dyn-dep-derivation"; + drv.inputSrcs = { + store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"), + }; + drv.inputDrvs = { + .map = { { - store->parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv"), - { - "cat", - "dog", + store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv"), + DerivedPathMap::ChildNode { + .value = { + "cat", + "dog", + }, + .childMap = { + { + "cat", + DerivedPathMap::ChildNode { + .value = { + "kitten", + }, + }, + }, + { + "goose", + DerivedPathMap::ChildNode { + .value = { + "gosling", + }, + }, + }, + }, }, - } - }; - drv.platform = "wasm-sel4"; - drv.builder = "foo"; - drv.args = { - "bar", - "baz", - }; - drv.env = { - { - "BIG_BAD", - "WOLF", }, - }; - drv; - }), - "drv-name") + }, + }; + drv.platform = "wasm-sel4"; + drv.builder = "foo"; + drv.args = { + "bar", + "baz", + }; + drv.env = { + { + "BIG_BAD", + "WOLF", + }, + }; + return drv; +} + +TEST_JSON(DynDerivationTest, dynDerivationDeps, + R"({ + "name": "dyn-dep-derivation", + "inputSrcs": [ + "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" + ], + "inputDrvs": { + "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { + "dynamicOutputs": { + "cat": { + "dynamicOutputs": {}, + "outputs": ["kitten"] + }, + "goose": { + "dynamicOutputs": {}, + "outputs": ["gosling"] + } + }, + "outputs": [ + "cat", + "dog" + ] + } + }, + "system": "wasm-sel4", + "builder": "foo", + "args": [ + "bar", + "baz" + ], + "env": { + "BIG_BAD": "WOLF" + }, + "outputs": {} + })", + makeDynDepDerivation(*store)) + +TEST_ATERM(DynDerivationTest, dynDerivationDeps, + R"(DrvWithVersion("xp-dyn-drv",[],[("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv",(["cat","dog"],[("cat",["kitten"]),("goose",["gosling"])]))],["/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"],"wasm-sel4","foo",["bar","baz"],[("BIG_BAD","WOLF")]))", + 
makeDynDepDerivation(*store), + "dyn-dep-derivation") #undef TEST_JSON +#undef TEST_ATERM } diff --git a/src/libstore/tests/derived-path.cc b/src/libstore/tests/derived-path.cc index 160443ec1..3fa3c0801 100644 --- a/src/libstore/tests/derived-path.cc +++ b/src/libstore/tests/derived-path.cc @@ -17,14 +17,34 @@ Gen Arbitrary::arbitrary() }); } +Gen Arbitrary::arbitrary() +{ + return gen::just(SingleDerivedPath::Built { + .drvPath = make_ref(*gen::arbitrary()), + .output = (*gen::arbitrary()).name, + }); +} + Gen Arbitrary::arbitrary() { return gen::just(DerivedPath::Built { - .drvPath = *gen::arbitrary(), + .drvPath = make_ref(*gen::arbitrary()), .outputs = *gen::arbitrary(), }); } +Gen Arbitrary::arbitrary() +{ + switch (*gen::inRange(0, std::variant_size_v)) { + case 0: + return gen::just(*gen::arbitrary()); + case 1: + return gen::just(*gen::arbitrary()); + default: + assert(false); + } +} + Gen Arbitrary::arbitrary() { switch (*gen::inRange(0, std::variant_size_v)) { @@ -45,14 +65,73 @@ class DerivedPathTest : public LibStoreTest { }; -// FIXME: `RC_GTEST_FIXTURE_PROP` isn't calling `SetUpTestSuite` because it is -// no a real fixture. -// -// See https://github.com/emil-e/rapidcheck/blob/master/doc/gtest.md#rc_gtest_fixture_propfixture-name-args -TEST_F(DerivedPathTest, force_init) -{ +/** + * Round trip (string <-> data structure) test for + * `DerivedPath::Opaque`. + */ +TEST_F(DerivedPathTest, opaque) { + std::string_view opaque = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x"; + auto elem = DerivedPath::parse(*store, opaque); + auto * p = std::get_if(&elem); + ASSERT_TRUE(p); + ASSERT_EQ(p->path, store->parseStorePath(opaque)); + ASSERT_EQ(elem.to_string(*store), opaque); } +/** + * Round trip (string <-> data structure) test for a simpler + * `DerivedPath::Built`. + */ +TEST_F(DerivedPathTest, built_opaque) { + std::string_view built = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv^bar,foo"; + auto elem = DerivedPath::parse(*store, built); + auto * p = std::get_if(&elem); + ASSERT_TRUE(p); + ASSERT_EQ(p->outputs, ((OutputsSpec) OutputsSpec::Names { "foo", "bar" })); + ASSERT_EQ(*p->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque { + .path = store->parseStorePath(built.substr(0, 49)), + })); + ASSERT_EQ(elem.to_string(*store), built); +} + +/** + * Round trip (string <-> data structure) test for a more complex, + * inductive `DerivedPath::Built`. + */ +TEST_F(DerivedPathTest, built_built) { + /** + * We set these in tests rather than the regular globals so we don't have + * to worry about race conditions if the tests run concurrently. + */ + ExperimentalFeatureSettings mockXpSettings; + mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations"); + + std::string_view built = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv^foo^bar,baz"; + auto elem = DerivedPath::parse(*store, built, mockXpSettings); + auto * p = std::get_if(&elem); + ASSERT_TRUE(p); + ASSERT_EQ(p->outputs, ((OutputsSpec) OutputsSpec::Names { "bar", "baz" })); + auto * drvPath = std::get_if(&*p->drvPath); + ASSERT_TRUE(drvPath); + ASSERT_EQ(drvPath->output, "foo"); + ASSERT_EQ(*drvPath->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque { + .path = store->parseStorePath(built.substr(0, 49)), + })); + ASSERT_EQ(elem.to_string(*store), built); +} + +/** + * Without the right experimental features enabled, we cannot parse a + * complex inductive derived path. 
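
The round-trip tests above exercise the '^'-separated derived path syntax, where each additional '^' selects an output of the previous, possibly dynamic, layer. The toy split below only illustrates how the string nests; it is not the real DerivedPath::parse:

    #include <cstddef>
    #include <iostream>
    #include <string_view>
    #include <vector>

    int main()
    {
        // "outputs bar and baz of (output foo of the derivation x.drv)"
        std::string_view s = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv^foo^bar,baz";

        std::vector<std::string_view> parts;
        for (;;) {
            auto caret = s.find('^');
            if (caret == std::string_view::npos) { parts.push_back(s); break; }
            parts.push_back(s.substr(0, caret));
            s = s.substr(caret + 1);
        }

        std::cout << "base store path: " << parts.front() << '\n';
        for (std::size_t i = 1; i + 1 < parts.size(); ++i)
            std::cout << "intermediate dynamic output: " << parts[i] << '\n';
        std::cout << "requested outputs: " << parts.back() << '\n';
    }
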
+ */ +TEST_F(DerivedPathTest, built_built_xp) { + ASSERT_THROW( + DerivedPath::parse(*store, "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv^foo^bar,baz"), + MissingExperimentalFeature); +} + +#ifndef COVERAGE + RC_GTEST_FIXTURE_PROP( DerivedPathTest, prop_legacy_round_rip, @@ -69,4 +148,6 @@ RC_GTEST_FIXTURE_PROP( RC_ASSERT(o == DerivedPath::parse(*store, o.to_string(*store))); } +#endif + } diff --git a/src/libstore/tests/derived-path.hh b/src/libstore/tests/derived-path.hh index 506f3ccb1..98d61f228 100644 --- a/src/libstore/tests/derived-path.hh +++ b/src/libstore/tests/derived-path.hh @@ -12,8 +12,18 @@ namespace rc { using namespace nix; template<> -struct Arbitrary { - static Gen arbitrary(); +struct Arbitrary { + static Gen arbitrary(); +}; + +template<> +struct Arbitrary { + static Gen arbitrary(); +}; + +template<> +struct Arbitrary { + static Gen arbitrary(); }; template<> diff --git a/src/libstore/tests/outputs-spec.cc b/src/libstore/tests/outputs-spec.cc index bf8deaa9d..952945185 100644 --- a/src/libstore/tests/outputs-spec.cc +++ b/src/libstore/tests/outputs-spec.cc @@ -224,6 +224,8 @@ Gen Arbitrary::arbitrary() namespace nix { +#ifndef COVERAGE + RC_GTEST_PROP( OutputsSpec, prop_round_rip, @@ -232,4 +234,6 @@ RC_GTEST_PROP( RC_ASSERT(o == OutputsSpec::parse(o.to_string())); } +#endif + } diff --git a/src/libstore/tests/path.cc b/src/libstore/tests/path.cc index 430aa0099..5a84d646c 100644 --- a/src/libstore/tests/path.cc +++ b/src/libstore/tests/path.cc @@ -39,6 +39,7 @@ TEST_DONT_PARSE(double_star, "**") TEST_DONT_PARSE(star_first, "*,foo") TEST_DONT_PARSE(star_second, "foo,*") TEST_DONT_PARSE(bang, "foo!o") +TEST_DONT_PARSE(dotfile, ".gitignore") #undef TEST_DONT_PARSE @@ -101,8 +102,12 @@ Gen Arbitrary::arbitrary() pre += '-'; break; case 64: - pre += '.'; - break; + // names aren't permitted to start with a period, + // so just fall through to the next case here + if (c != 0) { + pre += '.'; + break; + } case 65: pre += '_'; break; @@ -134,6 +139,8 @@ Gen Arbitrary::arbitrary() namespace nix { +#ifndef COVERAGE + RC_GTEST_FIXTURE_PROP( StorePathTest, prop_regex_accept, @@ -150,4 +157,6 @@ RC_GTEST_FIXTURE_PROP( RC_ASSERT(p == store->parseStorePath(store->printStorePath(p))); } +#endif + } diff --git a/src/libstore/tests/protocol.hh b/src/libstore/tests/protocol.hh new file mode 100644 index 000000000..496915745 --- /dev/null +++ b/src/libstore/tests/protocol.hh @@ -0,0 +1,91 @@ +#include +#include + +#include "tests/libstore.hh" +#include "tests/characterization.hh" + +namespace nix { + +template +class ProtoTest : public LibStoreTest +{ +protected: + Path unitTestData = getUnitTestData() + "/libstore/" + protocolDir; + + Path goldenMaster(std::string_view testStem) { + return unitTestData + "/" + testStem + ".bin"; + } +}; + +template +class VersionedProtoTest : public ProtoTest +{ +public: + /** + * Golden test for `T` reading + */ + template + void readTest(PathView testStem, typename Proto::Version version, T value) + { + if (testAccept()) + { + GTEST_SKIP() << "Cannot read golden master because another test is also updating it"; + } + else + { + auto expected = readFile(ProtoTest::goldenMaster(testStem)); + + T got = ({ + StringSource from { expected }; + Proto::template Serialise::read( + *LibStoreTest::store, + typename Proto::ReadConn { + .from = from, + .version = version, + }); + }); + + ASSERT_EQ(got, value); + } + } + + /** + * Golden test for `T` write + */ + template + void writeTest(PathView testStem, typename Proto::Version version, const T & value) + 
{ + auto file = ProtoTest::goldenMaster(testStem); + + StringSink to; + Proto::write( + *LibStoreTest::store, + typename Proto::WriteConn { + .to = to, + .version = version, + }, + value); + + if (testAccept()) + { + createDirs(dirOf(file)); + writeFile(file, to.s); + GTEST_SKIP() << "Updating golden master"; + } + else + { + auto expected = readFile(file); + ASSERT_EQ(to.s, expected); + } + } +}; + +#define VERSIONED_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE) \ + TEST_F(FIXTURE, NAME ## _read) { \ + readTest(STEM, VERSION, VALUE); \ + } \ + TEST_F(FIXTURE, NAME ## _write) { \ + writeTest(STEM, VERSION, VALUE); \ + } + +} diff --git a/src/libstore/tests/serve-protocol.cc b/src/libstore/tests/serve-protocol.cc new file mode 100644 index 000000000..c8ac87a04 --- /dev/null +++ b/src/libstore/tests/serve-protocol.cc @@ -0,0 +1,279 @@ +#include + +#include +#include + +#include "serve-protocol.hh" +#include "serve-protocol-impl.hh" +#include "build-result.hh" +#include "tests/protocol.hh" +#include "tests/characterization.hh" + +namespace nix { + +const char serveProtoDir[] = "serve-protocol"; + +struct ServeProtoTest : VersionedProtoTest +{ + /** + * For serializers that don't care about the minimum version, we + * used the oldest one: 1.0. + */ + ServeProto::Version defaultVersion = 2 << 8 | 0; +}; + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + string, + "string", + defaultVersion, + (std::tuple { + "", + "hi", + "white rabbit", + "大白兔", + "oh no \0\0\0 what was that!", + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + storePath, + "store-path", + defaultVersion, + (std::tuple { + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + contentAddress, + "content-address", + defaultVersion, + (std::tuple { + ContentAddress { + .method = TextIngestionMethod {}, + .hash = hashString(HashType::htSHA256, "Derive(...)"), + }, + ContentAddress { + .method = FileIngestionMethod::Flat, + .hash = hashString(HashType::htSHA1, "blob blob..."), + }, + ContentAddress { + .method = FileIngestionMethod::Recursive, + .hash = hashString(HashType::htSHA256, "(...)"), + }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + drvOutput, + "drv-output", + defaultVersion, + (std::tuple { + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "quux", + }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + realisation, + "realisation", + defaultVersion, + (std::tuple { + Realisation { + .id = DrvOutput { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + .signatures = { "asdf", "qwer" }, + }, + Realisation { + .id = { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + .signatures = { "asdf", "qwer" }, + .dependentRealisations = { + { + DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "quux", + }, + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + }, + }, + }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + buildResult_2_2, + 
"build-result-2.2", + 2 << 8 | 2, + ({ + using namespace std::literals::chrono_literals; + std::tuple t { + BuildResult { + .status = BuildResult::OutputRejected, + .errorMsg = "no idea why", + }, + BuildResult { + .status = BuildResult::NotDeterministic, + .errorMsg = "no idea why", + }, + BuildResult { + .status = BuildResult::Built, + }, + }; + t; + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + buildResult_2_3, + "build-result-2.3", + 2 << 8 | 3, + ({ + using namespace std::literals::chrono_literals; + std::tuple t { + BuildResult { + .status = BuildResult::OutputRejected, + .errorMsg = "no idea why", + }, + BuildResult { + .status = BuildResult::NotDeterministic, + .errorMsg = "no idea why", + .timesBuilt = 3, + .isNonDeterministic = true, + .startTime = 30, + .stopTime = 50, + }, + BuildResult { + .status = BuildResult::Built, + .startTime = 30, + .stopTime = 50, + }, + }; + t; + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + buildResult_2_6, + "build-result-2.6", + 2 << 8 | 6, + ({ + using namespace std::literals::chrono_literals; + std::tuple t { + BuildResult { + .status = BuildResult::OutputRejected, + .errorMsg = "no idea why", + }, + BuildResult { + .status = BuildResult::NotDeterministic, + .errorMsg = "no idea why", + .timesBuilt = 3, + .isNonDeterministic = true, + .startTime = 30, + .stopTime = 50, + }, + BuildResult { + .status = BuildResult::Built, + .timesBuilt = 1, + .builtOutputs = { + { + "foo", + { + .id = DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "foo", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + }, + }, + { + "bar", + { + .id = DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "bar", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar" }, + }, + }, + }, + .startTime = 30, + .stopTime = 50, +#if 0 + // These fields are not yet serialized. + // FIXME Include in next version of protocol or document + // why they are skipped. 
+ .cpuUser = std::chrono::milliseconds(500s), + .cpuSystem = std::chrono::milliseconds(604s), +#endif + }, + }; + t; + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + vector, + "vector", + defaultVersion, + (std::tuple, std::vector, std::vector, std::vector>> { + { }, + { "" }, + { "", "foo", "bar" }, + { {}, { "" }, { "", "1", "2" } }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + set, + "set", + defaultVersion, + (std::tuple, std::set, std::set, std::set>> { + { }, + { "" }, + { "", "foo", "bar" }, + { {}, { "" }, { "", "1", "2" } }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + optionalStorePath, + "optional-store-path", + defaultVersion, + (std::tuple, std::optional> { + std::nullopt, + std::optional { + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" }, + }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + optionalContentAddress, + "optional-content-address", + defaultVersion, + (std::tuple, std::optional> { + std::nullopt, + std::optional { + ContentAddress { + .method = FileIngestionMethod::Flat, + .hash = hashString(HashType::htSHA1, "blob blob..."), + }, + }, + })) + +} diff --git a/src/libstore/tests/worker-protocol.cc b/src/libstore/tests/worker-protocol.cc new file mode 100644 index 000000000..ad5943c69 --- /dev/null +++ b/src/libstore/tests/worker-protocol.cc @@ -0,0 +1,547 @@ +#include + +#include +#include + +#include "worker-protocol.hh" +#include "worker-protocol-impl.hh" +#include "derived-path.hh" +#include "build-result.hh" +#include "tests/protocol.hh" +#include "tests/characterization.hh" + +namespace nix { + +const char workerProtoDir[] = "worker-protocol"; + +struct WorkerProtoTest : VersionedProtoTest +{ + /** + * For serializers that don't care about the minimum version, we + * used the oldest one: 1.0. 
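+     *
+     * (Version encoding: the major number sits in the high byte and the
+     * minor in the low byte, so `1 << 8 | 0` is 1.0 and `1 << 8 | 29`
+     * is 1.29, i.e. 0x011d = 285; the GET_PROTOCOL_MAJOR /
+     * GET_PROTOCOL_MINOR macros used in worker-protocol.cc mask those
+     * parts back out.)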
+ */ + WorkerProto::Version defaultVersion = 1 << 8 | 0; +}; + + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + string, + "string", + defaultVersion, + (std::tuple { + "", + "hi", + "white rabbit", + "大白兔", + "oh no \0\0\0 what was that!", + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + storePath, + "store-path", + defaultVersion, + (std::tuple { + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + contentAddress, + "content-address", + defaultVersion, + (std::tuple { + ContentAddress { + .method = TextIngestionMethod {}, + .hash = hashString(HashType::htSHA256, "Derive(...)"), + }, + ContentAddress { + .method = FileIngestionMethod::Flat, + .hash = hashString(HashType::htSHA1, "blob blob..."), + }, + ContentAddress { + .method = FileIngestionMethod::Recursive, + .hash = hashString(HashType::htSHA256, "(...)"), + }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + derivedPath_1_29, + "derived-path-1.29", + 1 << 8 | 29, + (std::tuple { + DerivedPath::Opaque { + .path = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + }, + DerivedPath::Built { + .drvPath = makeConstantStorePathRef(StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }), + .outputs = OutputsSpec::All { }, + }, + DerivedPath::Built { + .drvPath = makeConstantStorePathRef(StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }), + .outputs = OutputsSpec::Names { "x", "y" }, + }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + derivedPath_1_30, + "derived-path-1.30", + 1 << 8 | 30, + (std::tuple { + DerivedPath::Opaque { + .path = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + }, + DerivedPath::Opaque { + .path = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv" }, + }, + DerivedPath::Built { + .drvPath = makeConstantStorePathRef(StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }), + .outputs = OutputsSpec::All { }, + }, + DerivedPath::Built { + .drvPath = makeConstantStorePathRef(StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }), + .outputs = OutputsSpec::Names { "x", "y" }, + }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + drvOutput, + "drv-output", + defaultVersion, + (std::tuple { + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "quux", + }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + realisation, + "realisation", + defaultVersion, + (std::tuple { + Realisation { + .id = DrvOutput { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + .signatures = { "asdf", "qwer" }, + }, + Realisation { + .id = { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + .signatures = { "asdf", "qwer" }, + .dependentRealisations = { + { + DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "quux", + }, + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + }, + }, + }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + buildResult_1_27, + "build-result-1.27", + 1 << 8 | 27, + 
({ + using namespace std::literals::chrono_literals; + std::tuple t { + BuildResult { + .status = BuildResult::OutputRejected, + .errorMsg = "no idea why", + }, + BuildResult { + .status = BuildResult::NotDeterministic, + .errorMsg = "no idea why", + }, + BuildResult { + .status = BuildResult::Built, + }, + }; + t; + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + buildResult_1_28, + "build-result-1.28", + 1 << 8 | 28, + ({ + using namespace std::literals::chrono_literals; + std::tuple t { + BuildResult { + .status = BuildResult::OutputRejected, + .errorMsg = "no idea why", + }, + BuildResult { + .status = BuildResult::NotDeterministic, + .errorMsg = "no idea why", + }, + BuildResult { + .status = BuildResult::Built, + .builtOutputs = { + { + "foo", + { + .id = DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "foo", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + }, + }, + { + "bar", + { + .id = DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "bar", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar" }, + }, + }, + }, + }, + }; + t; + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + buildResult_1_29, + "build-result-1.29", + 1 << 8 | 29, + ({ + using namespace std::literals::chrono_literals; + std::tuple t { + BuildResult { + .status = BuildResult::OutputRejected, + .errorMsg = "no idea why", + }, + BuildResult { + .status = BuildResult::NotDeterministic, + .errorMsg = "no idea why", + .timesBuilt = 3, + .isNonDeterministic = true, + .startTime = 30, + .stopTime = 50, + }, + BuildResult { + .status = BuildResult::Built, + .timesBuilt = 1, + .builtOutputs = { + { + "foo", + { + .id = DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "foo", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + }, + }, + { + "bar", + { + .id = DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "bar", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar" }, + }, + }, + }, + .startTime = 30, + .stopTime = 50, +#if 0 + // These fields are not yet serialized. + // FIXME Include in next version of protocol or document + // why they are skipped. 
+ .cpuUser = std::chrono::milliseconds(500s), + .cpuSystem = std::chrono::milliseconds(604s), +#endif + }, + }; + t; + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + keyedBuildResult_1_29, + "keyed-build-result-1.29", + 1 << 8 | 29, + ({ + using namespace std::literals::chrono_literals; + std::tuple t { + KeyedBuildResult { + { + .status = KeyedBuildResult::OutputRejected, + .errorMsg = "no idea why", + }, + /* .path = */ DerivedPath::Opaque { + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-xxx" }, + }, + }, + KeyedBuildResult { + { + .status = KeyedBuildResult::NotDeterministic, + .errorMsg = "no idea why", + .timesBuilt = 3, + .isNonDeterministic = true, + .startTime = 30, + .stopTime = 50, + }, + /* .path = */ DerivedPath::Built { + .drvPath = makeConstantStorePathRef(StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }), + .outputs = OutputsSpec::Names { "out" }, + }, + }, + }; + t; + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + unkeyedValidPathInfo_1_15, + "unkeyed-valid-path-info-1.15", + 1 << 8 | 15, + (std::tuple { + ({ + UnkeyedValidPathInfo info { + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + }; + info.registrationTime = 23423; + info.narSize = 34878; + info; + }), + ({ + UnkeyedValidPathInfo info { + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + }; + info.deriver = StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }; + info.references = { + StorePath { + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv", + }, + }; + info.registrationTime = 23423; + info.narSize = 34878; + info; + }), + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + validPathInfo_1_15, + "valid-path-info-1.15", + 1 << 8 | 15, + (std::tuple { + ({ + ValidPathInfo info { + StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + }, + UnkeyedValidPathInfo { + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + }, + }; + info.registrationTime = 23423; + info.narSize = 34878; + info; + }), + ({ + ValidPathInfo info { + StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + }, + UnkeyedValidPathInfo { + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + }, + }; + info.deriver = StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }; + info.references = { + // other reference + StorePath { + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo", + }, + // self reference + StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + }, + }; + info.registrationTime = 23423; + info.narSize = 34878; + info; + }), + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + validPathInfo_1_16, + "valid-path-info-1.16", + 1 << 8 | 16, + (std::tuple { + ({ + ValidPathInfo info { + StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + }, + UnkeyedValidPathInfo { + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + }, + }; + info.registrationTime = 23423; + info.narSize = 34878; + info.ultimate = true; + info; + }), + ({ + ValidPathInfo info { + StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + }, + UnkeyedValidPathInfo { + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + }, + }; + info.deriver = StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }; + info.references = { + // other reference + StorePath { + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo", + }, + // self reference + StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + }, + }; + info.registrationTime = 23423; + info.narSize = 34878; + info.sigs = { + "fake-sig-1", + 
"fake-sig-2", + }, + info; + }), + ({ + ValidPathInfo info { + *LibStoreTest::store, + "foo", + FixedOutputInfo { + .method = FileIngestionMethod::Recursive, + .hash = hashString(HashType::htSHA256, "(...)"), + .references = { + .others = { + StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + }, + }, + .self = true, + }, + }, + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + }; + info.registrationTime = 23423; + info.narSize = 34878; + info; + }), + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + optionalTrustedFlag, + "optional-trusted-flag", + defaultVersion, + (std::tuple, std::optional, std::optional> { + std::nullopt, + std::optional { Trusted }, + std::optional { NotTrusted }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + vector, + "vector", + defaultVersion, + (std::tuple, std::vector, std::vector, std::vector>> { + { }, + { "" }, + { "", "foo", "bar" }, + { {}, { "" }, { "", "1", "2" } }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + set, + "set", + defaultVersion, + (std::tuple, std::set, std::set, std::set>> { + { }, + { "" }, + { "", "foo", "bar" }, + { {}, { "" }, { "", "1", "2" } }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + optionalStorePath, + "optional-store-path", + defaultVersion, + (std::tuple, std::optional> { + std::nullopt, + std::optional { + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" }, + }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + optionalContentAddress, + "optional-content-address", + defaultVersion, + (std::tuple, std::optional> { + std::nullopt, + std::optional { + ContentAddress { + .method = FileIngestionMethod::Flat, + .hash = hashString(HashType::htSHA1, "blob blob..."), + }, + }, + })) + +} diff --git a/src/libstore/worker-protocol-impl.hh b/src/libstore/worker-protocol-impl.hh index d3d2792ff..c043588d6 100644 --- a/src/libstore/worker-protocol-impl.hh +++ b/src/libstore/worker-protocol-impl.hh @@ -9,70 +9,51 @@ */ #include "worker-protocol.hh" +#include "length-prefixed-protocol-helper.hh" namespace nix { +/* protocol-agnostic templates */ + +#define WORKER_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \ + TEMPLATE T WorkerProto::Serialise< T >::read(const Store & store, WorkerProto::ReadConn conn) \ + { \ + return LengthPrefixedProtoHelper::read(store, conn); \ + } \ + TEMPLATE void WorkerProto::Serialise< T >::write(const Store & store, WorkerProto::WriteConn conn, const T & t) \ + { \ + LengthPrefixedProtoHelper::write(store, conn, t); \ + } + +WORKER_USE_LENGTH_PREFIX_SERIALISER(template, std::vector) +WORKER_USE_LENGTH_PREFIX_SERIALISER(template, std::set) +WORKER_USE_LENGTH_PREFIX_SERIALISER(template, std::tuple) + +#define COMMA_ , +WORKER_USE_LENGTH_PREFIX_SERIALISER( + template, + std::map) +#undef COMMA_ + +/** + * Use `CommonProto` where possible. 
+ */ template -std::vector WorkerProto::Serialise>::read(const Store & store, WorkerProto::ReadConn conn) +struct WorkerProto::Serialise { - std::vector resSet; - auto size = readNum(conn.from); - while (size--) { - resSet.push_back(WorkerProto::Serialise::read(store, conn)); + static T read(const Store & store, WorkerProto::ReadConn conn) + { + return CommonProto::Serialise::read(store, + CommonProto::ReadConn { .from = conn.from }); } - return resSet; -} + static void write(const Store & store, WorkerProto::WriteConn conn, const T & t) + { + CommonProto::Serialise::write(store, + CommonProto::WriteConn { .to = conn.to }, + t); + } +}; -template -void WorkerProto::Serialise>::write(const Store & store, WorkerProto::WriteConn conn, const std::vector & resSet) -{ - conn.to << resSet.size(); - for (auto & key : resSet) { - WorkerProto::Serialise::write(store, conn, key); - } -} - -template -std::set WorkerProto::Serialise>::read(const Store & store, WorkerProto::ReadConn conn) -{ - std::set resSet; - auto size = readNum(conn.from); - while (size--) { - resSet.insert(WorkerProto::Serialise::read(store, conn)); - } - return resSet; -} - -template -void WorkerProto::Serialise>::write(const Store & store, WorkerProto::WriteConn conn, const std::set & resSet) -{ - conn.to << resSet.size(); - for (auto & key : resSet) { - WorkerProto::Serialise::write(store, conn, key); - } -} - -template -std::map WorkerProto::Serialise>::read(const Store & store, WorkerProto::ReadConn conn) -{ - std::map resMap; - auto size = readNum(conn.from); - while (size--) { - auto k = WorkerProto::Serialise::read(store, conn); - auto v = WorkerProto::Serialise::read(store, conn); - resMap.insert_or_assign(std::move(k), std::move(v)); - } - return resMap; -} - -template -void WorkerProto::Serialise>::write(const Store & store, WorkerProto::WriteConn conn, const std::map & resMap) -{ - conn.to << resMap.size(); - for (auto & i : resMap) { - WorkerProto::Serialise::write(store, conn, i.first); - WorkerProto::Serialise::write(store, conn, i.second); - } -} +/* protocol-specific templates */ } diff --git a/src/libstore/worker-protocol.cc b/src/libstore/worker-protocol.cc index a23130743..d618b9bd8 100644 --- a/src/libstore/worker-protocol.cc +++ b/src/libstore/worker-protocol.cc @@ -6,33 +6,13 @@ #include "worker-protocol.hh" #include "worker-protocol-impl.hh" #include "archive.hh" -#include "derivations.hh" +#include "path-info.hh" #include namespace nix { -std::string WorkerProto::Serialise::read(const Store & store, WorkerProto::ReadConn conn) -{ - return readString(conn.from); -} - -void WorkerProto::Serialise::write(const Store & store, WorkerProto::WriteConn conn, const std::string & str) -{ - conn.to << str; -} - - -StorePath WorkerProto::Serialise::read(const Store & store, WorkerProto::ReadConn conn) -{ - return store.parseStorePath(readString(conn.from)); -} - -void WorkerProto::Serialise::write(const Store & store, WorkerProto::WriteConn conn, const StorePath & storePath) -{ - conn.to << store.printStorePath(storePath); -} - +/* protocol-specific definitions */ std::optional WorkerProto::Serialise>::read(const Store & store, WorkerProto::ReadConn conn) { @@ -68,52 +48,37 @@ void WorkerProto::Serialise>::write(const Store & sto } -ContentAddress WorkerProto::Serialise::read(const Store & store, WorkerProto::ReadConn conn) -{ - return ContentAddress::parse(readString(conn.from)); -} - -void WorkerProto::Serialise::write(const Store & store, WorkerProto::WriteConn conn, const ContentAddress & ca) -{ - conn.to << 
renderContentAddress(ca); -} - - DerivedPath WorkerProto::Serialise::read(const Store & store, WorkerProto::ReadConn conn) { auto s = readString(conn.from); - return DerivedPath::parseLegacy(store, s); + if (GET_PROTOCOL_MINOR(conn.version) >= 30) { + return DerivedPath::parseLegacy(store, s); + } else { + return parsePathWithOutputs(store, s).toDerivedPath(); + } } void WorkerProto::Serialise::write(const Store & store, WorkerProto::WriteConn conn, const DerivedPath & req) { - conn.to << req.to_string_legacy(store); -} - - -Realisation WorkerProto::Serialise::read(const Store & store, WorkerProto::ReadConn conn) -{ - std::string rawInput = readString(conn.from); - return Realisation::fromJSON( - nlohmann::json::parse(rawInput), - "remote-protocol" - ); -} - -void WorkerProto::Serialise::write(const Store & store, WorkerProto::WriteConn conn, const Realisation & realisation) -{ - conn.to << realisation.toJSON().dump(); -} - - -DrvOutput WorkerProto::Serialise::read(const Store & store, WorkerProto::ReadConn conn) -{ - return DrvOutput::parse(readString(conn.from)); -} - -void WorkerProto::Serialise::write(const Store & store, WorkerProto::WriteConn conn, const DrvOutput & drvOutput) -{ - conn.to << drvOutput.to_string(); + if (GET_PROTOCOL_MINOR(conn.version) >= 30) { + conn.to << req.to_string_legacy(store); + } else { + auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(req); + std::visit(overloaded { + [&](const StorePathWithOutputs & s) { + conn.to << s.to_string(store); + }, + [&](const StorePath & drvPath) { + throw Error("trying to request '%s', but daemon protocol %d.%d is too old (< 1.29) to request a derivation file", + store.printStorePath(drvPath), + GET_PROTOCOL_MAJOR(conn.version), + GET_PROTOCOL_MINOR(conn.version)); + }, + [&](std::monostate) { + throw Error("wanted to build a derivation that is itself a build product, but protocols do not support that. 
Try upgrading the Nix on the other end of this connection"); + }, + }, sOrDrvPath); + } } @@ -138,17 +103,21 @@ BuildResult WorkerProto::Serialise::read(const Store & store, Worke { BuildResult res; res.status = (BuildResult::Status) readInt(conn.from); - conn.from - >> res.errorMsg - >> res.timesBuilt - >> res.isNonDeterministic - >> res.startTime - >> res.stopTime; - auto builtOutputs = WorkerProto::Serialise::read(store, conn); - for (auto && [output, realisation] : builtOutputs) - res.builtOutputs.insert_or_assign( - std::move(output.outputName), - std::move(realisation)); + conn.from >> res.errorMsg; + if (GET_PROTOCOL_MINOR(conn.version) >= 29) { + conn.from + >> res.timesBuilt + >> res.isNonDeterministic + >> res.startTime + >> res.stopTime; + } + if (GET_PROTOCOL_MINOR(conn.version) >= 28) { + auto builtOutputs = WorkerProto::Serialise::read(store, conn); + for (auto && [output, realisation] : builtOutputs) + res.builtOutputs.insert_or_assign( + std::move(output.outputName), + std::move(realisation)); + } return res; } @@ -156,38 +125,68 @@ void WorkerProto::Serialise::write(const Store & store, WorkerProto { conn.to << res.status - << res.errorMsg - << res.timesBuilt - << res.isNonDeterministic - << res.startTime - << res.stopTime; - DrvOutputs builtOutputs; - for (auto & [output, realisation] : res.builtOutputs) - builtOutputs.insert_or_assign(realisation.id, realisation); - WorkerProto::write(store, conn, builtOutputs); + << res.errorMsg; + if (GET_PROTOCOL_MINOR(conn.version) >= 29) { + conn.to + << res.timesBuilt + << res.isNonDeterministic + << res.startTime + << res.stopTime; + } + if (GET_PROTOCOL_MINOR(conn.version) >= 28) { + DrvOutputs builtOutputs; + for (auto & [output, realisation] : res.builtOutputs) + builtOutputs.insert_or_assign(realisation.id, realisation); + WorkerProto::write(store, conn, builtOutputs); + } } -std::optional WorkerProto::Serialise>::read(const Store & store, WorkerProto::ReadConn conn) +ValidPathInfo WorkerProto::Serialise::read(const Store & store, ReadConn conn) { - auto s = readString(conn.from); - return s == "" ? std::optional {} : store.parseStorePath(s); + auto path = WorkerProto::Serialise::read(store, conn); + return ValidPathInfo { + std::move(path), + WorkerProto::Serialise::read(store, conn), + }; } -void WorkerProto::Serialise>::write(const Store & store, WorkerProto::WriteConn conn, const std::optional & storePathOpt) +void WorkerProto::Serialise::write(const Store & store, WriteConn conn, const ValidPathInfo & pathInfo) { - conn.to << (storePathOpt ? 
store.printStorePath(*storePathOpt) : ""); + WorkerProto::write(store, conn, pathInfo.path); + WorkerProto::write(store, conn, static_cast(pathInfo)); } -std::optional WorkerProto::Serialise>::read(const Store & store, WorkerProto::ReadConn conn) +UnkeyedValidPathInfo WorkerProto::Serialise::read(const Store & store, ReadConn conn) { - return ContentAddress::parseOpt(readString(conn.from)); + auto deriver = readString(conn.from); + auto narHash = Hash::parseAny(readString(conn.from), htSHA256); + UnkeyedValidPathInfo info(narHash); + if (deriver != "") info.deriver = store.parseStorePath(deriver); + info.references = WorkerProto::Serialise::read(store, conn); + conn.from >> info.registrationTime >> info.narSize; + if (GET_PROTOCOL_MINOR(conn.version) >= 16) { + conn.from >> info.ultimate; + info.sigs = readStrings(conn.from); + info.ca = ContentAddress::parseOpt(readString(conn.from)); + } + return info; } -void WorkerProto::Serialise>::write(const Store & store, WorkerProto::WriteConn conn, const std::optional & caOpt) +void WorkerProto::Serialise::write(const Store & store, WriteConn conn, const UnkeyedValidPathInfo & pathInfo) { - conn.to << (caOpt ? renderContentAddress(*caOpt) : ""); + conn.to + << (pathInfo.deriver ? store.printStorePath(*pathInfo.deriver) : "") + << pathInfo.narHash.to_string(HashFormat::Base16, false); + WorkerProto::write(store, conn, pathInfo.references); + conn.to << pathInfo.registrationTime << pathInfo.narSize; + if (GET_PROTOCOL_MINOR(conn.version) >= 16) { + conn.to + << pathInfo.ultimate + << pathInfo.sigs + << renderContentAddress(pathInfo.ca); + } } } diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index ff762c924..dcd54ad16 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -1,7 +1,7 @@ #pragma once ///@file -#include "serialise.hh" +#include "common-protocol.hh" namespace nix { @@ -29,10 +29,10 @@ struct Source; // items being serialised struct DerivedPath; -struct DrvOutput; -struct Realisation; struct BuildResult; struct KeyedBuildResult; +struct ValidPathInfo; +struct UnkeyedValidPathInfo; enum TrustedFlag : bool; @@ -49,26 +49,29 @@ struct WorkerProto */ enum struct Op : uint64_t; + /** + * Version type for the protocol. + * + * @todo Convert to struct with separate major vs minor fields. + */ + using Version = unsigned int; + /** * A unidirectional read connection, to be used by the read half of the * canonical serializers below. - * - * This currently is just a `Source &`, but more fields will be added - * later. */ struct ReadConn { Source & from; + Version version; }; /** * A unidirectional write connection, to be used by the write half of the * canonical serializers below. - * - * This currently is just a `Sink &`, but more fields will be added - * later. */ struct WriteConn { Sink & to; + Version version; }; /** @@ -191,58 +194,36 @@ inline std::ostream & operator << (std::ostream & s, WorkerProto::Op op) * be legal specialization syntax. See below for what that looks like in * practice. 
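+ *
+ * For example, `template<> DECLARE_WORKER_SERIALISER(DerivedPath);`
+ * (see the renamed macro just below) expands, roughly, to:
+ *
+ *     template<>
+ *     struct WorkerProto::Serialise<DerivedPath>
+ *     {
+ *         static DerivedPath read(const Store & store, WorkerProto::ReadConn conn);
+ *         static void write(const Store & store, WorkerProto::WriteConn conn, const DerivedPath & t);
+ *     };
+ *
+ * with the definitions supplied in worker-protocol.cc.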
*/ -#define MAKE_WORKER_PROTO(T) \ - struct WorkerProto::Serialise< T > { \ +#define DECLARE_WORKER_SERIALISER(T) \ + struct WorkerProto::Serialise< T > \ + { \ static T read(const Store & store, WorkerProto::ReadConn conn); \ static void write(const Store & store, WorkerProto::WriteConn conn, const T & t); \ }; template<> -MAKE_WORKER_PROTO(std::string); +DECLARE_WORKER_SERIALISER(DerivedPath); template<> -MAKE_WORKER_PROTO(StorePath); +DECLARE_WORKER_SERIALISER(BuildResult); template<> -MAKE_WORKER_PROTO(ContentAddress); +DECLARE_WORKER_SERIALISER(KeyedBuildResult); template<> -MAKE_WORKER_PROTO(DerivedPath); +DECLARE_WORKER_SERIALISER(ValidPathInfo); template<> -MAKE_WORKER_PROTO(Realisation); +DECLARE_WORKER_SERIALISER(UnkeyedValidPathInfo); template<> -MAKE_WORKER_PROTO(DrvOutput); -template<> -MAKE_WORKER_PROTO(BuildResult); -template<> -MAKE_WORKER_PROTO(KeyedBuildResult); -template<> -MAKE_WORKER_PROTO(std::optional); +DECLARE_WORKER_SERIALISER(std::optional); template -MAKE_WORKER_PROTO(std::vector); +DECLARE_WORKER_SERIALISER(std::vector); template -MAKE_WORKER_PROTO(std::set); +DECLARE_WORKER_SERIALISER(std::set); +template +DECLARE_WORKER_SERIALISER(std::tuple); +#define COMMA_ , template -#define X_ std::map -MAKE_WORKER_PROTO(X_); -#undef X_ - -/** - * These use the empty string for the null case, relying on the fact - * that the underlying types never serialise to the empty string. - * - * We do this instead of a generic std::optional instance because - * ordinal tags (0 or 1, here) are a bit of a compatability hazard. For - * the same reason, we don't have a std::variant instances (ordinal - * tags 0...n). - * - * We could the generic instances and then these as specializations for - * compatability, but that's proven a bit finnicky, and also makes the - * worker protocol harder to implement in other languages where such - * specializations may not be allowed. 
- */ -template<> -MAKE_WORKER_PROTO(std::optional); -template<> -MAKE_WORKER_PROTO(std::optional); +DECLARE_WORKER_SERIALISER(std::map); +#undef COMMA_ } diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 268a798d9..3b1a1e0ef 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -14,6 +14,7 @@ #include "archive.hh" #include "util.hh" #include "config.hh" +#include "posix-source-accessor.hh" namespace nix { @@ -27,8 +28,6 @@ struct ArchiveSettings : Config #endif "use-case-hack", "Whether to enable a Darwin-specific hack for dealing with file name collisions."}; - Setting preallocateContents{this, false, "preallocate-contents", - "Whether to preallocate files when writing objects with known size."}; }; static ArchiveSettings archiveSettings; @@ -38,91 +37,87 @@ static GlobalConfig::Register rArchiveSettings(&archiveSettings); PathFilter defaultPathFilter = [](const Path &) { return true; }; -static void dumpContents(const Path & path, off_t size, - Sink & sink) +void SourceAccessor::dumpPath( + const CanonPath & path, + Sink & sink, + PathFilter & filter) { - sink << "contents" << size; + auto dumpContents = [&](const CanonPath & path) + { + sink << "contents"; + std::optional size; + readFile(path, sink, [&](uint64_t _size) + { + size = _size; + sink << _size; + }); + assert(size); + writePadding(*size, sink); + }; - AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC); - if (!fd) throw SysError("opening file '%1%'", path); + std::function dump; - std::vector buf(65536); - size_t left = size; + dump = [&](const CanonPath & path) { + checkInterrupt(); - while (left > 0) { - auto n = std::min(left, buf.size()); - readFull(fd.get(), buf.data(), n); - left -= n; - sink({buf.data(), n}); - } + auto st = lstat(path); - writePadding(size, sink); -} + sink << "("; + if (st.type == tRegular) { + sink << "type" << "regular"; + if (st.isExecutable) + sink << "executable" << ""; + dumpContents(path); + } -static time_t dump(const Path & path, Sink & sink, PathFilter & filter) -{ - checkInterrupt(); + else if (st.type == tDirectory) { + sink << "type" << "directory"; - auto st = lstat(path); - time_t result = st.st_mtime; + /* If we're on a case-insensitive system like macOS, undo + the case hack applied by restorePath(). */ + std::map unhacked; + for (auto & i : readDirectory(path)) + if (archiveSettings.useCaseHack) { + std::string name(i.first); + size_t pos = i.first.find(caseHackSuffix); + if (pos != std::string::npos) { + debug("removing case hack suffix from '%s'", path + i.first); + name.erase(pos); + } + if (!unhacked.emplace(name, i.first).second) + throw Error("file name collision in between '%s' and '%s'", + (path + unhacked[name]), + (path + i.first)); + } else + unhacked.emplace(i.first, i.first); - sink << "("; - - if (S_ISREG(st.st_mode)) { - sink << "type" << "regular"; - if (st.st_mode & S_IXUSR) - sink << "executable" << ""; - dumpContents(path, st.st_size, sink); - } - - else if (S_ISDIR(st.st_mode)) { - sink << "type" << "directory"; - - /* If we're on a case-insensitive system like macOS, undo - the case hack applied by restorePath(). 
*/ - std::map unhacked; - for (auto & i : readDirectory(path)) - if (archiveSettings.useCaseHack) { - std::string name(i.name); - size_t pos = i.name.find(caseHackSuffix); - if (pos != std::string::npos) { - debug("removing case hack suffix from '%1%'", path + "/" + i.name); - name.erase(pos); + for (auto & i : unhacked) + if (filter((path + i.first).abs())) { + sink << "entry" << "(" << "name" << i.first << "node"; + dump(path + i.second); + sink << ")"; } - if (!unhacked.emplace(name, i.name).second) - throw Error("file name collision in between '%1%' and '%2%'", - (path + "/" + unhacked[name]), - (path + "/" + i.name)); - } else - unhacked.emplace(i.name, i.name); + } - for (auto & i : unhacked) - if (filter(path + "/" + i.first)) { - sink << "entry" << "(" << "name" << i.first << "node"; - auto tmp_mtime = dump(path + "/" + i.second, sink, filter); - if (tmp_mtime > result) { - result = tmp_mtime; - } - sink << ")"; - } - } + else if (st.type == tSymlink) + sink << "type" << "symlink" << "target" << readLink(path); - else if (S_ISLNK(st.st_mode)) - sink << "type" << "symlink" << "target" << readLink(path); + else throw Error("file '%s' has an unsupported type", path); - else throw Error("file '%1%' has an unsupported type", path); + sink << ")"; + }; - sink << ")"; - - return result; + sink << narVersionMagic1; + dump(path); } time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter) { - sink << narVersionMagic1; - return dump(path, sink, filter); + PosixSourceAccessor accessor; + accessor.dumpPath(CanonPath::fromCwd(path), sink, filter); + return accessor.mtime; } void dumpPath(const Path & path, Sink & sink, PathFilter & filter) @@ -143,17 +138,6 @@ static SerialisationError badArchive(const std::string & s) } -#if 0 -static void skipGeneric(Source & source) -{ - if (readString(source) == "(") { - while (readString(source) != ")") - skipGeneric(source); - } -} -#endif - - static void parseContents(ParseSink & sink, Source & source, const Path & path) { uint64_t size = readLongLong(source); @@ -302,71 +286,6 @@ void parseDump(ParseSink & sink, Source & source) } -struct RestoreSink : ParseSink -{ - Path dstPath; - AutoCloseFD fd; - - void createDirectory(const Path & path) override - { - Path p = dstPath + path; - if (mkdir(p.c_str(), 0777) == -1) - throw SysError("creating directory '%1%'", p); - }; - - void createRegularFile(const Path & path) override - { - Path p = dstPath + path; - fd = open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666); - if (!fd) throw SysError("creating file '%1%'", p); - } - - void closeRegularFile() override - { - /* Call close explicitly to make sure the error is checked */ - fd.close(); - } - - void isExecutable() override - { - struct stat st; - if (fstat(fd.get(), &st) == -1) - throw SysError("fstat"); - if (fchmod(fd.get(), st.st_mode | (S_IXUSR | S_IXGRP | S_IXOTH)) == -1) - throw SysError("fchmod"); - } - - void preallocateContents(uint64_t len) override - { - if (!archiveSettings.preallocateContents) - return; - -#if HAVE_POSIX_FALLOCATE - if (len) { - errno = posix_fallocate(fd.get(), 0, len); - /* Note that EINVAL may indicate that the underlying - filesystem doesn't support preallocation (e.g. on - OpenSolaris). Since preallocation is just an - optimisation, ignore it. 
*/ - if (errno && errno != EINVAL && errno != EOPNOTSUPP && errno != ENOSYS) - throw SysError("preallocating file of %1% bytes", len); - } -#endif - } - - void receiveContents(std::string_view data) override - { - writeFull(fd.get(), data); - } - - void createSymlink(const Path & path, const std::string & target) override - { - Path p = dstPath + path; - nix::createSymlink(target, p); - } -}; - - void restorePath(const Path & path, Source & source) { RestoreSink sink; diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh index 2cf164a41..3530783c1 100644 --- a/src/libutil/archive.hh +++ b/src/libutil/archive.hh @@ -3,6 +3,7 @@ #include "types.hh" #include "serialise.hh" +#include "fs-sink.hh" namespace nix { @@ -72,22 +73,6 @@ time_t dumpPathAndGetMtime(const Path & path, Sink & sink, */ void dumpString(std::string_view s, Sink & sink); -/** - * \todo Fix this API, it sucks. - */ -struct ParseSink -{ - virtual void createDirectory(const Path & path) { }; - - virtual void createRegularFile(const Path & path) { }; - virtual void closeRegularFile() { }; - virtual void isExecutable() { }; - virtual void preallocateContents(uint64_t size) { }; - virtual void receiveContents(std::string_view data) { }; - - virtual void createSymlink(const Path & path, const std::string & target) { }; -}; - /** * If the NAR archive contains a single file at top-level, then save * the contents of the file to `s`. Otherwise barf. diff --git a/src/libutil/args.cc b/src/libutil/args.cc index 3cf3ed9ca..6bc3cae07 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -1,4 +1,5 @@ #include "args.hh" +#include "args/root.hh" #include "hash.hh" #include "json-utils.hh" @@ -26,6 +27,11 @@ void Args::removeFlag(const std::string & longName) longFlags.erase(flag); } +void Completions::setType(AddCompletions::Type t) +{ + type = t; +} + void Completions::add(std::string completion, std::string description) { description = trim(description); @@ -37,7 +43,7 @@ void Completions::add(std::string completion, std::string description) if (needs_ellipsis) description.append(" [...]"); } - insert(Completion { + completions.insert(Completion { .completion = completion, .description = description }); @@ -46,12 +52,20 @@ void Completions::add(std::string completion, std::string description) bool Completion::operator<(const Completion & other) const { return completion < other.completion || (completion == other.completion && description < other.description); } -CompletionType completionType = ctNormal; -std::shared_ptr completions; - std::string completionMarker = "___COMPLETE___"; -static std::optional needsCompletion(std::string_view s) +RootArgs & Args::getRoot() +{ + Args * p = this; + while (p->parent) + p = p->parent; + + auto * res = dynamic_cast(p); + assert(res); + return *res; +} + +std::optional RootArgs::needsCompletion(std::string_view s) { if (!completions) return {}; auto i = s.find(completionMarker); @@ -60,7 +74,7 @@ static std::optional needsCompletion(std::string_view s) return {}; } -void Args::parseCmdline(const Strings & _cmdline) +void RootArgs::parseCmdline(const Strings & _cmdline) { Strings pendingArgs; bool dashDash = false; @@ -71,7 +85,7 @@ void Args::parseCmdline(const Strings & _cmdline) size_t n = std::stoi(*s); assert(n > 0 && n <= cmdline.size()); *std::next(cmdline.begin(), n - 1) += completionMarker; - completions = std::make_shared(); + completions = std::make_shared(); verbosity = lvlError; } @@ -125,17 +139,23 @@ void Args::parseCmdline(const Strings & _cmdline) for (auto & f : 
flagExperimentalFeatures) experimentalFeatureSettings.require(f); + /* Now that all the other args are processed, run the deferred completions. + */ + for (auto d : deferredCompletions) + d.completer(*completions, d.n, d.prefix); } bool Args::processFlag(Strings::iterator & pos, Strings::iterator end) { assert(pos != end); + auto & rootArgs = getRoot(); + auto process = [&](const std::string & name, const Flag & flag) -> bool { ++pos; if (auto & f = flag.experimentalFeature) - flagExperimentalFeatures.insert(*f); + rootArgs.flagExperimentalFeatures.insert(*f); std::vector args; bool anyCompleted = false; @@ -146,10 +166,15 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end) "flag '%s' requires %d argument(s), but only %d were given", name, flag.handler.arity, n); } - if (auto prefix = needsCompletion(*pos)) { + if (auto prefix = rootArgs.needsCompletion(*pos)) { anyCompleted = true; - if (flag.completer) - flag.completer(n, *prefix); + if (flag.completer) { + rootArgs.deferredCompletions.push_back({ + .completer = flag.completer, + .n = n, + .prefix = *prefix, + }); + } } args.push_back(*pos++); } @@ -159,14 +184,14 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end) }; if (std::string(*pos, 0, 2) == "--") { - if (auto prefix = needsCompletion(*pos)) { + if (auto prefix = rootArgs.needsCompletion(*pos)) { for (auto & [name, flag] : longFlags) { if (!hiddenCategories.count(flag->category) && hasPrefix(name, std::string(*prefix, 2))) { if (auto & f = flag->experimentalFeature) - flagExperimentalFeatures.insert(*f); - completions->add("--" + name, flag->description); + rootArgs.flagExperimentalFeatures.insert(*f); + rootArgs.completions->add("--" + name, flag->description); } } return false; @@ -183,12 +208,12 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end) return process(std::string("-") + c, *i->second); } - if (auto prefix = needsCompletion(*pos)) { + if (auto prefix = rootArgs.needsCompletion(*pos)) { if (prefix == "-") { - completions->add("--"); + rootArgs.completions->add("--"); for (auto & [flagName, flag] : shortFlags) if (experimentalFeatureSettings.isEnabled(flag->experimentalFeature)) - completions->add(std::string("-") + flagName, flag->description); + rootArgs.completions->add(std::string("-") + flagName, flag->description); } } @@ -203,6 +228,8 @@ bool Args::processArgs(const Strings & args, bool finish) return true; } + auto & rootArgs = getRoot(); + auto & exp = expectedArgs.front(); bool res = false; @@ -211,15 +238,23 @@ bool Args::processArgs(const Strings & args, bool finish) (exp.handler.arity != ArityAny && args.size() == exp.handler.arity)) { std::vector ss; + bool anyCompleted = false; for (const auto &[n, s] : enumerate(args)) { - if (auto prefix = needsCompletion(s)) { + if (auto prefix = rootArgs.needsCompletion(s)) { + anyCompleted = true; ss.push_back(*prefix); - if (exp.completer) - exp.completer(n, *prefix); + if (exp.completer) { + rootArgs.deferredCompletions.push_back({ + .completer = exp.completer, + .n = n, + .prefix = *prefix, + }); + } } else ss.push_back(s); } - exp.handler.fun(ss); + if (!anyCompleted) + exp.handler.fun(ss); expectedArgs.pop_front(); res = true; } @@ -236,6 +271,7 @@ nlohmann::json Args::toJSON() for (auto & [name, flag] : longFlags) { auto j = nlohmann::json::object(); + j["hiddenCategory"] = hiddenCategories.count(flag->category) > 0; if (flag->aliases.count(name)) continue; if (flag->shortName) j["shortName"] = std::string(1, flag->shortName); @@ -270,11 +306,11 
@@ nlohmann::json Args::toJSON() return res; } -static void hashTypeCompleter(size_t index, std::string_view prefix) +static void hashTypeCompleter(AddCompletions & completions, size_t index, std::string_view prefix) { for (auto & type : hashTypes) if (hasPrefix(type, prefix)) - completions->add(type); + completions.add(type); } Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht) @@ -286,7 +322,7 @@ Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht) .handler = {[ht](std::string s) { *ht = parseHashType(s); }}, - .completer = hashTypeCompleter + .completer = hashTypeCompleter, }; } @@ -299,13 +335,13 @@ Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional< .handler = {[oht](std::string s) { *oht = std::optional { parseHashType(s) }; }}, - .completer = hashTypeCompleter + .completer = hashTypeCompleter, }; } -static void _completePath(std::string_view prefix, bool onlyDirs) +static void _completePath(AddCompletions & completions, std::string_view prefix, bool onlyDirs) { - completionType = ctFilenames; + completions.setType(Completions::Type::Filenames); glob_t globbuf; int flags = GLOB_NOESCAPE; #ifdef GLOB_ONLYDIR @@ -319,20 +355,20 @@ static void _completePath(std::string_view prefix, bool onlyDirs) auto st = stat(globbuf.gl_pathv[i]); if (!S_ISDIR(st.st_mode)) continue; } - completions->add(globbuf.gl_pathv[i]); + completions.add(globbuf.gl_pathv[i]); } } globfree(&globbuf); } -void completePath(size_t, std::string_view prefix) +void Args::completePath(AddCompletions & completions, size_t, std::string_view prefix) { - _completePath(prefix, false); + _completePath(completions, prefix, false); } -void completeDir(size_t, std::string_view prefix) +void Args::completeDir(AddCompletions & completions, size_t, std::string_view prefix) { - _completePath(prefix, true); + _completePath(completions, prefix, true); } Strings argvToStrings(int argc, char * * argv) @@ -367,10 +403,10 @@ MultiCommand::MultiCommand(const Commands & commands_) command = {s, i->second()}; command->second->parent = this; }}, - .completer = {[&](size_t, std::string_view prefix) { + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { for (auto & [name, command] : commands) if (hasPrefix(name, prefix)) - completions->add(name); + completions.add(name); }} }); @@ -392,14 +428,6 @@ bool MultiCommand::processArgs(const Strings & args, bool finish) return Args::processArgs(args, finish); } -void MultiCommand::completionHook() -{ - if (command) - return command->second->completionHook(); - else - return Args::completionHook(); -} - nlohmann::json MultiCommand::toJSON() { auto cmds = nlohmann::json::object(); @@ -410,8 +438,8 @@ nlohmann::json MultiCommand::toJSON() auto cat = nlohmann::json::object(); cat["id"] = command->category(); cat["description"] = trim(categories[command->category()]); - j["category"] = std::move(cat); cat["experimental-feature"] = command->experimentalFeature(); + j["category"] = std::move(cat); cmds[name] = std::move(j); } diff --git a/src/libutil/args.hh b/src/libutil/args.hh index d90129796..a5d7cbe4a 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -15,16 +15,14 @@ enum HashType : char; class MultiCommand; +class RootArgs; + +class AddCompletions; + class Args { public: - /** - * Parse the command line, throwing a UsageError if something goes - * wrong. - */ - void parseCmdline(const Strings & cmdline); - /** * Return a short one-line description of the command. 
*/ @@ -39,8 +37,21 @@ public: protected: + /** + * The largest `size_t` is used to indicate the "any" arity, for + * handlers/flags/arguments that accept an arbitrary number of + * arguments. + */ static const size_t ArityAny = std::numeric_limits::max(); + /** + * Arguments (flags/options and positional) have a "handler" which is + * caused when the argument is parsed. The handler has an arbitrary side + * effect, including possible affect further command-line parsing. + * + * There are many constructors in order to support many shorthand + * initializations, and this is used a lot. + */ struct Handler { std::function)> fun; @@ -110,7 +121,31 @@ protected: { } }; - /* Options. */ + /** + * The basic function type of the completion callback. + * + * Used to define `CompleterClosure` and some common case completers + * that individual flags/arguments can use. + * + * The `AddCompletions` that is passed is an interface to the state + * stored as part of the root command + */ + typedef void CompleterFun(AddCompletions &, size_t, std::string_view); + + /** + * The closure type of the completion callback. + * + * This is what is actually stored as part of each Flag / Expected + * Arg. + */ + typedef std::function CompleterClosure; + + /** + * Description of flags / options + * + * These are arguments like `-s` or `--long` that can (mostly) + * appear in any order. + */ struct Flag { typedef std::shared_ptr ptr; @@ -122,7 +157,7 @@ protected: std::string category; Strings labels; Handler handler; - std::function completer; + CompleterClosure completer; std::optional experimentalFeature; @@ -130,22 +165,56 @@ protected: static Flag mkHashTypeOptFlag(std::string && longName, std::optional * oht); }; + /** + * Index of all registered "long" flag descriptions (flags like + * `--long`). + */ std::map longFlags; + + /** + * Index of all registered "short" flag descriptions (flags like + * `-s`). + */ std::map shortFlags; + /** + * Process a single flag and its arguments, pulling from an iterator + * of raw CLI args as needed. + */ virtual bool processFlag(Strings::iterator & pos, Strings::iterator end); - /* Positional arguments. */ + /** + * Description of positional arguments + * + * These are arguments that do not start with a `-`, and for which + * the order does matter. + */ struct ExpectedArg { std::string label; bool optional = false; Handler handler; - std::function completer; + CompleterClosure completer; }; + /** + * Queue of expected positional argument forms. + * + * Positional arugment descriptions are inserted on the back. + * + * As positional arguments are passed, these are popped from the + * front, until there are hopefully none left as all args that were + * expected in fact were passed. + */ std::list expectedArgs; + /** + * Process some positional arugments + * + * @param finish: We have parsed everything else, and these are the only + * arguments left. Used because we accumulate some "pending args" we might + * have left over. + */ virtual bool processArgs(const Strings & args, bool finish); virtual Strings::iterator rewriteArgs(Strings & args, Strings::iterator pos) @@ -159,13 +228,6 @@ protected: */ virtual void initialFlagsProcessed() {} - /** - * Called after the command line has been processed if we need to generate - * completions. Useful for commands that need to know the whole command line - * in order to know what completions to generate. 
- */ - virtual void completionHook() { } - public: void addFlag(Flag && flag); @@ -200,21 +262,30 @@ public: }); } + static CompleterFun completePath; + + static CompleterFun completeDir; + virtual nlohmann::json toJSON(); friend class MultiCommand; + /** + * The parent command, used if this is a subcommand. + * + * Invariant: An Args with a null parent must also be a RootArgs + * + * \todo this would probably be better in the CommandClass. + * getRoot() could be an abstract method that peels off at most one + * layer before recuring. + */ MultiCommand * parent = nullptr; -private: - /** - * Experimental features needed when parsing args. These are checked - * after flag parsing is completed in order to support enabling - * experimental features coming after the flag that needs the - * experimental feature. + * Traverse parent pointers until we find the \ref RootArgs "root + * arguments" object. */ - std::set flagExperimentalFeatures; + RootArgs & getRoot(); }; /** @@ -236,7 +307,7 @@ struct Command : virtual public Args static constexpr Category catDefault = 0; - virtual std::optional experimentalFeature (); + virtual std::optional experimentalFeature(); virtual Category category() { return catDefault; } }; @@ -265,8 +336,6 @@ public: bool processArgs(const Strings & args, bool finish) override; - void completionHook() override; - nlohmann::json toJSON() override; }; @@ -278,21 +347,40 @@ struct Completion { bool operator<(const Completion & other) const; }; -class Completions : public std::set { + +/** + * The abstract interface for completions callbacks + * + * The idea is to restrict the callback so it can only add additional + * completions to the collection, or set the completion type. By making + * it go through this interface, the callback cannot make any other + * changes, or even view the completions / completion type that have + * been set so far. + */ +class AddCompletions +{ public: - void add(std::string completion, std::string description = ""); + + /** + * The type of completion we are collecting. + */ + enum class Type { + Normal, + Filenames, + Attrs, + }; + + /** + * Set the type of the completions being collected + * + * \todo it should not be possible to change the type after it has been set. + */ + virtual void setType(Type type) = 0; + + /** + * Add a single completion to the collection + */ + virtual void add(std::string completion, std::string description = "") = 0; }; -extern std::shared_ptr completions; - -enum CompletionType { - ctNormal, - ctFilenames, - ctAttrs -}; -extern CompletionType completionType; - -void completePath(size_t, std::string_view prefix); - -void completeDir(size_t, std::string_view prefix); } diff --git a/src/libutil/args/root.hh b/src/libutil/args/root.hh new file mode 100644 index 000000000..bb98732a1 --- /dev/null +++ b/src/libutil/args/root.hh @@ -0,0 +1,72 @@ +#pragma once + +#include "args.hh" + +namespace nix { + +/** + * The concrete implementation of a collection of completions. + * + * This is exposed so that the main entry point can print out the + * collected completions. + */ +struct Completions final : AddCompletions +{ + std::set completions; + Type type = Type::Normal; + + void setType(Type type) override; + void add(std::string completion, std::string description = "") override; +}; + +/** + * The outermost Args object. This is the one we will actually parse a command + * line with, whereas the inner ones (if they exists) are subcommands (and this + * is also a MultiCommand or something like it). 
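+ *
+ * A hypothetical completer (the name `completeColor` and its candidate
+ * strings are purely illustrative) would therefore look roughly like:
+ *
+ *     static void completeColor(AddCompletions & completions,
+ *         size_t n, std::string_view prefix)
+ *     {
+ *         for (auto & c : {"red", "green", "blue"})
+ *             if (hasPrefix(c, prefix)) completions.add(c);
+ *     }
+ *
+ * matching the `CompleterFun` signature in Args; a path-style completer
+ * would additionally call `setType(Type::Filenames)`, as `_completePath`
+ * in args.cc does.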
+ * + * This Args contains completions state shared between it and all of its + * descendent Args. + */ +class RootArgs : virtual public Args +{ +public: + /** Parse the command line, throwing a UsageError if something goes + * wrong. + */ + void parseCmdline(const Strings & cmdline); + + std::shared_ptr completions; + +protected: + + friend class Args; + + /** + * A pointer to the completion and its two arguments; a thunk; + */ + struct DeferredCompletion { + const CompleterClosure & completer; + size_t n; + std::string prefix; + }; + + /** + * Completions are run after all args and flags are parsed, so completions + * of earlier arguments can benefit from later arguments. + */ + std::vector deferredCompletions; + + /** + * Experimental features needed when parsing args. These are checked + * after flag parsing is completed in order to support enabling + * experimental features coming after the flag that needs the + * experimental feature. + */ + std::set flagExperimentalFeatures; + +private: + + std::optional needsCompletion(std::string_view s); +}; + +} diff --git a/src/libutil/comparator.hh b/src/libutil/comparator.hh index 9f661c5c3..a4d20a675 100644 --- a/src/libutil/comparator.hh +++ b/src/libutil/comparator.hh @@ -1,6 +1,49 @@ #pragma once ///@file +#define DECLARE_ONE_CMP(PRE, QUAL, COMPARATOR, MY_TYPE) \ + PRE bool QUAL operator COMPARATOR(const MY_TYPE & other) const; +#define DECLARE_EQUAL(prefix, qualification, my_type) \ + DECLARE_ONE_CMP(prefix, qualification, ==, my_type) +#define DECLARE_LEQ(prefix, qualification, my_type) \ + DECLARE_ONE_CMP(prefix, qualification, <, my_type) +#define DECLARE_NEQ(prefix, qualification, my_type) \ + DECLARE_ONE_CMP(prefix, qualification, !=, my_type) + +#define GENERATE_ONE_CMP(PRE, QUAL, COMPARATOR, MY_TYPE, ...) \ + PRE bool QUAL operator COMPARATOR(const MY_TYPE & other) const { \ + __VA_OPT__(const MY_TYPE * me = this;) \ + auto fields1 = std::make_tuple( __VA_ARGS__ ); \ + __VA_OPT__(me = &other;) \ + auto fields2 = std::make_tuple( __VA_ARGS__ ); \ + return fields1 COMPARATOR fields2; \ + } +#define GENERATE_EQUAL(prefix, qualification, my_type, args...) \ + GENERATE_ONE_CMP(prefix, qualification, ==, my_type, args) +#define GENERATE_LEQ(prefix, qualification, my_type, args...) \ + GENERATE_ONE_CMP(prefix, qualification, <, my_type, args) +#define GENERATE_NEQ(prefix, qualification, my_type, args...) \ + GENERATE_ONE_CMP(prefix, qualification, !=, my_type, args) + +/** + * Declare comparison methods without defining them. + */ +#define DECLARE_CMP(my_type) \ + DECLARE_EQUAL(,,my_type) \ + DECLARE_LEQ(,,my_type) \ + DECLARE_NEQ(,,my_type) + +/** + * @param prefix This is for something before each declaration like + * `template`. + * + * @param my_type the type are defining operators for. + */ +#define DECLARE_CMP_EXT(prefix, qualification, my_type) \ + DECLARE_EQUAL(prefix, qualification, my_type) \ + DECLARE_LEQ(prefix, qualification, my_type) \ + DECLARE_NEQ(prefix, qualification, my_type) + /** * Awful hacky generation of the comparison operators by doing a lexicographic * comparison between the choosen fields. @@ -17,18 +60,18 @@ * } * ``` */ -#define GENERATE_ONE_CMP(COMPARATOR, MY_TYPE, ...) \ - bool operator COMPARATOR(const MY_TYPE& other) const { \ - __VA_OPT__(const MY_TYPE* me = this;) \ - auto fields1 = std::make_tuple( __VA_ARGS__ ); \ - __VA_OPT__(me = &other;) \ - auto fields2 = std::make_tuple( __VA_ARGS__ ); \ - return fields1 COMPARATOR fields2; \ - } -#define GENERATE_EQUAL(args...) 
GENERATE_ONE_CMP(==, args) -#define GENERATE_LEQ(args...) GENERATE_ONE_CMP(<, args) -#define GENERATE_NEQ(args...) GENERATE_ONE_CMP(!=, args) #define GENERATE_CMP(args...) \ - GENERATE_EQUAL(args) \ - GENERATE_LEQ(args) \ - GENERATE_NEQ(args) + GENERATE_EQUAL(,,args) \ + GENERATE_LEQ(,,args) \ + GENERATE_NEQ(,,args) + +/** + * @param prefix This is for something before each declaration like + * `template`. + * + * @param my_type the type are defining operators for. + */ +#define GENERATE_CMP_EXT(prefix, my_type, args...) \ + GENERATE_EQUAL(prefix, my_type ::, my_type, args) \ + GENERATE_LEQ(prefix, my_type ::, my_type, args) \ + GENERATE_NEQ(prefix, my_type ::, my_type, args) diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 38d406e8a..8e06273ee 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -9,6 +9,10 @@ namespace nix { +Config::Config(StringMap initials) + : AbstractConfig(std::move(initials)) +{ } + bool Config::set(const std::string & name, const std::string & value) { bool append = false; @@ -29,9 +33,9 @@ bool Config::set(const std::string & name, const std::string & value) void Config::addSetting(AbstractSetting * setting) { - _settings.emplace(setting->name, Config::SettingData(false, setting)); + _settings.emplace(setting->name, Config::SettingData{false, setting}); for (auto & alias : setting->aliases) - _settings.emplace(alias, Config::SettingData(true, setting)); + _settings.emplace(alias, Config::SettingData{true, setting}); bool set = false; @@ -59,6 +63,10 @@ void Config::addSetting(AbstractSetting * setting) } } +AbstractConfig::AbstractConfig(StringMap initials) + : unknownSettings(std::move(initials)) +{ } + void AbstractConfig::warnUnknownSettings() { for (auto & s : unknownSettings) @@ -68,6 +76,7 @@ void AbstractConfig::warnUnknownSettings() void AbstractConfig::reapplyUnknownSettings() { auto unknownSettings2 = std::move(unknownSettings); + unknownSettings = {}; for (auto & s : unknownSettings2) set(s.first, s.second); } @@ -198,6 +207,13 @@ AbstractSetting::AbstractSetting( { } +AbstractSetting::~AbstractSetting() +{ + // Check against a gcc miscompilation causing our constructor + // not to run (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80431). 
+ assert(created == 123); +} + nlohmann::json AbstractSetting::toJSON() { return nlohmann::json(toJSONObject()); @@ -219,6 +235,9 @@ void AbstractSetting::convertToArg(Args & args, const std::string & category) { } + +bool AbstractSetting::isOverridden() const { return overridden; } + template<> std::string BaseSetting::parse(const std::string & str) const { return str; @@ -384,11 +403,33 @@ static Path parsePath(const AbstractSetting & s, const std::string & str) return canonPath(str); } +PathSetting::PathSetting(Config * options, + const Path & def, + const std::string & name, + const std::string & description, + const std::set & aliases) + : BaseSetting(def, true, name, description, aliases) +{ + options->addSetting(this); +} + Path PathSetting::parse(const std::string & str) const { return parsePath(*this, str); } + +OptionalPathSetting::OptionalPathSetting(Config * options, + const std::optional & def, + const std::string & name, + const std::string & description, + const std::set & aliases) + : BaseSetting>(def, true, name, description, aliases) +{ + options->addSetting(this); +} + + std::optional OptionalPathSetting::parse(const std::string & str) const { if (str == "") @@ -397,6 +438,11 @@ std::optional OptionalPathSetting::parse(const std::string & str) const return parsePath(*this, str); } +void OptionalPathSetting::operator =(const std::optional & v) +{ + this->assign(v); +} + bool GlobalConfig::set(const std::string & name, const std::string & value) { for (auto & config : *configRegistrations) diff --git a/src/libutil/config.hh b/src/libutil/config.hh index cc8532587..38c3ce0c4 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -36,8 +36,8 @@ namespace nix { * * std::map settings; * config.getSettings(settings); - * config["system"].description == "the current system" - * config["system"].value == "x86_64-linux" + * settings["system"].description == "the current system" + * settings["system"].value == "x86_64-linux" * * * The above retrieves all currently known settings from the `Config` object @@ -52,9 +52,7 @@ class AbstractConfig protected: StringMap unknownSettings; - AbstractConfig(const StringMap & initials = {}) - : unknownSettings(initials) - { } + AbstractConfig(StringMap initials = {}); public: @@ -150,9 +148,6 @@ public: { bool isAlias; AbstractSetting * setting; - SettingData(bool isAlias, AbstractSetting * setting) - : isAlias(isAlias), setting(setting) - { } }; typedef std::map Settings; @@ -163,9 +158,7 @@ private: public: - Config(const StringMap & initials = {}) - : AbstractConfig(initials) - { } + Config(StringMap initials = {}); bool set(const std::string & name, const std::string & value) override; @@ -206,12 +199,7 @@ protected: const std::set & aliases, std::optional experimentalFeature = std::nullopt); - virtual ~AbstractSetting() - { - // Check against a gcc miscompilation causing our constructor - // not to run (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80431). 
- assert(created == 123); - } + virtual ~AbstractSetting(); virtual void set(const std::string & value, bool append = false) = 0; @@ -229,7 +217,7 @@ protected: virtual void convertToArg(Args & args, const std::string & category); - bool isOverridden() const { return overridden; } + bool isOverridden() const; }; /** @@ -324,8 +312,7 @@ public: template std::ostream & operator <<(std::ostream & str, const BaseSetting & opt) { - str << (const T &) opt; - return str; + return str << static_cast(opt); } template @@ -365,11 +352,7 @@ public: const Path & def, const std::string & name, const std::string & description, - const std::set & aliases = {}) - : BaseSetting(def, true, name, description, aliases) - { - options->addSetting(this); - } + const std::set & aliases = {}); Path parse(const std::string & str) const override; @@ -391,15 +374,11 @@ public: const std::optional & def, const std::string & name, const std::string & description, - const std::set & aliases = {}) - : BaseSetting>(def, true, name, description, aliases) - { - options->addSetting(this); - } + const std::set & aliases = {}); std::optional parse(const std::string & str) const override; - void operator =(const std::optional & v) { this->assign(v); } + void operator =(const std::optional & v); }; struct GlobalConfig : public AbstractConfig diff --git a/src/libutil/error.cc b/src/libutil/error.cc index c9d61942a..dd9612471 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -14,6 +14,11 @@ void BaseError::addTrace(std::shared_ptr && e, hintformat hint, boo err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame }); } +void throwExceptionSelfCheck(){ + // This is meant to be caught in initLibUtil() + throw SysError("C++ exception handling is broken. This would appear to be a problem with the way Nix was compiled and/or linked and/or loaded."); +} + // c++ std::exception descendants must have a 'const char* what()' function. // This stringifies the error and caches it for use by what(), or similarly by msg(). const std::string & BaseError::calcWhat() const @@ -150,6 +155,36 @@ static std::string indent(std::string_view indentFirst, std::string_view indentR return res; } +/** + * A development aid for finding missing positions, to improve error messages. Example use: + * + * NIX_DEVELOPER_SHOW_UNKNOWN_LOCATIONS=1 _NIX_TEST_ACCEPT=1 make tests/lang.sh.test + * git diff -U20 tests + * + */ +static bool printUnknownLocations = getEnv("_NIX_DEVELOPER_SHOW_UNKNOWN_LOCATIONS").has_value(); + +/** + * Print a position, if it is known. + * + * @return true if a position was printed. 
+ */ +static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr & pos) { + bool hasPos = pos && *pos; + if (hasPos) { + oss << "\n" << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":"; + + if (auto loc = pos->getCodeLines()) { + oss << "\n"; + printCodeLines(oss, "", *pos, *loc); + oss << "\n"; + } + } else if (printUnknownLocations) { + oss << "\n" << indent << ANSI_BLUE << "at " ANSI_RED << "UNKNOWN LOCATION" << ANSI_NORMAL << "\n"; + } + return hasPos; +} + std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool showTrace) { std::string prefix; @@ -198,8 +233,6 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s std::ostringstream oss; - auto noSource = ANSI_ITALIC " (source not available)" ANSI_NORMAL "\n"; - /* * Traces * ------ @@ -315,34 +348,15 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s oss << "\n" << "… " << trace.hint.str() << "\n"; - if (trace.pos) { + if (printPosMaybe(oss, ellipsisIndent, trace.pos)) count++; - - oss << "\n" << ellipsisIndent << ANSI_BLUE << "at " ANSI_WARNING << *trace.pos << ANSI_NORMAL << ":"; - - if (auto loc = trace.pos->getCodeLines()) { - oss << "\n"; - printCodeLines(oss, "", *trace.pos, *loc); - oss << "\n"; - } else - oss << noSource; - } } oss << "\n" << prefix; } oss << einfo.msg << "\n"; - if (einfo.errPos) { - oss << "\n" << ANSI_BLUE << "at " ANSI_WARNING << *einfo.errPos << ANSI_NORMAL << ":"; - - if (auto loc = einfo.errPos->getCodeLines()) { - oss << "\n"; - printCodeLines(oss, "", *einfo.errPos, *loc); - oss << "\n"; - } else - oss << noSource; - } + printPosMaybe(oss, "", einfo.errPos); auto suggestions = einfo.suggestions.trim(); if (!suggestions.suggestions.empty()) { diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 6a0923081..c04dcbd77 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -70,6 +70,13 @@ struct AbstractPos uint32_t line = 0; uint32_t column = 0; + /** + * An AbstractPos may be a "null object", representing an unknown position. + * + * Return true if this position is known. + */ + inline operator bool() const { return line != 0; }; + /** * Return the contents of the source file. */ @@ -214,4 +221,8 @@ public: } }; +/** Throw an exception for the purpose of checking that exception handling works; see 'initLibUtil()'. + */ +void throwExceptionSelfCheck(); + } diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc index 0a4041891..c576c0d4d 100644 --- a/src/libutil/experimental-features.cc +++ b/src/libutil/experimental-features.cc @@ -12,7 +12,7 @@ struct ExperimentalFeatureDetails std::string_view description; }; -constexpr std::array xpFeatureDetails = {{ +constexpr std::array xpFeatureDetails = {{ { .tag = Xp::CaDerivations, .name = "ca-derivations", @@ -163,6 +163,8 @@ constexpr std::array xpFeatureDetails = {{ .tag = Xp::ReplFlake, .name = "repl-flake", .description = R"( + *Enabled with [`flakes`](#xp-feature-flakes) since 2.19* + Allow passing [installables](@docroot@/command-ref/new-cli/nix.md#installables) to `nix repl`, making its interface consistent with the other experimental commands. )", }, @@ -171,7 +173,7 @@ constexpr std::array xpFeatureDetails = {{ .name = "auto-allocate-uids", .description = R"( Allows Nix to automatically pick UIDs for builds, rather than creating - `nixbld*` user accounts. See the [`auto-allocate-uids`](#conf-auto-allocate-uids) setting for details. + `nixbld*` user accounts. 
See the [`auto-allocate-uids`](@docroot@/command-ref/conf-file.md#conf-auto-allocate-uids) setting for details. )", }, { @@ -179,7 +181,7 @@ constexpr std::array xpFeatureDetails = {{ .name = "cgroups", .description = R"( Allows Nix to execute builds inside cgroups. See - the [`use-cgroups`](#conf-use-cgroups) setting for details. + the [`use-cgroups`](@docroot@/command-ref/conf-file.md#conf-use-cgroups) setting for details. )", }, { @@ -226,6 +228,13 @@ constexpr std::array xpFeatureDetails = {{ Allow the use of [local overlay store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-overlay-store). )", }, + { + .tag = Xp::ConfigurableImpureEnv, + .name = "configurable-impure-env", + .description = R"( + Allow the use of the [impure-env](@docroot@/command-ref/conf-file.md#conf-impure-env) setting. + )", + } }}; static_assert( @@ -279,7 +288,7 @@ std::set parseFeatures(const std::set & rawFea } MissingExperimentalFeature::MissingExperimentalFeature(ExperimentalFeature feature) - : Error("experimental Nix feature '%1%' is disabled; use '--extra-experimental-features %1%' to override", showExperimentalFeature(feature)) + : Error("experimental Nix feature '%1%' is disabled; add '--extra-experimental-features %1%' to enable it", showExperimentalFeature(feature)) , missingFeature(feature) {} diff --git a/src/libutil/experimental-features.hh b/src/libutil/experimental-features.hh index ffd204cdd..d006db3ef 100644 --- a/src/libutil/experimental-features.hh +++ b/src/libutil/experimental-features.hh @@ -32,6 +32,7 @@ enum struct ExperimentalFeature ParseTomlTimestamps, ReadOnlyLocalStore, LocalOverlayStore, + ConfigurableImpureEnv, }; /** diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc new file mode 100644 index 000000000..a08a723a4 --- /dev/null +++ b/src/libutil/fs-sink.cc @@ -0,0 +1,77 @@ +#include + +#include "config.hh" +#include "fs-sink.hh" + +namespace nix { + +struct RestoreSinkSettings : Config +{ + Setting preallocateContents{this, false, "preallocate-contents", + "Whether to preallocate files when writing objects with known size."}; +}; + +static RestoreSinkSettings restoreSinkSettings; + +static GlobalConfig::Register r1(&restoreSinkSettings); + + +void RestoreSink::createDirectory(const Path & path) +{ + Path p = dstPath + path; + if (mkdir(p.c_str(), 0777) == -1) + throw SysError("creating directory '%1%'", p); +}; + +void RestoreSink::createRegularFile(const Path & path) +{ + Path p = dstPath + path; + fd = open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666); + if (!fd) throw SysError("creating file '%1%'", p); +} + +void RestoreSink::closeRegularFile() +{ + /* Call close explicitly to make sure the error is checked */ + fd.close(); +} + +void RestoreSink::isExecutable() +{ + struct stat st; + if (fstat(fd.get(), &st) == -1) + throw SysError("fstat"); + if (fchmod(fd.get(), st.st_mode | (S_IXUSR | S_IXGRP | S_IXOTH)) == -1) + throw SysError("fchmod"); +} + +void RestoreSink::preallocateContents(uint64_t len) +{ + if (!restoreSinkSettings.preallocateContents) + return; + +#if HAVE_POSIX_FALLOCATE + if (len) { + errno = posix_fallocate(fd.get(), 0, len); + /* Note that EINVAL may indicate that the underlying + filesystem doesn't support preallocation (e.g. on + OpenSolaris). Since preallocation is just an + optimisation, ignore it. 
*/ + if (errno && errno != EINVAL && errno != EOPNOTSUPP && errno != ENOSYS) + throw SysError("preallocating file of %1% bytes", len); + } +#endif +} + +void RestoreSink::receiveContents(std::string_view data) +{ + writeFull(fd.get(), data); +} + +void RestoreSink::createSymlink(const Path & path, const std::string & target) +{ + Path p = dstPath + path; + nix::createSymlink(target, p); +} + +} diff --git a/src/libutil/fs-sink.hh b/src/libutil/fs-sink.hh new file mode 100644 index 000000000..6837e2fc4 --- /dev/null +++ b/src/libutil/fs-sink.hh @@ -0,0 +1,42 @@ +#pragma once +///@file + +#include "types.hh" +#include "serialise.hh" + +namespace nix { + +/** + * \todo Fix this API, it sucks. + */ +struct ParseSink +{ + virtual void createDirectory(const Path & path) { }; + + virtual void createRegularFile(const Path & path) { }; + virtual void closeRegularFile() { }; + virtual void isExecutable() { }; + virtual void preallocateContents(uint64_t size) { }; + virtual void receiveContents(std::string_view data) { }; + + virtual void createSymlink(const Path & path, const std::string & target) { }; +}; + +struct RestoreSink : ParseSink +{ + Path dstPath; + AutoCloseFD fd; + + + void createDirectory(const Path & path) override; + + void createRegularFile(const Path & path) override; + void closeRegularFile() override; + void isExecutable() override; + void preallocateContents(uint64_t size) override; + void receiveContents(std::string_view data) override; + + void createSymlink(const Path & path, const std::string & target) override; +}; + +} diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 2c36d9d94..e297c245b 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -111,26 +111,26 @@ static std::string printHash32(const Hash & hash) std::string printHash16or32(const Hash & hash) { assert(hash.type); - return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false); + return hash.to_string(hash.type == htMD5 ? HashFormat::Base16 : HashFormat::Base32, false); } -std::string Hash::to_string(Base base, bool includeType) const +std::string Hash::to_string(HashFormat hashFormat, bool includeType) const { std::string s; - if (base == SRI || includeType) { + if (hashFormat == HashFormat::SRI || includeType) { s += printHashType(type); - s += base == SRI ? '-' : ':'; + s += hashFormat == HashFormat::SRI ? 
'-' : ':'; } - switch (base) { - case Base16: + switch (hashFormat) { + case HashFormat::Base16: s += printHash16(*this); break; - case Base32: + case HashFormat::Base32: s += printHash32(*this); break; - case Base64: - case SRI: + case HashFormat::Base64: + case HashFormat::SRI: s += base64Encode(std::string_view((const char *) hash, hashSize)); break; } @@ -267,7 +267,7 @@ Hash newHashAllowEmpty(std::string_view hashStr, std::optional ht) if (!ht) throw BadHash("empty hash requires explicit hash type"); Hash h(*ht); - warn("found empty hash, assuming '%s'", h.to_string(SRI, true)); + warn("found empty hash, assuming '%s'", h.to_string(HashFormat::SRI, true)); return h; } else return Hash::parseAny(hashStr, ht); @@ -386,13 +386,48 @@ Hash compressHash(const Hash & hash, unsigned int newSize) } +std::optional parseHashFormatOpt(std::string_view hashFormatName) +{ + if (hashFormatName == "base16") return HashFormat::Base16; + if (hashFormatName == "base32") return HashFormat::Base32; + if (hashFormatName == "base64") return HashFormat::Base64; + if (hashFormatName == "sri") return HashFormat::SRI; + return std::nullopt; +} + +HashFormat parseHashFormat(std::string_view hashFormatName) +{ + auto opt_f = parseHashFormatOpt(hashFormatName); + if (opt_f) + return *opt_f; + throw UsageError("unknown hash format '%1%', expect 'base16', 'base32', 'base64', or 'sri'", hashFormatName); +} + +std::string_view printHashFormat(HashFormat HashFormat) +{ + switch (HashFormat) { + case HashFormat::Base64: + return "base64"; + case HashFormat::Base32: + return "base32"; + case HashFormat::Base16: + return "base16"; + case HashFormat::SRI: + return "sri"; + default: + // illegal hash base enum value internally, as opposed to external input + // which should be validated with nice error message. + assert(false); + } +} + std::optional parseHashTypeOpt(std::string_view s) { if (s == "md5") return htMD5; - else if (s == "sha1") return htSHA1; - else if (s == "sha256") return htSHA256; - else if (s == "sha512") return htSHA512; - else return std::optional {}; + if (s == "sha1") return htSHA1; + if (s == "sha256") return htSHA256; + if (s == "sha512") return htSHA512; + return std::nullopt; } HashType parseHashType(std::string_view s) @@ -401,7 +436,7 @@ HashType parseHashType(std::string_view s) if (opt_h) return *opt_h; else - throw UsageError("unknown hash algorithm '%1%'", s); + throw UsageError("unknown hash algorithm '%1%', expect 'md5', 'sha1', 'sha256', or 'sha512'", s); } std::string_view printHashType(HashType ht) diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index ae3ee40f4..cab3e6eca 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -23,7 +23,21 @@ extern std::set hashTypes; extern const std::string base32Chars; -enum Base : int { Base64, Base32, Base16, SRI }; +/** + * @brief Enumeration representing the hash formats. + */ +enum struct HashFormat : int { + /// @brief Base 64 encoding. + /// @see [IETF RFC 4648, section 4](https://datatracker.ietf.org/doc/html/rfc4648#section-4). + Base64, + /// @brief Nix-specific base-32 encoding. @see base32Chars + Base32, + /// @brief Lowercase hexadecimal encoding. @see base16Chars + Base16, + /// @brief ":", format of the SRI integrity attribute. + /// @see W3C recommendation [Subresource Intergrity](https://www.w3.org/TR/SRI/). + SRI +}; struct Hash @@ -114,16 +128,16 @@ public: * or base-64. By default, this is prefixed by the hash type * (e.g. "sha256:"). 
*/ - std::string to_string(Base base, bool includeType) const; + std::string to_string(HashFormat hashFormat, bool includeType) const; std::string gitRev() const { - return to_string(Base16, false); + return to_string(HashFormat::Base16, false); } std::string gitShortRev() const { - return std::string(to_string(Base16, false), 0, 7); + return std::string(to_string(HashFormat::Base16, false), 0, 7); } static Hash dummy; @@ -145,13 +159,17 @@ std::string printHash16or32(const Hash & hash); Hash hashString(HashType ht, std::string_view s); /** - * Compute the hash of the given file. + * Compute the hash of the given file, hashing its contents directly. + * + * (Metadata, such as the executable permission bit, is ignored.) */ Hash hashFile(HashType ht, const Path & path); /** - * Compute the hash of the given path. The hash is defined as - * (essentially) hashString(ht, dumpPath(path)). + * Compute the hash of the given path, serializing as a Nix Archive and + * then hashing that. + * + * The hash is defined as (essentially) hashString(ht, dumpPath(path)). */ typedef std::pair HashResult; HashResult hashPath(HashType ht, const Path & path, @@ -163,6 +181,21 @@ HashResult hashPath(HashType ht, const Path & path, */ Hash compressHash(const Hash & hash, unsigned int newSize); +/** + * Parse a string representing a hash format. + */ +HashFormat parseHashFormat(std::string_view hashFormatName); + +/** + * std::optional version of parseHashFormat that doesn't throw error. + */ +std::optional parseHashFormatOpt(std::string_view hashFormatName); + +/** + * The reverse of parseHashFormat. + */ +std::string_view printHashFormat(HashFormat hashFormat); + /** * Parse a string representing a hash type. */ diff --git a/src/libutil/local.mk b/src/libutil/local.mk index f880c0fc5..81efaafec 100644 --- a/src/libutil/local.mk +++ b/src/libutil/local.mk @@ -6,8 +6,13 @@ libutil_DIR := $(d) libutil_SOURCES := $(wildcard $(d)/*.cc) +libutil_CXXFLAGS += -I src/libutil + libutil_LDFLAGS += -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context +$(foreach i, $(wildcard $(d)/args/*.hh), \ + $(eval $(call install-file-in, $(i), $(includedir)/nix/args, 0644))) + ifeq ($(HAVE_LIBCPUID), 1) libutil_LDFLAGS += -lcpuid endif diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 5a2dd99af..9d7a141b3 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -67,7 +67,7 @@ public: case lvlWarn: c = '4'; break; case lvlNotice: case lvlInfo: c = '5'; break; case lvlTalkative: case lvlChatty: c = '6'; break; - case lvlDebug: case lvlVomit: c = '7'; + case lvlDebug: case lvlVomit: c = '7'; break; default: c = '7'; break; // should not happen, and missing enum case is reported by -Werror=switch-enum } prefix = std::string("<") + c + ">"; @@ -220,8 +220,8 @@ struct JSONLogger : Logger { json["level"] = lvl; json["type"] = type; json["text"] = s; + json["parent"] = parent; addFields(json, fields); - // FIXME: handle parent write(json); } diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc new file mode 100644 index 000000000..48b4fe626 --- /dev/null +++ b/src/libutil/posix-source-accessor.cc @@ -0,0 +1,86 @@ +#include "posix-source-accessor.hh" + +namespace nix { + +void PosixSourceAccessor::readFile( + const CanonPath & path, + Sink & sink, + std::function sizeCallback) +{ + // FIXME: add O_NOFOLLOW since symlinks should be resolved by the + // caller? 
+ AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC); + if (!fd) + throw SysError("opening file '%1%'", path); + + struct stat st; + if (fstat(fd.get(), &st) == -1) + throw SysError("statting file"); + + sizeCallback(st.st_size); + + off_t left = st.st_size; + + std::vector buf(64 * 1024); + while (left) { + checkInterrupt(); + ssize_t rd = read(fd.get(), buf.data(), (size_t) std::min(left, (off_t) buf.size())); + if (rd == -1) { + if (errno != EINTR) + throw SysError("reading from file '%s'", showPath(path)); + } + else if (rd == 0) + throw SysError("unexpected end-of-file reading '%s'", showPath(path)); + else { + assert(rd <= left); + sink({(char *) buf.data(), (size_t) rd}); + left -= rd; + } + } +} + +bool PosixSourceAccessor::pathExists(const CanonPath & path) +{ + return nix::pathExists(path.abs()); +} + +SourceAccessor::Stat PosixSourceAccessor::lstat(const CanonPath & path) +{ + auto st = nix::lstat(path.abs()); + mtime = std::max(mtime, st.st_mtime); + return Stat { + .type = + S_ISREG(st.st_mode) ? tRegular : + S_ISDIR(st.st_mode) ? tDirectory : + S_ISLNK(st.st_mode) ? tSymlink : + tMisc, + .isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR + }; +} + +SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & path) +{ + DirEntries res; + for (auto & entry : nix::readDirectory(path.abs())) { + std::optional type; + switch (entry.type) { + case DT_REG: type = Type::tRegular; break; + case DT_LNK: type = Type::tSymlink; break; + case DT_DIR: type = Type::tDirectory; break; + } + res.emplace(entry.name, type); + } + return res; +} + +std::string PosixSourceAccessor::readLink(const CanonPath & path) +{ + return nix::readLink(path.abs()); +} + +std::optional PosixSourceAccessor::getPhysicalPath(const CanonPath & path) +{ + return path; +} + +} diff --git a/src/libutil/posix-source-accessor.hh b/src/libutil/posix-source-accessor.hh new file mode 100644 index 000000000..608f96ee2 --- /dev/null +++ b/src/libutil/posix-source-accessor.hh @@ -0,0 +1,34 @@ +#pragma once + +#include "source-accessor.hh" + +namespace nix { + +/** + * A source accessor that uses the Unix filesystem. + */ +struct PosixSourceAccessor : SourceAccessor +{ + /** + * The most recent mtime seen by lstat(). This is a hack to + * support dumpPathAndGetMtime(). Should remove this eventually. 
+ */ + time_t mtime = 0; + + void readFile( + const CanonPath & path, + Sink & sink, + std::function sizeCallback) override; + + bool pathExists(const CanonPath & path) override; + + Stat lstat(const CanonPath & path) override; + + DirEntries readDirectory(const CanonPath & path) override; + + std::string readLink(const CanonPath & path) override; + + std::optional getPhysicalPath(const CanonPath & path) override; +}; + +} diff --git a/src/libutil/source-accessor.cc b/src/libutil/source-accessor.cc new file mode 100644 index 000000000..d168a9667 --- /dev/null +++ b/src/libutil/source-accessor.cc @@ -0,0 +1,58 @@ +#include "source-accessor.hh" +#include "archive.hh" + +namespace nix { + +static std::atomic nextNumber{0}; + +SourceAccessor::SourceAccessor() + : number(++nextNumber) +{ +} + +std::string SourceAccessor::readFile(const CanonPath & path) +{ + StringSink sink; + std::optional size; + readFile(path, sink, [&](uint64_t _size) + { + size = _size; + }); + assert(size && *size == sink.s.size()); + return std::move(sink.s); +} + +void SourceAccessor::readFile( + const CanonPath & path, + Sink & sink, + std::function sizeCallback) +{ + auto s = readFile(path); + sizeCallback(s.size()); + sink(s); +} + +Hash SourceAccessor::hashPath( + const CanonPath & path, + PathFilter & filter, + HashType ht) +{ + HashSink sink(ht); + dumpPath(path, sink, filter); + return sink.finish().first; +} + +std::optional SourceAccessor::maybeLstat(const CanonPath & path) +{ + // FIXME: merge these into one operation. + if (!pathExists(path)) + return {}; + return lstat(path); +} + +std::string SourceAccessor::showPath(const CanonPath & path) +{ + return path.abs(); +} + +} diff --git a/src/libutil/source-accessor.hh b/src/libutil/source-accessor.hh new file mode 100644 index 000000000..fd823aa39 --- /dev/null +++ b/src/libutil/source-accessor.hh @@ -0,0 +1,107 @@ +#pragma once + +#include "canon-path.hh" +#include "hash.hh" + +namespace nix { + +struct Sink; + +/** + * A read-only filesystem abstraction. This is used by the Nix + * evaluator and elsewhere for accessing sources in various + * filesystem-like entities (such as the real filesystem, tarballs or + * Git repositories). + */ +struct SourceAccessor +{ + const size_t number; + + SourceAccessor(); + + virtual ~SourceAccessor() + { } + + /** + * Return the contents of a file as a string. + */ + virtual std::string readFile(const CanonPath & path); + + /** + * Write the contents of a file as a sink. `sizeCallback` must be + * called with the size of the file before any data is written to + * the sink. + * + * Note: subclasses of `SourceAccessor` need to implement at least + * one of the `readFile()` variants. + */ + virtual void readFile( + const CanonPath & path, + Sink & sink, + std::function sizeCallback = [](uint64_t size){}); + + virtual bool pathExists(const CanonPath & path) = 0; + + enum Type { + tRegular, tSymlink, tDirectory, + /** + Any other node types that may be encountered on the file system, such as device nodes, sockets, named pipe, and possibly even more exotic things. + + Responsible for `"unknown"` from `builtins.readFileType "/dev/null"`. + + Unlike `DT_UNKNOWN`, this must not be used for deferring the lookup of types. 
+ */ + tMisc + }; + + struct Stat + { + Type type = tMisc; + //uint64_t fileSize = 0; // regular files only + bool isExecutable = false; // regular files only + }; + + virtual Stat lstat(const CanonPath & path) = 0; + + std::optional maybeLstat(const CanonPath & path); + + typedef std::optional DirEntry; + + typedef std::map DirEntries; + + virtual DirEntries readDirectory(const CanonPath & path) = 0; + + virtual std::string readLink(const CanonPath & path) = 0; + + virtual void dumpPath( + const CanonPath & path, + Sink & sink, + PathFilter & filter = defaultPathFilter); + + Hash hashPath( + const CanonPath & path, + PathFilter & filter = defaultPathFilter, + HashType ht = htSHA256); + + /** + * Return a corresponding path in the root filesystem, if + * possible. This is only possible for filesystems that are + * materialized in the root filesystem. + */ + virtual std::optional getPhysicalPath(const CanonPath & path) + { return std::nullopt; } + + bool operator == (const SourceAccessor & x) const + { + return number == x.number; + } + + bool operator < (const SourceAccessor & x) const + { + return number < x.number; + } + + virtual std::string showPath(const CanonPath & path); +}; + +} diff --git a/src/libutil/tests/hash.cc b/src/libutil/tests/hash.cc index e4e928b3b..9a5ebbb30 100644 --- a/src/libutil/tests/hash.cc +++ b/src/libutil/tests/hash.cc @@ -18,28 +18,28 @@ namespace nix { // values taken from: https://tools.ietf.org/html/rfc1321 auto s1 = ""; auto hash = hashString(HashType::htMD5, s1); - ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e"); + ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e"); } TEST(hashString, testKnownMD5Hashes2) { // values taken from: https://tools.ietf.org/html/rfc1321 auto s2 = "abc"; auto hash = hashString(HashType::htMD5, s2); - ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72"); + ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72"); } TEST(hashString, testKnownSHA1Hashes1) { // values taken from: https://tools.ietf.org/html/rfc3174 auto s = "abc"; auto hash = hashString(HashType::htSHA1, s); - ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d"); + ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d"); } TEST(hashString, testKnownSHA1Hashes2) { // values taken from: https://tools.ietf.org/html/rfc3174 auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; auto hash = hashString(HashType::htSHA1, s); - ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1"); + ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1"); } TEST(hashString, testKnownSHA256Hashes1) { @@ -47,7 +47,7 @@ namespace nix { auto s = "abc"; auto hash = hashString(HashType::htSHA256, s); - ASSERT_EQ(hash.to_string(Base::Base16, true), + ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"); } @@ -55,7 +55,7 @@ namespace nix { // values taken from: https://tools.ietf.org/html/rfc4634 auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; auto hash = hashString(HashType::htSHA256, s); - ASSERT_EQ(hash.to_string(Base::Base16, true), + ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1"); } @@ -63,7 
+63,7 @@ namespace nix { // values taken from: https://tools.ietf.org/html/rfc4634 auto s = "abc"; auto hash = hashString(HashType::htSHA512, s); - ASSERT_EQ(hash.to_string(Base::Base16, true), + ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9" "7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd" "454d4423643ce80e2a9ac94fa54ca49f"); @@ -74,11 +74,26 @@ namespace nix { auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"; auto hash = hashString(HashType::htSHA512, s); - ASSERT_EQ(hash.to_string(Base::Base16, true), + ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1" "7299aeadb6889018501d289e4900f7e4331b99dec4b5433a" "c7d329eeb6dd26545e96e55b874be909"); } + + /* ---------------------------------------------------------------------------- + * parseHashFormat, parseHashFormatOpt, printHashFormat + * --------------------------------------------------------------------------*/ + + TEST(hashFormat, testRoundTripPrintParse) { + for (const HashFormat hashFormat: { HashFormat::Base64, HashFormat::Base32, HashFormat::Base16, HashFormat::SRI}) { + ASSERT_EQ(parseHashFormat(printHashFormat(hashFormat)), hashFormat); + ASSERT_EQ(*parseHashFormatOpt(printHashFormat(hashFormat)), hashFormat); + } + } + + TEST(hashFormat, testParseHashFormatOptException) { + ASSERT_EQ(parseHashFormatOpt("sha0042"), std::nullopt); + } } namespace rc { diff --git a/src/libutil/tests/url.cc b/src/libutil/tests/url.cc index a908631e6..a678dad20 100644 --- a/src/libutil/tests/url.cc +++ b/src/libutil/tests/url.cc @@ -335,4 +335,13 @@ namespace nix { ASSERT_EQ(d, s); } + TEST(percentEncode, yen) { + // https://en.wikipedia.org/wiki/Percent-encoding#Character_data + std::string s = reinterpret_cast(u8"円"); + std::string e = "%E5%86%86"; + + ASSERT_EQ(percentEncode(s), e); + ASSERT_EQ(percentDecode(e), s); + } + } diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh index 98162b0f7..5c5a30dc2 100644 --- a/src/libutil/url-parts.hh +++ b/src/libutil/url-parts.hh @@ -30,7 +30,7 @@ extern std::regex refRegex; /// Instead of defining what a good Git Ref is, we define what a bad Git Ref is /// This is because of the definition of a ref in refs.c in https://github.com/git/git -/// See tests/fetchGitRefs.sh for the full definition +/// See tests/functional/fetchGitRefs.sh for the full definition const static std::string badGitRefRegexS = "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$"; extern std::regex badGitRefRegex; @@ -41,7 +41,4 @@ extern std::regex revRegex; /// A ref or revision, or a ref followed by a revision. 
const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))"; -const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*"; -extern std::regex flakeIdRegex; - } diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 9e44241ac..9b438e6cd 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -8,7 +8,6 @@ namespace nix { std::regex refRegex(refRegexS, std::regex::ECMAScript); std::regex badGitRefRegex(badGitRefRegexS, std::regex::ECMAScript); std::regex revRegex(revRegexS, std::regex::ECMAScript); -std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript); ParsedURL parseURL(const std::string & url) { @@ -44,7 +43,7 @@ ParsedURL parseURL(const std::string & url) .base = base, .scheme = scheme, .authority = authority, - .path = path, + .path = percentDecode(path), .query = decodeQuery(query), .fragment = percentDecode(std::string(fragment)) }; @@ -103,7 +102,7 @@ std::string percentEncode(std::string_view s, std::string_view keep) || keep.find(c) != std::string::npos) res += c; else - res += fmt("%%%02X", (unsigned int) c); + res += fmt("%%%02X", c & 0xFF); return res; } @@ -159,4 +158,21 @@ ParsedUrlScheme parseUrlScheme(std::string_view scheme) }; } +std::string fixGitURL(const std::string & url) +{ + std::regex scpRegex("([^/]*)@(.*):(.*)"); + if (!hasPrefix(url, "/") && std::regex_match(url, scpRegex)) + return std::regex_replace(url, scpRegex, "ssh://$1@$2/$3"); + else { + if (url.find("://") == std::string::npos) { + return (ParsedURL { + .scheme = "file", + .authority = "", + .path = url + }).to_string(); + } else + return url; + } +} + } diff --git a/src/libutil/url.hh b/src/libutil/url.hh index d2413ec0e..26c2dcc28 100644 --- a/src/libutil/url.hh +++ b/src/libutil/url.hh @@ -45,4 +45,9 @@ struct ParsedUrlScheme { ParsedUrlScheme parseUrlScheme(std::string_view scheme); +/* Detects scp-style uris (e.g. git@github.com:NixOS/nix) and fixes + them by removing the `:` and assuming a scheme of `ssh://`. Also + changes absolute paths into file:// URLs. */ +std::string fixGitURL(const std::string & url); + } diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 26f9dc8a8..3b4c181e5 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -48,6 +48,23 @@ extern char * * environ __attribute__((weak)); namespace nix { void initLibUtil() { + // Check that exception handling works. Exception handling has been observed + // not to work on darwin when the linker flags aren't quite right. + // In this case we don't want to expose the user to some unrelated uncaught + // exception, but rather tell them exactly that exception handling is + // broken. + // When exception handling fails, the message tends to be printed by the + // C++ runtime, followed by an abort. + // For example on macOS we might see an error such as + // libc++abi: terminating with uncaught exception of type nix::SysError: error: C++ exception handling is broken. This would appear to be a problem with the way Nix was compiled and/or linked and/or loaded. 
+ bool caught = false; + try { + throwExceptionSelfCheck(); + } catch (const nix::Error & _e) { + caught = true; + } + // This is not actually the main point of this check, but let's make sure anyway: + assert(caught); } std::optional getEnv(const std::string & key) @@ -1498,7 +1515,7 @@ bool shouldANSI() { return isatty(STDERR_FILENO) && getEnv("TERM").value_or("dumb") != "dumb" - && !getEnv("NO_COLOR").has_value(); + && !(getEnv("NO_COLOR").has_value() || getEnv("NOCOLOR").has_value()); } std::string filterANSIEscapes(std::string_view s, bool filterAll, unsigned int width) diff --git a/src/libutil/variant-wrapper.hh b/src/libutil/variant-wrapper.hh new file mode 100644 index 000000000..cedcb999c --- /dev/null +++ b/src/libutil/variant-wrapper.hh @@ -0,0 +1,30 @@ +#pragma once +///@file + +// not used, but will be used by callers +#include + +/** + * Force the default versions of all constructors (copy, move, copy + * assignment). + */ +#define FORCE_DEFAULT_CONSTRUCTORS(CLASS_NAME) \ + CLASS_NAME(const CLASS_NAME &) = default; \ + CLASS_NAME(CLASS_NAME &) = default; \ + CLASS_NAME(CLASS_NAME &&) = default; \ + \ + CLASS_NAME & operator =(const CLASS_NAME &) = default; \ + CLASS_NAME & operator =(CLASS_NAME &) = default; + +/** + * Make a wrapper constructor. All args are forwarded to the + * construction of the "raw" field. (Which we assume is the only one.) + * + * The moral equivalent of `using Raw::Raw;` + */ +#define MAKE_WRAPPER_CONSTRUCTOR(CLASS_NAME) \ + FORCE_DEFAULT_CONSTRUCTORS(CLASS_NAME) \ + \ + CLASS_NAME(auto &&... arg) \ + : raw(std::forward(arg)...) \ + { } diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 6510df8f0..e62c4f6b1 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -34,13 +34,14 @@ extern char * * environ __attribute__((weak)); */ static std::vector shellwords(const std::string & s) { - std::regex whitespace("^(\\s+).*"); + std::regex whitespace("^\\s+"); auto begin = s.cbegin(); std::vector res; std::string cur; enum state { sBegin, - sQuote + sSingleQuote, + sDoubleQuote }; state st = sBegin; auto it = begin; @@ -50,26 +51,39 @@ static std::vector shellwords(const std::string & s) if (regex_search(it, s.cend(), match, whitespace)) { cur.append(begin, it); res.push_back(cur); - cur.clear(); - it = match[1].second; + it = match[0].second; + if (it == s.cend()) return res; begin = it; + cur.clear(); } } switch (*it) { + case '\'': + if (st != sDoubleQuote) { + cur.append(begin, it); + begin = it + 1; + st = st == sBegin ? sSingleQuote : sBegin; + } + break; case '"': - cur.append(begin, it); - begin = it + 1; - st = st == sBegin ? sQuote : sBegin; + if (st != sSingleQuote) { + cur.append(begin, it); + begin = it + 1; + st = st == sBegin ? 
sDoubleQuote : sBegin; + } break; case '\\': - /* perl shellwords mostly just treats the next char as part of the string with no special processing */ - cur.append(begin, it); - begin = ++it; + if (st != sSingleQuote) { + /* perl shellwords mostly just treats the next char as part of the string with no special processing */ + cur.append(begin, it); + begin = ++it; + } break; } } + if (st != sBegin) throw Error("unterminated quote in shebang line"); cur.append(begin, it); - if (!cur.empty()) res.push_back(cur); + res.push_back(cur); return res; } @@ -128,7 +142,7 @@ static void main_nix_build(int argc, char * * argv) for (auto line : lines) { line = chomp(line); std::smatch match; - if (std::regex_match(line, match, std::regex("^#!\\s*nix-shell (.*)$"))) + if (std::regex_match(line, match, std::regex("^#!\\s*nix-shell\\s+(.*)$"))) for (const auto & word : shellwords(match[1].str())) args.push_back(word); } @@ -344,7 +358,7 @@ static void main_nix_build(int argc, char * * argv) } } - state->printStats(); + state->maybePrintStats(); auto buildPaths = [&](const std::vector & paths) { /* Note: we do this even when !printMissing to efficiently @@ -393,7 +407,7 @@ static void main_nix_build(int argc, char * * argv) auto bashDrv = drv->requireDrvPath(); pathsToBuild.push_back(DerivedPath::Built { - .drvPath = bashDrv, + .drvPath = makeConstantStorePathRef(bashDrv), .outputs = OutputsSpec::Names {"out"}, }); pathsToCopy.insert(bashDrv); @@ -406,8 +420,22 @@ static void main_nix_build(int argc, char * * argv) } } + std::function, const DerivedPathMap::ChildNode &)> accumDerivedPath; + + accumDerivedPath = [&](ref inputDrv, const DerivedPathMap::ChildNode & inputNode) { + if (!inputNode.value.empty()) + pathsToBuild.push_back(DerivedPath::Built { + .drvPath = inputDrv, + .outputs = OutputsSpec::Names { inputNode.value }, + }); + for (const auto & [outputName, childNode] : inputNode.childMap) + accumDerivedPath( + make_ref(SingleDerivedPath::Built { inputDrv, outputName }), + childNode); + }; + // Build or fetch all dependencies of the derivation. - for (const auto & [inputDrv0, inputOutputs] : drv.inputDrvs) { + for (const auto & [inputDrv0, inputNode] : drv.inputDrvs.map) { // To get around lambda capturing restrictions in the // standard. 
const auto & inputDrv = inputDrv0; @@ -416,10 +444,7 @@ static void main_nix_build(int argc, char * * argv) return !std::regex_search(store->printStorePath(inputDrv), std::regex(exclude)); })) { - pathsToBuild.push_back(DerivedPath::Built { - .drvPath = inputDrv, - .outputs = OutputsSpec::Names { inputOutputs }, - }); + accumDerivedPath(makeConstantStorePathRef(inputDrv), inputNode); pathsToCopy.insert(inputDrv); } } @@ -482,13 +507,21 @@ static void main_nix_build(int argc, char * * argv) if (env.count("__json")) { StorePathSet inputs; - for (auto & [depDrvPath, wantedDepOutputs] : drv.inputDrvs) { - auto outputs = evalStore->queryPartialDerivationOutputMap(depDrvPath); - for (auto & i : wantedDepOutputs) { + + std::function::ChildNode &)> accumInputClosure; + + accumInputClosure = [&](const StorePath & inputDrv, const DerivedPathMap::ChildNode & inputNode) { + auto outputs = evalStore->queryPartialDerivationOutputMap(inputDrv); + for (auto & i : inputNode.value) { auto o = outputs.at(i); store->computeFSClosure(*o, inputs); } - } + for (const auto & [outputName, childNode] : inputNode.childMap) + accumInputClosure(*outputs.at(outputName), childNode); + }; + + for (const auto & [inputDrv, inputNode] : drv.inputDrvs.map) + accumInputClosure(inputDrv, inputNode); ParsedDerivation parsedDrv(drvInfo.requireDrvPath(), drv); @@ -590,7 +623,10 @@ static void main_nix_build(int argc, char * * argv) if (outputName == "") throw Error("derivation '%s' lacks an 'outputName' attribute", store->printStorePath(drvPath)); - pathsToBuild.push_back(DerivedPath::Built{drvPath, OutputsSpec::Names{outputName}}); + pathsToBuild.push_back(DerivedPath::Built{ + .drvPath = makeConstantStorePathRef(drvPath), + .outputs = OutputsSpec::Names{outputName}, + }); pathsToBuildOrdered.push_back({drvPath, {outputName}}); drvsToCopy.insert(drvPath); diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index c1c8edd1d..4504441fa 100755 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -4,8 +4,9 @@ #include "filetransfer.hh" #include "store-api.hh" #include "legacy.hh" -#include "fetchers.hh" +#include "eval-settings.hh" // for defexpr #include "util.hh" +#include "tarball.hh" #include #include @@ -165,7 +166,7 @@ static int main_nix_channel(int argc, char ** argv) // Figure out the name of the `.nix-channels' file to use auto home = getHome(); channelsList = settings.useXDGBaseDirectories ? createNixStateDir() + "/channels" : home + "/.nix-channels"; - nixDefExpr = settings.useXDGBaseDirectories ? createNixStateDir() + "/defexpr" : home + "/.nix-defexpr"; + nixDefExpr = getNixDefExpr(); // Figure out the name of the channels profile. profile = profilesDir() + "/channels"; diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index 91b073b49..01742daa8 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -15,6 +15,7 @@ #include "value-to-json.hh" #include "xml-writer.hh" #include "legacy.hh" +#include "eval-settings.hh" // for defexpr #include #include @@ -481,7 +482,7 @@ static void printMissing(EvalState & state, DrvInfos & elems) for (auto & i : elems) if (auto drvPath = i.queryDrvPath()) targets.push_back(DerivedPath::Built{ - .drvPath = *drvPath, + .drvPath = makeConstantStorePathRef(*drvPath), .outputs = OutputsSpec::All { }, }); else @@ -759,7 +760,7 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs) std::vector paths { drvPath ? 
(DerivedPath) (DerivedPath::Built { - .drvPath = *drvPath, + .drvPath = makeConstantStorePathRef(*drvPath), .outputs = OutputsSpec::All { }, }) : (DerivedPath) (DerivedPath::Opaque { @@ -1227,7 +1228,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) else { if (v->type() == nString) { attrs2["type"] = "string"; - attrs2["value"] = v->string.s; + attrs2["value"] = v->c_str(); xml.writeEmptyElement("meta", attrs2); } else if (v->type() == nInt) { attrs2["type"] = "int"; @@ -1247,7 +1248,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) for (auto elem : v->listItems()) { if (elem->type() != nString) continue; XMLAttrs attrs3; - attrs3["value"] = elem->string.s; + attrs3["value"] = elem->c_str(); xml.writeEmptyElement("string", attrs3); } } else if (v->type() == nAttrs) { @@ -1259,7 +1260,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) if(a.value->type() != nString) continue; XMLAttrs attrs3; attrs3["type"] = globals.state->symbols[i.name]; - attrs3["value"] = a.value->string.s; + attrs3["value"] = a.value->c_str(); xml.writeEmptyElement("string", attrs3); } } @@ -1399,7 +1400,7 @@ static int main_nix_env(int argc, char * * argv) globals.instSource.type = srcUnknown; globals.instSource.systemFilter = "*"; - Path nixExprPath = settings.useXDGBaseDirectories ? createNixStateDir() + "/defexpr" : getHome() + "/.nix-defexpr"; + Path nixExprPath = getNixDefExpr(); if (!pathExists(nixExprPath)) { try { @@ -1530,7 +1531,7 @@ static int main_nix_env(int argc, char * * argv) op(globals, std::move(opFlags), std::move(opArgs)); - globals.state->printStats(); + globals.state->maybePrintStats(); return 0; } diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 446b27e66..d40196497 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -189,7 +189,7 @@ static int main_nix_instantiate(int argc, char * * argv) evalOnly, outputKind, xmlOutputSourceLocation, e); } - state->printStats(); + state->maybePrintStats(); return 0; } diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 94956df66..e4dd94585 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -9,10 +9,9 @@ #include "local-store.hh" #include "monitor-fd.hh" #include "serve-protocol.hh" +#include "serve-protocol-impl.hh" #include "shared.hh" #include "util.hh" -#include "worker-protocol.hh" -#include "worker-protocol-impl.hh" #include "graphml.hh" #include "legacy.hh" #include "path-with-outputs.hh" @@ -283,7 +282,7 @@ static void opQuery(Strings opFlags, Strings opArgs) { enum QueryType { qOutputs, qRequisites, qReferences, qReferrers - , qReferrersClosure, qDeriver, qBinding, qHash, qSize + , qReferrersClosure, qDeriver, qValidDerivers, qBinding, qHash, qSize , qTree, qGraph, qGraphML, qResolve, qRoots }; std::optional query; bool useOutput = false; @@ -299,6 +298,7 @@ static void opQuery(Strings opFlags, Strings opArgs) else if (i == "--referrers" || i == "--referers") query = qReferrers; else if (i == "--referrers-closure" || i == "--referers-closure") query = qReferrersClosure; else if (i == "--deriver" || i == "-d") query = qDeriver; + else if (i == "--valid-derivers") query = qValidDerivers; else if (i == "--binding" || i == "-b") { if (opArgs.size() == 0) throw UsageError("expected binding name"); @@ -372,6 +372,21 @@ static void opQuery(Strings opFlags, Strings opArgs) } break; + case qValidDerivers: { + StorePathSet result; + for 
(auto & i : opArgs) { + auto derivers = store->queryValidDerivers(store->followLinksToStorePath(i)); + for (const auto &i: derivers) { + result.insert(i); + } + } + auto sorted = store->topoSortPaths(result); + for (StorePaths::reverse_iterator i = sorted.rbegin(); + i != sorted.rend(); ++i) + cout << fmt("%s\n", store->printStorePath(*i)); + break; + } + case qBinding: for (auto & i : opArgs) { auto path = useDeriver(store->followLinksToStorePath(i)); @@ -391,7 +406,7 @@ static void opQuery(Strings opFlags, Strings opArgs) auto info = store->queryPathInfo(j); if (query == qHash) { assert(info->narHash.type == htSHA256); - cout << fmt("%s\n", info->narHash.to_string(Base32, true)); + cout << fmt("%s\n", info->narHash.to_string(HashFormat::Base32, true)); } else if (query == qSize) cout << fmt("%d\n", info->narSize); } @@ -754,8 +769,8 @@ static void opVerifyPath(Strings opFlags, Strings opArgs) if (current.first != info->narHash) { printError("path '%s' was modified! expected hash '%s', got '%s'", store->printStorePath(path), - info->narHash.to_string(Base32, true), - current.first.to_string(Base32, true)); + info->narHash.to_string(HashFormat::Base32, true), + current.first.to_string(HashFormat::Base32, true)); status = 1; } } @@ -803,10 +818,16 @@ static void opServe(Strings opFlags, Strings opArgs) if (magic != SERVE_MAGIC_1) throw Error("protocol mismatch"); out << SERVE_MAGIC_2 << SERVE_PROTOCOL_VERSION; out.flush(); - unsigned int clientVersion = readInt(in); + ServeProto::Version clientVersion = readInt(in); - WorkerProto::ReadConn rconn { .from = in }; - WorkerProto::WriteConn wconn { .to = out }; + ServeProto::ReadConn rconn { + .from = in, + .version = clientVersion, + }; + ServeProto::WriteConn wconn { + .to = out, + .version = clientVersion, + }; auto getBuildSettings = [&]() { // FIXME: changing options here doesn't work if we're @@ -851,7 +872,7 @@ static void opServe(Strings opFlags, Strings opArgs) case ServeProto::Command::QueryValidPaths: { bool lock = readInt(in); bool substitute = readInt(in); - auto paths = WorkerProto::Serialise::read(*store, rconn); + auto paths = ServeProto::Serialise::read(*store, rconn); if (lock && writeAllowed) for (auto & path : paths) store->addTempRoot(path); @@ -860,24 +881,24 @@ static void opServe(Strings opFlags, Strings opArgs) store->substitutePaths(paths); } - WorkerProto::write(*store, wconn, store->queryValidPaths(paths)); + ServeProto::write(*store, wconn, store->queryValidPaths(paths)); break; } case ServeProto::Command::QueryPathInfos: { - auto paths = WorkerProto::Serialise::read(*store, rconn); + auto paths = ServeProto::Serialise::read(*store, rconn); // !!! Maybe we want a queryPathInfos? for (auto & i : paths) { try { auto info = store->queryPathInfo(i); out << store->printStorePath(info->path) << (info->deriver ? store->printStorePath(*info->deriver) : ""); - WorkerProto::write(*store, wconn, info->references); + ServeProto::write(*store, wconn, info->references); // !!! Maybe we want compression? 
out << info->narSize // downloadSize << info->narSize; if (GET_PROTOCOL_MINOR(clientVersion) >= 4) - out << info->narHash.to_string(Base32, true) + out << info->narHash.to_string(HashFormat::Base32, true) << renderContentAddress(info->ca) << info->sigs; } catch (InvalidPath &) { @@ -900,7 +921,7 @@ static void opServe(Strings opFlags, Strings opArgs) case ServeProto::Command::ExportPaths: { readInt(in); // obsolete - store->exportPaths(WorkerProto::Serialise::read(*store, rconn), out); + store->exportPaths(ServeProto::Serialise::read(*store, rconn), out); break; } @@ -938,26 +959,16 @@ static void opServe(Strings opFlags, Strings opArgs) MonitorFdHup monitor(in.fd); auto status = store->buildDerivation(drvPath, drv); - out << status.status << status.errorMsg; - - if (GET_PROTOCOL_MINOR(clientVersion) >= 3) - out << status.timesBuilt << status.isNonDeterministic << status.startTime << status.stopTime; - if (GET_PROTOCOL_MINOR(clientVersion) >= 6) { - DrvOutputs builtOutputs; - for (auto & [output, realisation] : status.builtOutputs) - builtOutputs.insert_or_assign(realisation.id, realisation); - WorkerProto::write(*store, wconn, builtOutputs); - } - + ServeProto::write(*store, wconn, status); break; } case ServeProto::Command::QueryClosure: { bool includeOutputs = readInt(in); StorePathSet closure; - store->computeFSClosure(WorkerProto::Serialise::read(*store, rconn), + store->computeFSClosure(ServeProto::Serialise::read(*store, rconn), closure, false, includeOutputs); - WorkerProto::write(*store, wconn, closure); + ServeProto::write(*store, wconn, closure); break; } @@ -972,7 +983,7 @@ static void opServe(Strings opFlags, Strings opArgs) }; if (deriver != "") info.deriver = store->parseStorePath(deriver); - info.references = WorkerProto::Serialise::read(*store, rconn); + info.references = ServeProto::Serialise::read(*store, rconn); in >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(in); info.ca = ContentAddress::parseOpt(readString(in)); diff --git a/src/nix/app.cc b/src/nix/app.cc index e0f68b4fc..935ed18ec 100644 --- a/src/nix/app.cc +++ b/src/nix/app.cc @@ -20,14 +20,26 @@ StringPairs resolveRewrites( const std::vector & dependencies) { StringPairs res; - for (auto & dep : dependencies) - if (auto drvDep = std::get_if(&dep.path)) - if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) - for (auto & [ outputName, outputPath ] : drvDep->outputs) - res.emplace( - DownstreamPlaceholder::unknownCaOutput(drvDep->drvPath, outputName).render(), - store.printStorePath(outputPath) - ); + if (!experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { + return res; + } + for (auto &dep: dependencies) { + auto drvDep = std::get_if(&dep.path); + if (!drvDep) { + continue; + } + + for (const auto & [ outputName, outputPath ] : drvDep->outputs) { + res.emplace( + DownstreamPlaceholder::fromSingleDerivedPathBuilt( + SingleDerivedPath::Built { + .drvPath = make_ref(drvDep->drvPath->discardOutputPath()), + .output = outputName, + }).render(), + store.printStorePath(outputPath) + ); + } + } return res; } @@ -50,11 +62,11 @@ UnresolvedApp InstallableValue::toApp(EvalState & state) auto type = cursor->getAttr("type")->getString(); - std::string expected = !attrPath.empty() && + std::string expectedType = !attrPath.empty() && (state.symbols[attrPath[0]] == "apps" || state.symbols[attrPath[0]] == "defaultApp") ? 
"app" : "derivation"; - if (type != expected) - throw Error("attribute '%s' should have type '%s'", cursor->getAttrPathStr(), expected); + if (type != expectedType) + throw Error("attribute '%s' should have type '%s'", cursor->getAttrPathStr(), expectedType); if (type == "app") { auto [program, context] = cursor->getAttr("program")->getStringWithContext(); @@ -65,7 +77,7 @@ UnresolvedApp InstallableValue::toApp(EvalState & state) [&](const NixStringContextElem::DrvDeep & d) -> DerivedPath { /* We want all outputs of the drv */ return DerivedPath::Built { - .drvPath = d.drvPath, + .drvPath = makeConstantStorePathRef(d.drvPath), .outputs = OutputsSpec::All {}, }; }, @@ -80,10 +92,10 @@ UnresolvedApp InstallableValue::toApp(EvalState & state) .path = o.path, }; }, - }, c.raw())); + }, c.raw)); } - return UnresolvedApp{App { + return UnresolvedApp { App { .context = std::move(context2), .program = program, }}; @@ -106,7 +118,7 @@ UnresolvedApp InstallableValue::toApp(EvalState & state) auto program = outPath + "/bin/" + mainProgram; return UnresolvedApp { App { .context = { DerivedPath::Built { - .drvPath = drvPath, + .drvPath = makeConstantStorePathRef(drvPath), .outputs = OutputsSpec::Names { outputName }, } }, .program = program, diff --git a/src/nix/build.cc b/src/nix/build.cc index ad1842a4e..479100186 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -9,18 +9,18 @@ using namespace nix; -nlohmann::json derivedPathsToJSON(const DerivedPaths & paths, ref store) +static nlohmann::json derivedPathsToJSON(const DerivedPaths & paths, Store & store) { auto res = nlohmann::json::array(); for (auto & t : paths) { - std::visit([&res, store](const auto & t) { + std::visit([&](const auto & t) { res.push_back(t.toJSON(store)); }, t.raw()); } return res; } -nlohmann::json builtPathsWithResultToJSON(const std::vector & buildables, ref store) +static nlohmann::json builtPathsWithResultToJSON(const std::vector & buildables, const Store & store) { auto res = nlohmann::json::array(); for (auto & b : buildables) { @@ -125,7 +125,7 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile printMissing(store, pathsToBuild, lvlError); if (json) - logger->cout("%s", derivedPathsToJSON(pathsToBuild, store).dump()); + logger->cout("%s", derivedPathsToJSON(pathsToBuild, *store).dump()); return; } @@ -136,7 +136,7 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile installables, repair ? 
bmRepair : buildMode); - if (json) logger->cout("%s", builtPathsWithResultToJSON(buildables, store).dump()); + if (json) logger->cout("%s", builtPathsWithResultToJSON(buildables, *store).dump()); if (outLink != "") if (auto store2 = store.dynamic_pointer_cast()) diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc index bcc00d490..504e35c81 100644 --- a/src/nix/bundle.cc +++ b/src/nix/bundle.cc @@ -21,8 +21,8 @@ struct CmdBundle : InstallableValueCommand .description = fmt("Use a custom bundler instead of the default (`%s`).", bundler), .labels = {"flake-url"}, .handler = {&bundler}, - .completer = {[&](size_t, std::string_view prefix) { - completeFlakeRef(getStore(), prefix); + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { + completeFlakeRef(completions, getStore(), prefix); }} }); @@ -80,7 +80,7 @@ struct CmdBundle : InstallableValueCommand auto [bundlerFlakeRef, bundlerName, extendedOutputsSpec] = parseFlakeRefWithFragmentAndExtendedOutputsSpec(bundler, absPath(".")); const flake::LockFlags lockFlags{ .writeLockFile = false }; InstallableFlake bundler{this, - evalState, std::move(bundlerFlakeRef), bundlerName, extendedOutputsSpec, + evalState, std::move(bundlerFlakeRef), bundlerName, std::move(extendedOutputsSpec), {"bundlers." + settings.thisSystem.get() + ".default", "defaultBundler." + settings.thisSystem.get() }, @@ -109,7 +109,7 @@ struct CmdBundle : InstallableValueCommand store->buildPaths({ DerivedPath::Built { - .drvPath = drvPath, + .drvPath = makeConstantStorePathRef(drvPath), .outputs = OutputsSpec::All { }, }, }); diff --git a/src/nix/daemon.cc b/src/nix/daemon.cc index 1511f9e6e..af428018a 100644 --- a/src/nix/daemon.cc +++ b/src/nix/daemon.cc @@ -500,6 +500,45 @@ static RegisterLegacyCommand r_nix_daemon("nix-daemon", main_nix_daemon); struct CmdDaemon : StoreCommand { + bool stdio = false; + std::optional isTrustedOpt = std::nullopt; + + CmdDaemon() + { + addFlag({ + .longName = "stdio", + .description = "Attach to standard I/O, instead of trying to bind to a UNIX socket.", + .handler = {&stdio, true}, + }); + + addFlag({ + .longName = "force-trusted", + .description = "Force the daemon to trust connecting clients.", + .handler = {[&]() { + isTrustedOpt = Trusted; + }}, + .experimentalFeature = Xp::DaemonTrustOverride, + }); + + addFlag({ + .longName = "force-untrusted", + .description = "Force the daemon to not trust connecting clients. 
The connection will be processed by the receiving daemon before forwarding commands.", + .handler = {[&]() { + isTrustedOpt = NotTrusted; + }}, + .experimentalFeature = Xp::DaemonTrustOverride, + }); + + addFlag({ + .longName = "default-trust", + .description = "Use Nix's default trust.", + .handler = {[&]() { + isTrustedOpt = std::nullopt; + }}, + .experimentalFeature = Xp::DaemonTrustOverride, + }); + } + std::string description() override { return "daemon to perform store operations on behalf of non-root clients"; @@ -516,7 +555,7 @@ struct CmdDaemon : StoreCommand void run(ref store) override { - runDaemon(false, std::nullopt); + runDaemon(stdio, isTrustedOpt); } }; diff --git a/src/nix/daemon.md b/src/nix/daemon.md index d5cdadf08..b1ea850ed 100644 --- a/src/nix/daemon.md +++ b/src/nix/daemon.md @@ -1,20 +1,44 @@ R""( -# Example +# Examples -* Run the daemon in the foreground: +* Run the daemon: ```console # nix daemon ``` +* Run the daemon and listen on standard I/O instead of binding to a UNIX socket: + + ```console + # nix daemon --stdio + ``` + +* Run the daemon and force all connections to be trusted: + + ```console + # nix daemon --force-trusted + ``` + +* Run the daemon and force all connections to be untrusted: + + ```console + # nix daemon --force-untrusted + ``` + +* Run the daemon, listen on standard I/O, and force all connections to use Nix's default trust: + + ```console + # nix daemon --stdio --default-trust + ``` + # Description This command runs the Nix daemon, which is a required component in multi-user Nix installations. It runs build tasks and other operations on the Nix store on behalf of non-root users. Usually you don't run the daemon directly; instead it's managed by a service -management framework such as `systemd`. +management framework such as `systemd` on Linux, or `launchctl` on Darwin. Note that this daemon does not fork into the background. 
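As a hedged illustration of where the new `--stdio` flag and the trust-override flags come into play (the host name below is hypothetical): a remote store accessed via `ssh-ng://` is, to the best of my understanding, served by a `nix daemon --stdio` process started on the remote machine, so that daemon's trust settings decide what the connecting client is allowed to do.

```console
# Illustrative host; the remote end of this connection runs `nix daemon --stdio`.
$ nix store info --store ssh-ng://builder.example.org

# A remote daemon started by hand could force clients to be treated as
# untrusted (requires the daemon-trust-override experimental feature):
# nix daemon --stdio --force-untrusted
```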
diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 195eeaa21..b080a3939 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -8,9 +8,12 @@ #include "derivations.hh" #include "progress-bar.hh" #include "run.hh" +#include "util.hh" +#include #include #include +#include using namespace nix; @@ -51,6 +54,7 @@ struct BuildEnvironment std::map vars; std::map bashFunctions; + std::optional> structuredAttrs; static BuildEnvironment fromJSON(std::string_view in) { @@ -74,6 +78,10 @@ struct BuildEnvironment res.bashFunctions.insert({name, def}); } + if (json.contains("structuredAttrs")) { + res.structuredAttrs = {json["structuredAttrs"][".attrs.json"], json["structuredAttrs"][".attrs.sh"]}; + } + return res; } @@ -102,6 +110,13 @@ struct BuildEnvironment res["bashFunctions"] = bashFunctions; + if (providesStructuredAttrs()) { + auto contents = nlohmann::json::object(); + contents[".attrs.sh"] = getAttrsSH(); + contents[".attrs.json"] = getAttrsJSON(); + res["structuredAttrs"] = std::move(contents); + } + auto json = res.dump(); assert(BuildEnvironment::fromJSON(json) == *this); @@ -109,6 +124,23 @@ struct BuildEnvironment return json; } + bool providesStructuredAttrs() const + { + return structuredAttrs.has_value(); + } + + std::string getAttrsJSON() const + { + assert(providesStructuredAttrs()); + return structuredAttrs->first; + } + + std::string getAttrsSH() const + { + assert(providesStructuredAttrs()); + return structuredAttrs->second; + } + void toBash(std::ostream & out, const std::set & ignoreVars) const { for (auto & [name, value] : vars) { @@ -235,7 +267,7 @@ static StorePath getDerivationEnvironment(ref store, ref evalStore /* Build the derivation. */ store->buildPaths( { DerivedPath::Built { - .drvPath = shellDrvPath, + .drvPath = makeConstantStorePathRef(shellDrvPath), .outputs = OutputsSpec::All { }, }}, bmNormal, evalStore); @@ -291,6 +323,7 @@ struct Common : InstallableCommand, MixProfile std::string makeRcScript( ref store, const BuildEnvironment & buildEnvironment, + const Path & tmpDir, const Path & outputsDir = absPath(".") + "/outputs") { // A list of colon-separated environment variables that should be @@ -353,9 +386,48 @@ struct Common : InstallableCommand, MixProfile } } + if (buildEnvironment.providesStructuredAttrs()) { + fixupStructuredAttrs( + "sh", + "NIX_ATTRS_SH_FILE", + buildEnvironment.getAttrsSH(), + rewrites, + buildEnvironment, + tmpDir + ); + fixupStructuredAttrs( + "json", + "NIX_ATTRS_JSON_FILE", + buildEnvironment.getAttrsJSON(), + rewrites, + buildEnvironment, + tmpDir + ); + } + return rewriteStrings(script, rewrites); } + /** + * Replace the value of NIX_ATTRS_*_FILE (`/build/.attrs.*`) with a tmp file + * that's accessible from the interactive shell session. + */ + void fixupStructuredAttrs( + const std::string & ext, + const std::string & envVar, + const std::string & content, + StringMap & rewrites, + const BuildEnvironment & buildEnvironment, + const Path & tmpDir) + { + auto targetFilePath = tmpDir + "/.attrs." 
+ ext; + writeFile(targetFilePath, content); + + auto fileInBuilderEnv = buildEnvironment.vars.find(envVar); + assert(fileInBuilderEnv != buildEnvironment.vars.end()); + rewrites.insert({BuildEnvironment::getString(fileInBuilderEnv->second), targetFilePath}); + } + Strings getDefaultFlakeAttrPaths() override { Strings paths{ @@ -487,7 +559,9 @@ struct CmdDevelop : Common, MixEnvironment auto [rcFileFd, rcFilePath] = createTempFile("nix-shell"); - auto script = makeRcScript(store, buildEnvironment); + AutoDelete tmpDir(createTempDir("", "nix-develop"), true); + + auto script = makeRcScript(store, buildEnvironment, (Path) tmpDir); if (verbosity >= lvlDebug) script += "set -x\n"; @@ -547,7 +621,7 @@ struct CmdDevelop : Common, MixEnvironment state, std::move(nixpkgs), "bashInteractive", - DefaultOutputs(), + ExtendedOutputsSpec::Default(), Strings{}, Strings{"legacyPackages." + settings.thisSystem.get() + "."}, nixpkgsLockFlags); @@ -615,10 +689,12 @@ struct CmdPrintDevEnv : Common, MixJSON stopProgressBar(); - logger->writeToStdout( - json - ? buildEnvironment.toJSON() - : makeRcScript(store, buildEnvironment)); + if (json) { + logger->writeToStdout(buildEnvironment.toJSON()); + } else { + AutoDelete tmpDir(createTempDir("", "nix-dev-env"), true); + logger->writeToStdout(makeRcScript(store, buildEnvironment, tmpDir)); + } } }; diff --git a/src/nix/develop.md b/src/nix/develop.md index 1b5a8aeba..c49b39669 100644 --- a/src/nix/develop.md +++ b/src/nix/develop.md @@ -69,7 +69,7 @@ R""( * Run a series of script commands: ```console - # nix develop --command bash --command "mkdir build && cmake .. && make" + # nix develop --command bash -c "mkdir build && cmake .. && make" ``` # Description diff --git a/src/nix/eval.cc b/src/nix/eval.cc index d880bef0a..b34af34e0 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -85,7 +85,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption state->forceValue(v, pos); if (v.type() == nString) // FIXME: disallow strings with contexts? 
- writeFile(path, v.string.s); + writeFile(path, v.string_view()); else if (v.type() == nAttrs) { if (mkdir(path.c_str(), 0777) == -1) throw SysError("creating directory '%s'", path); diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 3ce1de44a..0116eff2e 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -36,8 +36,8 @@ public: .label = "flake-url", .optional = true, .handler = {&flakeUrl}, - .completer = {[&](size_t, std::string_view prefix) { - completeFlakeRef(getStore(), prefix); + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { + completeFlakeRef(completions, getStore(), prefix); }} }); } @@ -52,9 +52,12 @@ public: return flake::lockFlake(*getEvalState(), getFlakeRef(), lockFlags); } - std::vector getFlakesForCompletion() override + std::vector getFlakeRefsForCompletion() override { - return {flakeUrl}; + return { + // Like getFlakeRef but with expandTilde calld first + parseFlakeRef(expandTilde(flakeUrl), absPath(".")) + }; } }; @@ -179,14 +182,14 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs()); if (auto rev = flake.lockedRef.input.getRev()) - j["revision"] = rev->to_string(Base16, false); + j["revision"] = rev->to_string(HashFormat::Base16, false); if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev")) j["dirtyRevision"] = *dirtyRev; if (auto revCount = flake.lockedRef.input.getRevCount()) j["revCount"] = *revCount; if (auto lastModified = flake.lockedRef.input.getLastModified()) j["lastModified"] = *lastModified; - j["path"] = store->printStorePath(flake.sourceInfo->storePath); + j["path"] = store->printStorePath(flake.storePath); j["locks"] = lockedFlake.lockFile.toJSON(); logger->cout("%s", j.dump()); } else { @@ -202,11 +205,11 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON *flake.description); logger->cout( ANSI_BOLD "Path:" ANSI_NORMAL " %s", - store->printStorePath(flake.sourceInfo->storePath)); + store->printStorePath(flake.storePath)); if (auto rev = flake.lockedRef.input.getRev()) logger->cout( ANSI_BOLD "Revision:" ANSI_NORMAL " %s", - rev->to_string(Base16, false)); + rev->to_string(HashFormat::Base16, false)); if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev")) logger->cout( ANSI_BOLD "Revision:" ANSI_NORMAL " %s", @@ -233,9 +236,13 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON bool last = i + 1 == node.inputs.size(); if (auto lockedNode = std::get_if<0>(&input.second)) { - logger->cout("%s" ANSI_BOLD "%s" ANSI_NORMAL ": %s", + std::string lastModifiedStr = ""; + if (auto lastModified = (*lockedNode)->lockedRef.input.getLastModified()) + lastModifiedStr = fmt(" (%s)", std::put_time(std::gmtime(&*lastModified), "%F %T")); + logger->cout("%s" ANSI_BOLD "%s" ANSI_NORMAL ": %s%s", prefix + (last ? 
treeLast : treeConn), input.first, - (*lockedNode)->lockedRef); + (*lockedNode)->lockedRef, + lastModifiedStr); bool firstVisit = visited.insert(*lockedNode).second; @@ -544,9 +551,9 @@ struct CmdFlakeCheck : FlakeCommand *attr2.value, attr2.pos); if (drvPath && attr_name == settings.thisSystem.get()) { drvPaths.push_back(DerivedPath::Built { - .drvPath = *drvPath, - .outputs = OutputsSpec::All { }, - }); + .drvPath = makeConstantStorePathRef(*drvPath), + .outputs = OutputsSpec::All { }, + }); } } } @@ -758,8 +765,9 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand .description = "The template to use.", .labels = {"template"}, .handler = {&templateUrl}, - .completer = {[&](size_t, std::string_view prefix) { + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { completeFlakeRefWithFragment( + completions, getEvalState(), lockFlags, defaultTemplateAttrPathsPrefixes, @@ -778,7 +786,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand auto [templateFlakeRef, templateName] = parseFlakeRefWithFragment(templateUrl, absPath(".")); auto installable = InstallableFlake(nullptr, - evalState, std::move(templateFlakeRef), templateName, DefaultOutputs(), + evalState, std::move(templateFlakeRef), templateName, ExtendedOutputsSpec::Default(), defaultTemplateAttrPaths, defaultTemplateAttrPathsPrefixes, lockFlags); @@ -972,7 +980,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun StorePathSet sources; - sources.insert(flake.flake.sourceInfo->storePath); + sources.insert(flake.flake.storePath); // FIXME: use graph output, handle cycles. std::function traverse; @@ -984,7 +992,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun auto storePath = dryRun ? (*inputNode)->lockedRef.input.computeStorePath(*store) - : (*inputNode)->lockedRef.input.fetch(store).first.storePath; + : (*inputNode)->lockedRef.input.fetch(store).first; if (json) { auto& jsonObj3 = jsonObj2[inputName]; jsonObj3["path"] = store->printStorePath(storePath); @@ -1001,7 +1009,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun if (json) { nlohmann::json jsonRoot = { - {"path", store->printStorePath(flake.flake.sourceInfo->storePath)}, + {"path", store->printStorePath(flake.flake.storePath)}, {"inputs", traverse(*flake.lockFile.root)}, }; logger->cout("%s", jsonRoot); @@ -1335,19 +1343,21 @@ struct CmdFlakePrefetch : FlakeCommand, MixJSON { auto originalRef = getFlakeRef(); auto resolvedRef = originalRef.resolve(store); - auto [tree, lockedRef] = resolvedRef.fetchTree(store); - auto hash = store->queryPathInfo(tree.storePath)->narHash; + auto [storePath, lockedRef] = resolvedRef.fetchTree(store); + auto hash = store->queryPathInfo(storePath)->narHash; if (json) { auto res = nlohmann::json::object(); - res["storePath"] = store->printStorePath(tree.storePath); - res["hash"] = hash.to_string(SRI, true); + res["storePath"] = store->printStorePath(storePath); + res["hash"] = hash.to_string(HashFormat::SRI, true); + res["original"] = fetchers::attrsToJSON(resolvedRef.toAttrs()); + res["locked"] = fetchers::attrsToJSON(lockedRef.toAttrs()); logger->cout(res.dump()); } else { notice("Downloaded '%s' to '%s' (hash '%s').", lockedRef.to_string(), - store->printStorePath(tree.storePath), - hash.to_string(SRI, true)); + store->printStorePath(storePath), + hash.to_string(HashFormat::SRI, true)); } } }; diff --git a/src/nix/flake.md b/src/nix/flake.md index 92f477917..f08648417 100644 --- a/src/nix/flake.md +++ b/src/nix/flake.md @@ -67,6 +67,11 @@ inputs.nixpkgs = { }; ``` +Following 
[RFC 3986](https://datatracker.ietf.org/doc/html/rfc3986#section-2.1), +characters outside of the allowed range (i.e. neither [reserved characters](https://datatracker.ietf.org/doc/html/rfc3986#section-2.2) +nor [unreserved characters](https://datatracker.ietf.org/doc/html/rfc3986#section-2.3)) +must be percent-encoded. + ### Examples Here are some examples of flake references in their URL-like representation: @@ -103,10 +108,14 @@ The semantic of such a path is as follows: 2. The filesystem root (/), or 3. A folder on a different mount point. +Contrary to URL-like references, path-like flake references can contain arbitrary unicode characters (except `#` and `?`). + ### Examples * `.`: The flake to which the current directory belongs to. * `/home/alice/src/patchelf`: A flake in some other directory. +* `./../sub directory/with Ûñî©ôδ€`: A flake in another relative directory that + has Unicode characters in its name. ## Flake reference attributes @@ -173,6 +182,12 @@ Currently the `type` attribute can be one of the following: git(+http|+https|+ssh|+git|+file|):(//)?(\?)? ``` + or + + ``` + @: + ``` + The `ref` attribute defaults to resolving the `HEAD` reference. The `rev` attribute must denote a commit that exists in the branch diff --git a/src/nix/get-env.sh b/src/nix/get-env.sh index a7a8a01b9..832cc2f11 100644 --- a/src/nix/get-env.sh +++ b/src/nix/get-env.sh @@ -1,5 +1,5 @@ set -e -if [ -e .attrs.sh ]; then source .attrs.sh; fi +if [ -e "$NIX_ATTRS_SH_FILE" ]; then source "$NIX_ATTRS_SH_FILE"; fi export IN_NIX_SHELL=impure export dontAddDisableDepTrack=1 @@ -101,7 +101,21 @@ __dumpEnv() { printf "}" done < <(printf "%s\n" "$__vars") - printf '\n }\n}' + printf '\n }' + + if [ -e "$NIX_ATTRS_SH_FILE" ]; then + printf ',\n "structuredAttrs": {\n ' + __escapeString ".attrs.sh" + printf ': ' + __escapeString "$(<"$NIX_ATTRS_SH_FILE")" + printf ',\n ' + __escapeString ".attrs.json" + printf ': ' + __escapeString "$(<"$NIX_ATTRS_JSON_FILE")" + printf '\n }' + fi + + printf '\n}' } __escapeString() { @@ -117,7 +131,7 @@ __escapeString() { # In case of `__structuredAttrs = true;` the list of outputs is an associative # array with a format like `outname => /nix/store/hash-drvname-outname`, so `__olist` # must contain the array's keys (hence `${!...[@]}`) in this case. 
-if [ -e .attrs.sh ]; then +if [ -e "$NIX_ATTRS_SH_FILE" ]; then __olist="${!outputs[@]}" else __olist=$outputs diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 9feca9345..d6595dcca 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -11,7 +11,7 @@ using namespace nix; struct CmdHashBase : Command { FileIngestionMethod mode; - Base base = SRI; + HashFormat hashFormat = HashFormat::SRI; bool truncate = false; HashType ht = htSHA256; std::vector paths; @@ -22,25 +22,25 @@ struct CmdHashBase : Command addFlag({ .longName = "sri", .description = "Print the hash in SRI format.", - .handler = {&base, SRI}, + .handler = {&hashFormat, HashFormat::SRI}, }); addFlag({ .longName = "base64", .description = "Print the hash in base-64 format.", - .handler = {&base, Base64}, + .handler = {&hashFormat, HashFormat::Base64}, }); addFlag({ .longName = "base32", .description = "Print the hash in base-32 (Nix-specific) format.", - .handler = {&base, Base32}, + .handler = {&hashFormat, HashFormat::Base32}, }); addFlag({ .longName = "base16", .description = "Print the hash in base-16 format.", - .handler = {&base, Base16}, + .handler = {&hashFormat, HashFormat::Base16}, }); addFlag(Flag::mkHashTypeFlag("type", &ht)); @@ -94,18 +94,18 @@ struct CmdHashBase : Command Hash h = hashSink->finish().first; if (truncate && h.hashSize > 20) h = compressHash(h, 20); - logger->cout(h.to_string(base, base == SRI)); + logger->cout(h.to_string(hashFormat, hashFormat == HashFormat::SRI)); } } }; struct CmdToBase : Command { - Base base; + HashFormat hashFormat; std::optional ht; std::vector args; - CmdToBase(Base base) : base(base) + CmdToBase(HashFormat hashFormat) : hashFormat(hashFormat) { addFlag(Flag::mkHashTypeOptFlag("type", &ht)); expectArgs("strings", &args); @@ -114,16 +114,16 @@ struct CmdToBase : Command std::string description() override { return fmt("convert a hash to %s representation", - base == Base16 ? "base-16" : - base == Base32 ? "base-32" : - base == Base64 ? "base-64" : + hashFormat == HashFormat::Base16 ? "base-16" : + hashFormat == HashFormat::Base32 ? "base-32" : + hashFormat == HashFormat::Base64 ? 
"base-64" : "SRI"); } void run() override { for (auto s : args) - logger->cout(Hash::parseAny(s, ht).to_string(base, base == SRI)); + logger->cout(Hash::parseAny(s, ht).to_string(hashFormat, hashFormat == HashFormat::SRI)); } }; @@ -133,10 +133,10 @@ struct CmdHash : NixMultiCommand : MultiCommand({ {"file", []() { return make_ref(FileIngestionMethod::Flat);; }}, {"path", []() { return make_ref(FileIngestionMethod::Recursive); }}, - {"to-base16", []() { return make_ref(Base16); }}, - {"to-base32", []() { return make_ref(Base32); }}, - {"to-base64", []() { return make_ref(Base64); }}, - {"to-sri", []() { return make_ref(SRI); }}, + {"to-base16", []() { return make_ref(HashFormat::Base16); }}, + {"to-base32", []() { return make_ref(HashFormat::Base32); }}, + {"to-base64", []() { return make_ref(HashFormat::Base64); }}, + {"to-sri", []() { return make_ref(HashFormat::SRI); }}, }) { } @@ -162,7 +162,7 @@ static int compatNixHash(int argc, char * * argv) { std::optional ht; bool flat = false; - Base base = Base16; + HashFormat hashFormat = HashFormat::Base16; bool truncate = false; enum { opHash, opTo } op = opHash; std::vector ss; @@ -173,10 +173,10 @@ static int compatNixHash(int argc, char * * argv) else if (*arg == "--version") printVersion("nix-hash"); else if (*arg == "--flat") flat = true; - else if (*arg == "--base16") base = Base16; - else if (*arg == "--base32") base = Base32; - else if (*arg == "--base64") base = Base64; - else if (*arg == "--sri") base = SRI; + else if (*arg == "--base16") hashFormat = HashFormat::Base16; + else if (*arg == "--base32") hashFormat = HashFormat::Base32; + else if (*arg == "--base64") hashFormat = HashFormat::Base64; + else if (*arg == "--sri") hashFormat = HashFormat::SRI; else if (*arg == "--truncate") truncate = true; else if (*arg == "--type") { std::string s = getArg(*arg, arg, end); @@ -184,19 +184,19 @@ static int compatNixHash(int argc, char * * argv) } else if (*arg == "--to-base16") { op = opTo; - base = Base16; + hashFormat = HashFormat::Base16; } else if (*arg == "--to-base32") { op = opTo; - base = Base32; + hashFormat = HashFormat::Base32; } else if (*arg == "--to-base64") { op = opTo; - base = Base64; + hashFormat = HashFormat::Base64; } else if (*arg == "--to-sri") { op = opTo; - base = SRI; + hashFormat = HashFormat::SRI; } else if (*arg != "" && arg->at(0) == '-') return false; @@ -209,14 +209,14 @@ static int compatNixHash(int argc, char * * argv) CmdHashBase cmd(flat ? 
FileIngestionMethod::Flat : FileIngestionMethod::Recursive); if (!ht.has_value()) ht = htMD5; cmd.ht = ht.value(); - cmd.base = base; + cmd.hashFormat = hashFormat; cmd.truncate = truncate; cmd.paths = ss; cmd.run(); } else { - CmdToBase cmd(base); + CmdToBase cmd(hashFormat); cmd.args = ss; if (ht.has_value()) cmd.ht = ht; cmd.run(); diff --git a/src/nix/local.mk b/src/nix/local.mk index 20ea29d10..57f8259c4 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -31,7 +31,7 @@ src/nix/develop.cc: src/nix/get-env.sh.gen.hh src/nix-channel/nix-channel.cc: src/nix-channel/unpack-channel.nix.gen.hh -src/nix/main.cc: doc/manual/generate-manpage.nix.gen.hh doc/manual/utils.nix.gen.hh +src/nix/main.cc: doc/manual/generate-manpage.nix.gen.hh doc/manual/utils.nix.gen.hh doc/manual/generate-settings.nix.gen.hh doc/manual/generate-store-info.nix.gen.hh src/nix/doc/files/%.md: doc/manual/src/command-ref/files/%.md @mkdir -p $$(dirname $@) diff --git a/src/nix/log.cc b/src/nix/log.cc index aaf829764..9a9bd30f9 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -33,6 +33,17 @@ struct CmdLog : InstallableCommand auto b = installable->toDerivedPath(); + // For compat with CLI today, TODO revisit + auto oneUp = std::visit(overloaded { + [&](const DerivedPath::Opaque & bo) { + return make_ref(bo); + }, + [&](const DerivedPath::Built & bfd) { + return bfd.drvPath; + }, + }, b.path.raw()); + auto path = resolveDerivedPath(*store, *oneUp); + RunPager pager; for (auto & sub : subs) { auto * logSubP = dynamic_cast(&*sub); @@ -42,14 +53,7 @@ struct CmdLog : InstallableCommand } auto & logSub = *logSubP; - auto log = std::visit(overloaded { - [&](const DerivedPath::Opaque & bo) { - return logSub.getBuildLog(bo.path); - }, - [&](const DerivedPath::Built & bfd) { - return logSub.getBuildLog(bfd.drvPath); - }, - }, b.path.raw()); + auto log = logSub.getBuildLog(path); if (!log) continue; stopProgressBar(); printInfo("got build log for '%s' from '%s'", installable->what(), logSub.getUri()); diff --git a/src/nix/main.cc b/src/nix/main.cc index c5a9c8b33..ffba10099 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -1,5 +1,6 @@ #include +#include "args/root.hh" #include "command.hh" #include "common-args.hh" #include "eval.hh" @@ -12,6 +13,7 @@ #include "finally.hh" #include "loggers.hh" #include "markdown.hh" +#include "memory-input-accessor.hh" #include #include @@ -55,7 +57,7 @@ static bool haveInternet() std::string programPath; -struct NixArgs : virtual MultiCommand, virtual MixCommonArgs +struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs { bool useNet = true; bool refresh = false; @@ -204,21 +206,29 @@ static void showHelp(std::vector subcommand, NixArgs & toplevel) auto vGenerateManpage = state.allocValue(); state.eval(state.parseExprFromString( #include "generate-manpage.nix.gen.hh" - , CanonPath::root), *vGenerateManpage); + , state.rootPath(CanonPath::root)), *vGenerateManpage); - auto vUtils = state.allocValue(); - state.cacheFile( - CanonPath("/utils.nix"), CanonPath("/utils.nix"), - state.parseExprFromString( - #include "utils.nix.gen.hh" - , CanonPath::root), - *vUtils); + state.corepkgsFS->addFile( + CanonPath("utils.nix"), + #include "utils.nix.gen.hh" + ); + + state.corepkgsFS->addFile( + CanonPath("/generate-settings.nix"), + #include "generate-settings.nix.gen.hh" + ); + + state.corepkgsFS->addFile( + CanonPath("/generate-store-info.nix"), + #include "generate-store-info.nix.gen.hh" + ); auto vDump = state.allocValue(); vDump->mkString(toplevel.dumpCli()); auto vRes 
= state.allocValue(); - state.callFunction(*vGenerateManpage, *vDump, *vRes, noPos); + state.callFunction(*vGenerateManpage, state.getBuiltin("false"), *vRes, noPos); + state.callFunction(*vRes, *vDump, *vRes, noPos); auto attr = vRes->attrs->get(state.symbols.create(mdName + ".md")); if (!attr) @@ -232,10 +242,7 @@ static void showHelp(std::vector subcommand, NixArgs & toplevel) static NixArgs & getNixArgs(Command & cmd) { - assert(cmd.parent); - MultiCommand * toplevel = cmd.parent; - while (toplevel->parent) toplevel = toplevel->parent; - return dynamic_cast(*toplevel); + return dynamic_cast(cmd.getRoot()); } struct CmdHelp : Command @@ -359,6 +366,7 @@ void mainWrapped(int argc, char * * argv) experimentalFeatureSettings.experimentalFeatures = { Xp::Flakes, Xp::FetchClosure, + Xp::DynamicDerivations, }; evalSettings.pureEval = false; EvalState state({}, openStore("dummy://")); @@ -402,16 +410,16 @@ void mainWrapped(int argc, char * * argv) Finally printCompletions([&]() { - if (completions) { - switch (completionType) { - case ctNormal: + if (args.completions) { + switch (args.completions->type) { + case Completions::Type::Normal: logger->cout("normal"); break; - case ctFilenames: + case Completions::Type::Filenames: logger->cout("filenames"); break; - case ctAttrs: + case Completions::Type::Attrs: logger->cout("attrs"); break; } - for (auto & s : *completions) + for (auto & s : args.completions->completions) logger->cout(s.completion + "\t" + trim(s.description)); } }); @@ -419,7 +427,7 @@ void mainWrapped(int argc, char * * argv) try { args.parseCmdline(argvToStrings(argc, argv)); } catch (UsageError &) { - if (!args.helpRequested && !completions) throw; + if (!args.helpRequested && !args.completions) throw; } if (args.helpRequested) { @@ -436,10 +444,7 @@ void mainWrapped(int argc, char * * argv) return; } - if (completions) { - args.completionHook(); - return; - } + if (args.completions) return; if (args.showVersion) { printVersion(programName); diff --git a/src/nix/nix.md b/src/nix/nix.md index e0f459d6b..6e7e8a649 100644 --- a/src/nix/nix.md +++ b/src/nix/nix.md @@ -132,6 +132,8 @@ subcommands, these are `packages.`*system*, attributes `packages.x86_64-linux.hello`, `legacyPackages.x86_64-linux.hello` and `hello`. +If *attrpath* begins with `.` then no prefixes or defaults are attempted. This allows the form *flakeref*[`#.`*attrpath*], such as `github:NixOS/nixpkgs#.lib.fakeSha256` to avoid a search of `packages.*system*.lib.fakeSha256` + ### Store path Example: `/nix/store/v5sv61sszx301i0x6xysaqzla09nksnd-hello-2.10` diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index 613c5b191..c16864d30 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -90,7 +90,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON std::cout << store->pathInfoToJSON( // FIXME: preserve order? 
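Tying back to the `nix.md` note above about attribute paths that begin with `.`: a short, hedged example of the verbatim-attrpath form, using the same attribute the documentation change mentions (output elided):

```console
# With a leading `.`, the attribute path is used as-is rather than being
# searched for under packages.<system>. and the other default prefixes.
$ nix eval github:NixOS/nixpkgs#.lib.fakeSha256
```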
StorePathSet(storePaths.begin(), storePaths.end()), - true, showClosureSize, SRI, AllowInvalid).dump(); + true, showClosureSize, HashFormat::SRI, AllowInvalid).dump(); } else { diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index b67d381ca..3ed7946a8 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -310,13 +310,13 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON if (json) { auto res = nlohmann::json::object(); res["storePath"] = store->printStorePath(storePath); - res["hash"] = hash.to_string(SRI, true); + res["hash"] = hash.to_string(HashFormat::SRI, true); logger->cout(res.dump()); } else { notice("Downloaded '%s' to '%s' (hash '%s').", url, store->printStorePath(storePath), - hash.to_string(SRI, true)); + hash.to_string(HashFormat::SRI, true)); } } }; diff --git a/src/nix/registry.cc b/src/nix/registry.cc index cb94bbd31..f509ccae8 100644 --- a/src/nix/registry.cc +++ b/src/nix/registry.cc @@ -175,8 +175,8 @@ struct CmdRegistryPin : RegistryCommand, EvalCommand .label = "locked", .optional = true, .handler = {&locked}, - .completer = {[&](size_t, std::string_view prefix) { - completeFlakeRef(getStore(), prefix); + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { + completeFlakeRef(completions, getStore(), prefix); }} }); } diff --git a/src/nix/repl.cc b/src/nix/repl.cc index 9677c1b48..63fe3044b 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -47,7 +47,7 @@ struct CmdRepl : RawInstallablesCommand void applyDefaultInstallables(std::vector & rawInstallables) override { - if (!experimentalFeatureSettings.isEnabled(Xp::ReplFlake) && !(file) && rawInstallables.size() >= 1) { + if (!experimentalFeatureSettings.isEnabled(Xp::Flakes) && !(file) && rawInstallables.size() >= 1) { warn("future versions of Nix will require using `--file` to load a file"); if (rawInstallables.size() > 1) warn("more than one input file is not currently supported"); diff --git a/src/nix/repl.md b/src/nix/repl.md index c5113be61..32c08e24b 100644 --- a/src/nix/repl.md +++ b/src/nix/repl.md @@ -36,16 +36,13 @@ R""( Loading Installable ''... Added 1 variables. - # nix repl --extra-experimental-features 'flakes repl-flake' nixpkgs + # nix repl --extra-experimental-features 'flakes' nixpkgs Loading Installable 'flake:nixpkgs#'... Added 5 variables. 
nix-repl> legacyPackages.x86_64-linux.emacs.name "emacs-27.1" - nix-repl> legacyPackages.x86_64-linux.emacs.name - "emacs-27.1" - nix-repl> :q # nix repl --expr 'import {}' diff --git a/src/nix/shell.md b/src/nix/shell.md index 1668104b1..f36919575 100644 --- a/src/nix/shell.md +++ b/src/nix/shell.md @@ -26,7 +26,7 @@ R""( * Run multiple commands in a shell environment: ```console - # nix shell nixpkgs#gnumake --command sh --command "cd src && make" + # nix shell nixpkgs#gnumake --command sh -c "cd src && make" ``` * Run GNU Hello in a chroot store: diff --git a/src/nix/ping-store.cc b/src/nix/store-info.cc similarity index 78% rename from src/nix/ping-store.cc rename to src/nix/store-info.cc index ec450e8e0..a7c595761 100644 --- a/src/nix/ping-store.cc +++ b/src/nix/store-info.cc @@ -17,7 +17,7 @@ struct CmdPingStore : StoreCommand, MixJSON std::string doc() override { return - #include "ping-store.md" + #include "store-info.md" ; } @@ -46,4 +46,15 @@ struct CmdPingStore : StoreCommand, MixJSON } }; -static auto rCmdPingStore = registerCommand2({"store", "ping"}); +struct CmdInfoStore : CmdPingStore +{ + void run(nix::ref store) override + { + warn("'nix store ping' is a deprecated alias for 'nix store info'"); + CmdPingStore::run(store); + } +}; + + +static auto rCmdPingStore = registerCommand2({"store", "info"}); +static auto rCmdInfoStore = registerCommand2({"store", "ping"}); diff --git a/src/nix/ping-store.md b/src/nix/store-info.md similarity index 82% rename from src/nix/ping-store.md rename to src/nix/store-info.md index 8c846791b..f86efd722 100644 --- a/src/nix/ping-store.md +++ b/src/nix/store-info.md @@ -5,19 +5,19 @@ R""( * Test whether connecting to a remote Nix store via SSH works: ```console - # nix store ping --store ssh://mac1 + # nix store info --store ssh://mac1 ``` * Test whether a URL is a valid binary cache: ```console - # nix store ping --store https://cache.nixos.org + # nix store info --store https://cache.nixos.org ``` * Test whether the Nix daemon is up and running: ```console - # nix store ping --store daemon + # nix store info --store daemon ``` # Description diff --git a/src/nix/verify.cc b/src/nix/verify.cc index 0b306cc11..adaa33c0c 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -108,8 +108,8 @@ struct CmdVerify : StorePathsCommand act2.result(resCorruptedPath, store->printStorePath(info->path)); printError("path '%s' was modified! 
expected hash '%s', got '%s'", store->printStorePath(info->path), - info->narHash.to_string(Base32, true), - hash.first.to_string(Base32, true)); + info->narHash.to_string(HashFormat::Base32, true), + hash.first.to_string(HashFormat::Base32, true)); } } diff --git a/src/nix/why-depends.cc b/src/nix/why-depends.cc index 592de773c..055cf6d0d 100644 --- a/src/nix/why-depends.cc +++ b/src/nix/why-depends.cc @@ -38,17 +38,13 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions expectArgs({ .label = "package", .handler = {&_package}, - .completer = {[&](size_t, std::string_view prefix) { - completeInstallable(prefix); - }} + .completer = getCompleteInstallable(), }); expectArgs({ .label = "dependency", .handler = {&_dependency}, - .completer = {[&](size_t, std::string_view prefix) { - completeInstallable(prefix); - }} + .completer = getCompleteInstallable(), }); addFlag({ diff --git a/tests/dyn-drv/local.mk b/tests/dyn-drv/local.mk deleted file mode 100644 index f065a5627..000000000 --- a/tests/dyn-drv/local.mk +++ /dev/null @@ -1,11 +0,0 @@ -dyn-drv-tests := \ - $(d)/text-hashed-output.sh \ - $(d)/recursive-mod-json.sh - -install-tests-groups += dyn-drv - -clean-files += \ - $(d)/config.nix - -test-deps += \ - tests/dyn-drv/config.nix diff --git a/tests/add.sh b/tests/functional/add.sh similarity index 100% rename from tests/add.sh rename to tests/functional/add.sh diff --git a/tests/bad.tar.xz b/tests/functional/bad.tar.xz similarity index 100% rename from tests/bad.tar.xz rename to tests/functional/bad.tar.xz diff --git a/tests/bash-profile.sh b/tests/functional/bash-profile.sh similarity index 78% rename from tests/bash-profile.sh rename to tests/functional/bash-profile.sh index e2e0d1090..3faeaaba1 100644 --- a/tests/bash-profile.sh +++ b/tests/functional/bash-profile.sh @@ -1,6 +1,6 @@ source common.sh -sed -e "s|@localstatedir@|$TEST_ROOT/profile-var|g" -e "s|@coreutils@|$coreutils|g" < ../scripts/nix-profile.sh.in > $TEST_ROOT/nix-profile.sh +sed -e "s|@localstatedir@|$TEST_ROOT/profile-var|g" -e "s|@coreutils@|$coreutils|g" < ../../scripts/nix-profile.sh.in > $TEST_ROOT/nix-profile.sh user=$(whoami) rm -rf $TEST_HOME $TEST_ROOT/profile-var diff --git a/tests/big-derivation-attr.nix b/tests/functional/big-derivation-attr.nix similarity index 100% rename from tests/big-derivation-attr.nix rename to tests/functional/big-derivation-attr.nix diff --git a/tests/binary-cache-build-remote.sh b/tests/functional/binary-cache-build-remote.sh similarity index 100% rename from tests/binary-cache-build-remote.sh rename to tests/functional/binary-cache-build-remote.sh diff --git a/tests/binary-cache.sh b/tests/functional/binary-cache.sh similarity index 100% rename from tests/binary-cache.sh rename to tests/functional/binary-cache.sh diff --git a/tests/brotli.sh b/tests/functional/brotli.sh similarity index 100% rename from tests/brotli.sh rename to tests/functional/brotli.sh diff --git a/tests/build-delete.sh b/tests/functional/build-delete.sh similarity index 100% rename from tests/build-delete.sh rename to tests/functional/build-delete.sh diff --git a/tests/build-dry.sh b/tests/functional/build-dry.sh similarity index 100% rename from tests/build-dry.sh rename to tests/functional/build-dry.sh diff --git a/tests/build-hook-ca-fixed.nix b/tests/functional/build-hook-ca-fixed.nix similarity index 91% rename from tests/build-hook-ca-fixed.nix rename to tests/functional/build-hook-ca-fixed.nix index 4cb9e85d1..0ce6d9b12 100644 --- a/tests/build-hook-ca-fixed.nix +++ 
b/tests/functional/build-hook-ca-fixed.nix @@ -8,7 +8,10 @@ let derivation ({ inherit system; builder = busybox; - args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")]; + args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" '' + if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; + eval "$buildCommand" + '')]; outputHashMode = "recursive"; outputHashAlgo = "sha256"; } // removeAttrs args ["builder" "meta" "passthru"]) diff --git a/tests/build-hook-ca-floating.nix b/tests/functional/build-hook-ca-floating.nix similarity index 100% rename from tests/build-hook-ca-floating.nix rename to tests/functional/build-hook-ca-floating.nix diff --git a/tests/build-hook.nix b/tests/functional/build-hook.nix similarity index 90% rename from tests/build-hook.nix rename to tests/functional/build-hook.nix index 7effd7903..99a13aee4 100644 --- a/tests/build-hook.nix +++ b/tests/functional/build-hook.nix @@ -14,7 +14,10 @@ let derivation ({ inherit system; builder = busybox; - args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")]; + args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" '' + if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; + eval "$buildCommand" + '')]; } // removeAttrs args ["builder" "meta" "passthru"] // caArgs) // { meta = args.meta or {}; passthru = args.passthru or {}; }; diff --git a/tests/build-remote-content-addressed-fixed.sh b/tests/functional/build-remote-content-addressed-fixed.sh similarity index 100% rename from tests/build-remote-content-addressed-fixed.sh rename to tests/functional/build-remote-content-addressed-fixed.sh diff --git a/tests/build-remote-content-addressed-floating.sh b/tests/functional/build-remote-content-addressed-floating.sh similarity index 100% rename from tests/build-remote-content-addressed-floating.sh rename to tests/functional/build-remote-content-addressed-floating.sh diff --git a/tests/build-remote-input-addressed.sh b/tests/functional/build-remote-input-addressed.sh similarity index 100% rename from tests/build-remote-input-addressed.sh rename to tests/functional/build-remote-input-addressed.sh diff --git a/tests/build-remote-trustless-after.sh b/tests/functional/build-remote-trustless-after.sh similarity index 100% rename from tests/build-remote-trustless-after.sh rename to tests/functional/build-remote-trustless-after.sh diff --git a/tests/build-remote-trustless-should-fail-0.sh b/tests/functional/build-remote-trustless-should-fail-0.sh similarity index 100% rename from tests/build-remote-trustless-should-fail-0.sh rename to tests/functional/build-remote-trustless-should-fail-0.sh diff --git a/tests/build-remote-trustless-should-pass-0.sh b/tests/functional/build-remote-trustless-should-pass-0.sh similarity index 100% rename from tests/build-remote-trustless-should-pass-0.sh rename to tests/functional/build-remote-trustless-should-pass-0.sh diff --git a/tests/build-remote-trustless-should-pass-1.sh b/tests/functional/build-remote-trustless-should-pass-1.sh similarity index 100% rename from tests/build-remote-trustless-should-pass-1.sh rename to tests/functional/build-remote-trustless-should-pass-1.sh diff --git a/tests/build-remote-trustless-should-pass-2.sh b/tests/functional/build-remote-trustless-should-pass-2.sh similarity index 100% rename from 
tests/build-remote-trustless-should-pass-2.sh rename to tests/functional/build-remote-trustless-should-pass-2.sh diff --git a/tests/build-remote-trustless-should-pass-3.sh b/tests/functional/build-remote-trustless-should-pass-3.sh similarity index 100% rename from tests/build-remote-trustless-should-pass-3.sh rename to tests/functional/build-remote-trustless-should-pass-3.sh diff --git a/tests/build-remote-trustless.sh b/tests/functional/build-remote-trustless.sh similarity index 82% rename from tests/build-remote-trustless.sh rename to tests/functional/build-remote-trustless.sh index 9df44e0c5..81e5253bf 100644 --- a/tests/build-remote-trustless.sh +++ b/tests/functional/build-remote-trustless.sh @@ -6,7 +6,7 @@ unset NIX_STATE_DIR remoteDir=$TEST_ROOT/remote -# Note: ssh{-ng}://localhost bypasses ssh. See tests/build-remote.sh for +# Note: ssh{-ng}://localhost bypasses ssh. See tests/functional/build-remote.sh for # more details. nix-build $file -o $TEST_ROOT/result --max-jobs 0 \ --arg busybox $busybox \ diff --git a/tests/build-remote.sh b/tests/functional/build-remote.sh similarity index 100% rename from tests/build-remote.sh rename to tests/functional/build-remote.sh diff --git a/tests/build.sh b/tests/functional/build.sh similarity index 98% rename from tests/build.sh rename to tests/functional/build.sh index 8ae20f0df..7fbdb0f07 100644 --- a/tests/build.sh +++ b/tests/functional/build.sh @@ -78,7 +78,7 @@ expectStderr 1 nix build --impure --expr 'with (import ./multiple-outputs.nix).e | grepQuiet "has 2 entries in its context. It should only have exactly one entry" nix build --impure --json --expr 'builtins.unsafeDiscardOutputDependency (import ./multiple-outputs.nix).e.a_a.drvPath' --no-link | jq --exit-status ' - (.[0] | .path | match(".*multiple-outputs-e.drv")) + (.[0] | match(".*multiple-outputs-e.drv")) ' # Test building from raw store path to drv not expression. diff --git a/tests/ca-shell.nix b/tests/functional/ca-shell.nix similarity index 100% rename from tests/ca-shell.nix rename to tests/functional/ca-shell.nix diff --git a/tests/functional/ca/build-cache.sh b/tests/functional/ca/build-cache.sh new file mode 100644 index 000000000..6a4080fec --- /dev/null +++ b/tests/functional/ca/build-cache.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash + +source common.sh + +# The substituters didn't work prior to this time. +requireDaemonNewerThan "2.18.0pre20230808" + +drv=$(nix-instantiate ./content-addressed.nix -A rootCA --arg seed 1)^out +nix derivation show "$drv" --arg seed 1 + +buildAttr () { + local derivationPath=$1 + local seedValue=$2 + shift; shift + local args=("./content-addressed.nix" "-A" "$derivationPath" --arg seed "$seedValue" "--no-out-link") + args+=("$@") + nix-build "${args[@]}" +} + +copyAttr () { + local derivationPath=$1 + local seedValue=$2 + shift; shift + local args=("-f" "./content-addressed.nix" "$derivationPath" --arg seed "$seedValue") + args+=("$@") + # Note: to copy CA derivations, we need to copy the realisations, which + # currently requires naming the installables, not just the derivation output + # path. + nix copy --to file://$cacheDir "${args[@]}" +} + +testRemoteCacheFor () { + local derivationPath=$1 + clearCache + copyAttr "$derivationPath" 1 + clearStore + # Check nothing gets built. 
+ buildAttr "$derivationPath" 1 --option substituters file://$cacheDir --no-require-sigs |& grepQuietInverse " will be built:" +} + +testRemoteCache () { + testRemoteCacheFor rootCA + testRemoteCacheFor dependentCA + testRemoteCacheFor dependentNonCA + testRemoteCacheFor dependentFixedOutput + testRemoteCacheFor dependentForBuildCA + testRemoteCacheFor dependentForBuildNonCA +} + +clearStore +testRemoteCache \ No newline at end of file diff --git a/tests/ca/build-dry.sh b/tests/functional/ca/build-dry.sh similarity index 100% rename from tests/ca/build-dry.sh rename to tests/functional/ca/build-dry.sh diff --git a/tests/ca/build-with-garbage-path.sh b/tests/functional/ca/build-with-garbage-path.sh similarity index 100% rename from tests/ca/build-with-garbage-path.sh rename to tests/functional/ca/build-with-garbage-path.sh diff --git a/tests/ca/build.sh b/tests/functional/ca/build.sh similarity index 82% rename from tests/ca/build.sh rename to tests/functional/ca/build.sh index 7754ad276..e1a8a7625 100644 --- a/tests/ca/build.sh +++ b/tests/functional/ca/build.sh @@ -2,7 +2,7 @@ source common.sh -drv=$(nix-instantiate ./content-addressed.nix -A rootCA --arg seed 1) +drv=$(nix-instantiate ./content-addressed.nix -A rootCA --arg seed 1)^out nix derivation show "$drv" --arg seed 1 buildAttr () { @@ -14,14 +14,6 @@ buildAttr () { nix-build "${args[@]}" } -testRemoteCache () { - clearCache - local outPath=$(buildAttr dependentNonCA 1) - nix copy --to file://$cacheDir $outPath - clearStore - buildAttr dependentNonCA 1 --option substituters file://$cacheDir --no-require-sigs |& grepQuietInverse "building dependent-non-ca" -} - testDeterministicCA () { [[ $(buildAttr rootCA 1) = $(buildAttr rootCA 2) ]] } @@ -66,8 +58,6 @@ testNormalization () { test "$(stat -c %Y $outPath)" -eq 1 } -# Disabled until we have it properly working -# testRemoteCache clearStore testNormalization testDeterministicCA diff --git a/tests/ca/common.sh b/tests/functional/ca/common.sh similarity index 100% rename from tests/ca/common.sh rename to tests/functional/ca/common.sh diff --git a/tests/ca/concurrent-builds.sh b/tests/functional/ca/concurrent-builds.sh similarity index 100% rename from tests/ca/concurrent-builds.sh rename to tests/functional/ca/concurrent-builds.sh diff --git a/tests/ca/config.nix.in b/tests/functional/ca/config.nix.in similarity index 100% rename from tests/ca/config.nix.in rename to tests/functional/ca/config.nix.in diff --git a/tests/ca/content-addressed.nix b/tests/functional/ca/content-addressed.nix similarity index 83% rename from tests/ca/content-addressed.nix rename to tests/functional/ca/content-addressed.nix index 81bc4bf5c..2559c562f 100644 --- a/tests/ca/content-addressed.nix +++ b/tests/functional/ca/content-addressed.nix @@ -61,6 +61,24 @@ rec { echo ${rootCA}/non-ca-hello > $out/dep ''; }; + dependentForBuildCA = mkCADerivation { + name = "dependent-for-build-ca"; + buildCommand = '' + echo "Depends on rootCA for building only" + mkdir -p $out + echo ${rootCA} + touch $out + ''; + }; + dependentForBuildNonCA = mkDerivation { + name = "dependent-for-build-non-ca"; + buildCommand = '' + echo "Depends on rootCA for building only" + mkdir -p $out + echo ${rootCA} + touch $out + ''; + }; dependentFixedOutput = mkDerivation { name = "dependent-fixed-output"; outputHashMode = "recursive"; diff --git a/tests/ca/derivation-json.sh b/tests/functional/ca/derivation-json.sh similarity index 100% rename from tests/ca/derivation-json.sh rename to tests/functional/ca/derivation-json.sh diff --git 
a/tests/ca/duplicate-realisation-in-closure.sh b/tests/functional/ca/duplicate-realisation-in-closure.sh similarity index 100% rename from tests/ca/duplicate-realisation-in-closure.sh rename to tests/functional/ca/duplicate-realisation-in-closure.sh diff --git a/tests/ca/flake.nix b/tests/functional/ca/flake.nix similarity index 100% rename from tests/ca/flake.nix rename to tests/functional/ca/flake.nix diff --git a/tests/ca/gc.sh b/tests/functional/ca/gc.sh similarity index 100% rename from tests/ca/gc.sh rename to tests/functional/ca/gc.sh diff --git a/tests/ca/import-derivation.sh b/tests/functional/ca/import-derivation.sh similarity index 100% rename from tests/ca/import-derivation.sh rename to tests/functional/ca/import-derivation.sh diff --git a/tests/ca/local.mk b/tests/functional/ca/local.mk similarity index 90% rename from tests/ca/local.mk rename to tests/functional/ca/local.mk index d15312708..fd87b8d1f 100644 --- a/tests/ca/local.mk +++ b/tests/functional/ca/local.mk @@ -1,6 +1,7 @@ ca-tests := \ $(d)/build-with-garbage-path.sh \ $(d)/build.sh \ + $(d)/build-cache.sh \ $(d)/concurrent-builds.sh \ $(d)/derivation-json.sh \ $(d)/duplicate-realisation-in-closure.sh \ @@ -24,4 +25,4 @@ clean-files += \ $(d)/config.nix test-deps += \ - tests/ca/config.nix + tests/functional/ca/config.nix diff --git a/tests/ca/new-build-cmd.sh b/tests/functional/ca/new-build-cmd.sh similarity index 100% rename from tests/ca/new-build-cmd.sh rename to tests/functional/ca/new-build-cmd.sh diff --git a/tests/ca/nix-copy.sh b/tests/functional/ca/nix-copy.sh similarity index 100% rename from tests/ca/nix-copy.sh rename to tests/functional/ca/nix-copy.sh diff --git a/tests/ca/nix-run.sh b/tests/functional/ca/nix-run.sh similarity index 100% rename from tests/ca/nix-run.sh rename to tests/functional/ca/nix-run.sh diff --git a/tests/ca/nix-shell.sh b/tests/functional/ca/nix-shell.sh similarity index 100% rename from tests/ca/nix-shell.sh rename to tests/functional/ca/nix-shell.sh diff --git a/tests/ca/nondeterministic.nix b/tests/functional/ca/nondeterministic.nix similarity index 100% rename from tests/ca/nondeterministic.nix rename to tests/functional/ca/nondeterministic.nix diff --git a/tests/ca/post-hook.sh b/tests/functional/ca/post-hook.sh similarity index 100% rename from tests/ca/post-hook.sh rename to tests/functional/ca/post-hook.sh diff --git a/tests/ca/racy.nix b/tests/functional/ca/racy.nix similarity index 100% rename from tests/ca/racy.nix rename to tests/functional/ca/racy.nix diff --git a/tests/ca/recursive.sh b/tests/functional/ca/recursive.sh similarity index 100% rename from tests/ca/recursive.sh rename to tests/functional/ca/recursive.sh diff --git a/tests/ca/repl.sh b/tests/functional/ca/repl.sh similarity index 100% rename from tests/ca/repl.sh rename to tests/functional/ca/repl.sh diff --git a/tests/ca/selfref-gc.sh b/tests/functional/ca/selfref-gc.sh similarity index 100% rename from tests/ca/selfref-gc.sh rename to tests/functional/ca/selfref-gc.sh diff --git a/tests/ca/signatures.sh b/tests/functional/ca/signatures.sh similarity index 100% rename from tests/ca/signatures.sh rename to tests/functional/ca/signatures.sh diff --git a/tests/ca/substitute.sh b/tests/functional/ca/substitute.sh similarity index 100% rename from tests/ca/substitute.sh rename to tests/functional/ca/substitute.sh diff --git a/tests/ca/why-depends.sh b/tests/functional/ca/why-depends.sh similarity index 100% rename from tests/ca/why-depends.sh rename to tests/functional/ca/why-depends.sh diff --git 
a/tests/case-hack.sh b/tests/functional/case-hack.sh similarity index 100% rename from tests/case-hack.sh rename to tests/functional/case-hack.sh diff --git a/tests/case.nar b/tests/functional/case.nar similarity index 100% rename from tests/case.nar rename to tests/functional/case.nar diff --git a/tests/check-refs.nix b/tests/functional/check-refs.nix similarity index 97% rename from tests/check-refs.nix rename to tests/functional/check-refs.nix index 99d69a226..89690e456 100644 --- a/tests/check-refs.nix +++ b/tests/functional/check-refs.nix @@ -2,7 +2,7 @@ with import ./config.nix; rec { - dep = import ./dependencies.nix; + dep = import ./dependencies.nix {}; makeTest = nr: args: mkDerivation ({ name = "check-refs-" + toString nr; diff --git a/tests/check-refs.sh b/tests/functional/check-refs.sh similarity index 100% rename from tests/check-refs.sh rename to tests/functional/check-refs.sh diff --git a/tests/check-reqs.nix b/tests/functional/check-reqs.nix similarity index 100% rename from tests/check-reqs.nix rename to tests/functional/check-reqs.nix diff --git a/tests/check-reqs.sh b/tests/functional/check-reqs.sh similarity index 100% rename from tests/check-reqs.sh rename to tests/functional/check-reqs.sh diff --git a/tests/check.nix b/tests/functional/check.nix similarity index 100% rename from tests/check.nix rename to tests/functional/check.nix diff --git a/tests/check.sh b/tests/functional/check.sh similarity index 100% rename from tests/check.sh rename to tests/functional/check.sh diff --git a/tests/common.sh b/tests/functional/common.sh similarity index 100% rename from tests/common.sh rename to tests/functional/common.sh diff --git a/tests/common/vars-and-functions.sh.in b/tests/functional/common/vars-and-functions.sh.in similarity index 97% rename from tests/common/vars-and-functions.sh.in rename to tests/functional/common/vars-and-functions.sh.in index dc7ce13cc..967d6be54 100644 --- a/tests/common/vars-and-functions.sh.in +++ b/tests/functional/common/vars-and-functions.sh.in @@ -6,7 +6,7 @@ COMMON_VARS_AND_FUNCTIONS_SH_SOURCED=1 export PS4='+(${BASH_SOURCE[0]-$0}:$LINENO) ' -export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test)/${TEST_NAME:-default} +export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test)/${TEST_NAME:-default/tests\/functional//} export NIX_STORE_DIR if ! NIX_STORE_DIR=$(readlink -f $TEST_ROOT/store 2> /dev/null); then # Maybe the build directory is symlinked. @@ -195,7 +195,7 @@ expect() { shift "$@" && res=0 || res="$?" if [[ $res -ne $expected ]]; then - echo "Expected '$expected' but got '$res' while running '${*@Q}'" >&2 + echo "Expected exit code '$expected' but got '$res' from command ${*@Q}" >&2 return 1 fi return 0 @@ -209,7 +209,7 @@ expectStderr() { shift "$@" 2>&1 && res=0 || res="$?" 
if [[ $res -ne $expected ]]; then - echo "Expected '$expected' but got '$res' while running '${*@Q}'" >&2 + echo "Expected exit code '$expected' but got '$res' from command ${*@Q}" >&2 return 1 fi return 0 diff --git a/tests/completions.sh b/tests/functional/completions.sh similarity index 91% rename from tests/completions.sh rename to tests/functional/completions.sh index 19dc61098..7c1e4b287 100644 --- a/tests/completions.sh +++ b/tests/functional/completions.sh @@ -48,6 +48,8 @@ EOF [[ "$(NIX_GET_COMPLETIONS=5 nix build ./foo ./bar --override-input '')" == $'normal\na\t\nb\t' ]] ## With tilde expansion [[ "$(HOME=$PWD NIX_GET_COMPLETIONS=4 nix build '~/foo' --override-input '')" == $'normal\na\t' ]] +[[ "$(HOME=$PWD NIX_GET_COMPLETIONS=5 nix flake show '~/foo' --update-input '')" == $'normal\na\t' ]] +[[ "$(HOME=$PWD NIX_GET_COMPLETIONS=4 nix run '~/foo' --update-input '')" == $'normal\na\t' ]] ## Out of order [[ "$(NIX_GET_COMPLETIONS=3 nix build --update-input '' ./foo)" == $'normal\na\t' ]] [[ "$(NIX_GET_COMPLETIONS=4 nix build ./foo --update-input '' ./bar)" == $'normal\na\t\nb\t' ]] diff --git a/tests/compression-levels.sh b/tests/functional/compression-levels.sh similarity index 100% rename from tests/compression-levels.sh rename to tests/functional/compression-levels.sh diff --git a/tests/compute-levels.sh b/tests/functional/compute-levels.sh similarity index 100% rename from tests/compute-levels.sh rename to tests/functional/compute-levels.sh diff --git a/tests/config.nix.in b/tests/functional/config.nix.in similarity index 82% rename from tests/config.nix.in rename to tests/functional/config.nix.in index 7facbdcbc..00dc007e1 100644 --- a/tests/config.nix.in +++ b/tests/functional/config.nix.in @@ -20,7 +20,10 @@ rec { derivation ({ inherit system; builder = shell; - args = ["-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")]; + args = ["-e" args.builder or (builtins.toFile "builder-${args.name}.sh" '' + if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; + eval "$buildCommand" + '')]; PATH = path; } // caArgs // removeAttrs args ["builder" "meta"]) // { meta = args.meta or {}; }; diff --git a/tests/config.sh b/tests/functional/config.sh similarity index 100% rename from tests/config.sh rename to tests/functional/config.sh diff --git a/tests/config/nix-with-substituters.conf b/tests/functional/config/nix-with-substituters.conf similarity index 100% rename from tests/config/nix-with-substituters.conf rename to tests/functional/config/nix-with-substituters.conf diff --git a/tests/db-migration.sh b/tests/functional/db-migration.sh similarity index 100% rename from tests/db-migration.sh rename to tests/functional/db-migration.sh diff --git a/tests/dependencies.builder0.sh b/tests/functional/dependencies.builder0.sh similarity index 100% rename from tests/dependencies.builder0.sh rename to tests/functional/dependencies.builder0.sh diff --git a/tests/dependencies.nix b/tests/functional/dependencies.nix similarity index 69% rename from tests/dependencies.nix rename to tests/functional/dependencies.nix index 45aca1793..be1a7ae9a 100644 --- a/tests/dependencies.nix +++ b/tests/functional/dependencies.nix @@ -1,3 +1,4 @@ +{ hashInvalidator ? 
"" }: with import ./config.nix; let { @@ -21,6 +22,17 @@ let { ''; }; + fod_input = mkDerivation { + name = "fod-input"; + buildCommand = '' + echo ${hashInvalidator} + echo FOD > $out + ''; + outputHashMode = "flat"; + outputHashAlgo = "sha256"; + outputHash = "1dq9p0hnm1y75q2x40fws5887bq1r840hzdxak0a9djbwvx0b16d"; + }; + body = mkDerivation { name = "dependencies-top"; builder = ./dependencies.builder0.sh + "/FOOBAR/../."; @@ -29,6 +41,7 @@ let { input1_drv = input1; input2_drv = input2; input0_drv = input0; + fod_input_drv = fod_input; meta.description = "Random test package"; }; diff --git a/tests/dependencies.sh b/tests/functional/dependencies.sh similarity index 67% rename from tests/dependencies.sh rename to tests/functional/dependencies.sh index d5cd30396..b93dacac0 100644 --- a/tests/dependencies.sh +++ b/tests/functional/dependencies.sh @@ -53,3 +53,20 @@ nix-store -q --referrers-closure "$input2OutPath" | grep "$outPath" # Check that the derivers are set properly. test $(nix-store -q --deriver "$outPath") = "$drvPath" nix-store -q --deriver "$input2OutPath" | grepQuiet -- "-input-2.drv" + +# --valid-derivers returns the currently single valid .drv file +test "$(nix-store -q --valid-derivers "$outPath")" = "$drvPath" + +# instantiate a different drv with the same output +drvPath2=$(nix-instantiate dependencies.nix --argstr hashInvalidator yay) + +# now --valid-derivers returns both +test "$(nix-store -q --valid-derivers "$outPath" | sort)" = "$(sort <<< "$drvPath"$'\n'"$drvPath2")" + +# check that nix-store --valid-derivers only returns existing drv +nix-store --delete "$drvPath" +test "$(nix-store -q --valid-derivers "$outPath")" = "$drvPath2" + +# check that --valid-derivers returns nothing when there are no valid derivers +nix-store --delete "$drvPath2" +test -z "$(nix-store -q --valid-derivers "$outPath")" diff --git a/tests/derivation-json.sh b/tests/functional/derivation-json.sh similarity index 100% rename from tests/derivation-json.sh rename to tests/functional/derivation-json.sh diff --git a/tests/dummy b/tests/functional/dummy similarity index 100% rename from tests/dummy rename to tests/functional/dummy diff --git a/tests/dump-db.sh b/tests/functional/dump-db.sh similarity index 100% rename from tests/dump-db.sh rename to tests/functional/dump-db.sh diff --git a/tests/functional/dyn-drv/build-built-drv.sh b/tests/functional/dyn-drv/build-built-drv.sh new file mode 100644 index 000000000..647be9457 --- /dev/null +++ b/tests/functional/dyn-drv/build-built-drv.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +source common.sh + +# In the corresponding nix file, we have two derivations: the first, named `hello`, +# is a normal recursive derivation, while the second, named dependent, has the +# new outputHashMode "text". Note that in "dependent", we don't refer to the +# build output of `hello`, but only to the path of the drv file. 
For this reason, +# we only need to: +# +# - instantiate `hello` +# - build `producingDrv` +# - check that the path of the output coincides with that of the original derivation + +out1=$(nix build -f ./text-hashed-output.nix hello --no-link) + +clearStore + +drvDep=$(nix-instantiate ./text-hashed-output.nix -A producingDrv) + +expectStderr 1 nix build "${drvDep}^out^out" --no-link | grepQuiet "Building dynamic derivations in one shot is not yet implemented" diff --git a/tests/dyn-drv/common.sh b/tests/functional/dyn-drv/common.sh similarity index 100% rename from tests/dyn-drv/common.sh rename to tests/functional/dyn-drv/common.sh diff --git a/tests/dyn-drv/config.nix.in b/tests/functional/dyn-drv/config.nix.in similarity index 100% rename from tests/dyn-drv/config.nix.in rename to tests/functional/dyn-drv/config.nix.in diff --git a/tests/functional/dyn-drv/dep-built-drv.sh b/tests/functional/dyn-drv/dep-built-drv.sh new file mode 100644 index 000000000..4f6e9b080 --- /dev/null +++ b/tests/functional/dyn-drv/dep-built-drv.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +source common.sh + +out1=$(nix-build ./text-hashed-output.nix -A hello --no-out-link) + +clearStore + +expectStderr 1 nix-build ./text-hashed-output.nix -A wrapper --no-out-link | grepQuiet "Building dynamic derivations in one shot is not yet implemented" + +# diff -r $out1 $out2 diff --git a/tests/functional/dyn-drv/eval-outputOf.sh b/tests/functional/dyn-drv/eval-outputOf.sh new file mode 100644 index 000000000..9467feb8d --- /dev/null +++ b/tests/functional/dyn-drv/eval-outputOf.sh @@ -0,0 +1,80 @@ +#!/usr/bin/env bash + +source ./common.sh + +# Without the dynamic-derivations XP feature, we don't have the builtin. +nix --experimental-features 'nix-command' eval --impure --expr \ + 'assert ! (builtins ? outputOf); ""' + +# Test that a string is required. +# +# We currently require a string to be passed, rather than a derivation +# object that could be coerced to a string. We might liberalise this in +# the future so it does work, but there are some design questions to +# resolve first. Adding a test so we don't liberalise it by accident. +expectStderr 1 nix --experimental-features 'nix-command dynamic-derivations' eval --impure --expr \ + 'builtins.outputOf (import ../dependencies.nix {}) "out"' \ + | grepQuiet "value is a set while a string was expected" + +# Test that "DrvDeep" string contexts are not supported at this time +# +# Like the above, this is a restriction we could relax later. +expectStderr 1 nix --experimental-features 'nix-command dynamic-derivations' eval --impure --expr \ + 'builtins.outputOf (import ../dependencies.nix {}).drvPath "out"' \ + | grepQuiet "has a context which refers to a complete source and binary closure. This is not supported at this time" + +# Test using `builtins.outputOf` with static derivations +testStaticHello () { + nix eval --impure --expr \ + 'with (import ./text-hashed-output.nix); let + a = hello.outPath; + b = builtins.outputOf (builtins.unsafeDiscardOutputDependency hello.drvPath) "out"; + in builtins.trace a + (builtins.trace b + (assert a == b; null))' +} + +# Test with a regular old input-addresed derivation +# +# `builtins.outputOf` works without ca-derivations and doesn't create a +# placeholder but just returns the output path. +testStaticHello + +# Test with content addressed derivation. 
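
The `testStaticHello` helper above condenses to a single invocation. A minimal sketch of the static (non-dynamic) case, assuming the `text-hashed-output.nix` from this directory and the `dynamic-derivations` experimental feature: `builtins.outputOf` applied to a constant derivation's `.drvPath` should simply agree with its `.outPath`.

    # Sketch only: the same check as testStaticHello, written out inline.
    nix --experimental-features 'nix-command dynamic-derivations' \
        eval --impure --expr '
          with (import ./text-hashed-output.nix);
          let
            a = hello.outPath;
            b = builtins.outputOf
                  (builtins.unsafeDiscardOutputDependency hello.drvPath) "out";
          in assert a == b; a'

Setting NIX_TESTS_CA_BY_DEFAULT=1 runs the same expression against the content-addressed variant, which is what the next invocation of testStaticHello does.
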
+NIX_TESTS_CA_BY_DEFAULT=1 testStaticHello + +# Test with derivation-producing derivation +# +# This is hardly different from the preceding cases, except that we're +# only taking 1 outputOf out of 2 possible outputOfs. Note that +# `.outPath` could be defined as `outputOf drvPath`, which is what we're +# testing here. The other `outputOf` that we're not testing here is the +# use of _dynamic_ derivations. +nix eval --impure --expr \ + 'with (import ./text-hashed-output.nix); let + a = producingDrv.outPath; + b = builtins.outputOf (builtins.builtins.unsafeDiscardOutputDependency producingDrv.drvPath) "out"; + in builtins.trace a + (builtins.trace b + (assert a == b; null))' + +# Test with unbuilt output of derivation-producing derivation. +# +# This function similar to `testStaticHello` used above, but instead of +# checking the property on a constant derivation, we check it on a +# derivation that's from another derivation's output (outPath). +testDynamicHello () { + nix eval --impure --expr \ + 'with (import ./text-hashed-output.nix); let + a = builtins.outputOf producingDrv.outPath "out"; + b = builtins.outputOf (builtins.outputOf (builtins.unsafeDiscardOutputDependency producingDrv.drvPath) "out") "out"; + in builtins.trace a + (builtins.trace b + (assert a == b; null))' +} + +# inner dynamic derivation is input-addressed +testDynamicHello + +# inner dynamic derivation is content-addressed +NIX_TESTS_CA_BY_DEFAULT=1 testDynamicHello diff --git a/tests/functional/dyn-drv/local.mk b/tests/functional/dyn-drv/local.mk new file mode 100644 index 000000000..c87534944 --- /dev/null +++ b/tests/functional/dyn-drv/local.mk @@ -0,0 +1,15 @@ +dyn-drv-tests := \ + $(d)/text-hashed-output.sh \ + $(d)/recursive-mod-json.sh \ + $(d)/build-built-drv.sh \ + $(d)/eval-outputOf.sh \ + $(d)/dep-built-drv.sh \ + $(d)/old-daemon-error-hack.sh + +install-tests-groups += dyn-drv + +clean-files += \ + $(d)/config.nix + +test-deps += \ + tests/functional/dyn-drv/config.nix diff --git a/tests/functional/dyn-drv/old-daemon-error-hack.nix b/tests/functional/dyn-drv/old-daemon-error-hack.nix new file mode 100644 index 000000000..c9d4a62d4 --- /dev/null +++ b/tests/functional/dyn-drv/old-daemon-error-hack.nix @@ -0,0 +1,20 @@ +with import ./config.nix; + +# A simple content-addressed derivation. 
+# The derivation can be arbitrarily modified by passing a different `seed`, +# but the output will always be the same +rec { + stub = mkDerivation { + name = "stub"; + buildCommand = '' + echo stub > $out + ''; + }; + wrapper = mkDerivation { + name = "has-dynamic-drv-dep"; + buildCommand = '' + exit 1 # we're not building this derivation + ${builtins.outputOf stub.outPath "out"} + ''; + }; +} diff --git a/tests/functional/dyn-drv/old-daemon-error-hack.sh b/tests/functional/dyn-drv/old-daemon-error-hack.sh new file mode 100644 index 000000000..43b049973 --- /dev/null +++ b/tests/functional/dyn-drv/old-daemon-error-hack.sh @@ -0,0 +1,11 @@ +# Purposely bypassing our usual common for this subgroup +source ../common.sh + +# Need backend to support text-hashing too +isDaemonNewer "2.18.0pre20230906" && skipTest "Daemon is too new" + +enableFeatures "ca-derivations dynamic-derivations" + +restartDaemon + +expectStderr 1 nix-instantiate --read-write-mode ./old-daemon-error-hack.nix | grepQuiet "the daemon is too old to understand dependencies on dynamic derivations" diff --git a/tests/dyn-drv/recursive-mod-json.nix b/tests/functional/dyn-drv/recursive-mod-json.nix similarity index 100% rename from tests/dyn-drv/recursive-mod-json.nix rename to tests/functional/dyn-drv/recursive-mod-json.nix diff --git a/tests/dyn-drv/recursive-mod-json.sh b/tests/functional/dyn-drv/recursive-mod-json.sh similarity index 93% rename from tests/dyn-drv/recursive-mod-json.sh rename to tests/functional/dyn-drv/recursive-mod-json.sh index 070c5c2cb..0698b81bd 100644 --- a/tests/dyn-drv/recursive-mod-json.sh +++ b/tests/functional/dyn-drv/recursive-mod-json.sh @@ -3,6 +3,8 @@ source common.sh # FIXME if [[ $(uname) != Linux ]]; then skipTest "Not running Linux"; fi +export NIX_TESTS_CA_BY_DEFAULT=1 + enableFeatures 'recursive-nix' restartDaemon diff --git a/tests/dyn-drv/text-hashed-output.nix b/tests/functional/dyn-drv/text-hashed-output.nix similarity index 74% rename from tests/dyn-drv/text-hashed-output.nix rename to tests/functional/dyn-drv/text-hashed-output.nix index a700fd102..99203b518 100644 --- a/tests/dyn-drv/text-hashed-output.nix +++ b/tests/functional/dyn-drv/text-hashed-output.nix @@ -12,9 +12,6 @@ rec { mkdir -p $out echo "Hello World" > $out/hello ''; - __contentAddressed = true; - outputHashMode = "recursive"; - outputHashAlgo = "sha256"; }; producingDrv = mkDerivation { name = "hello.drv"; @@ -26,4 +23,11 @@ rec { outputHashMode = "text"; outputHashAlgo = "sha256"; }; + wrapper = mkDerivation { + name = "use-dynamic-drv-in-non-dynamic-drv"; + buildCommand = '' + echo "Copying the output of the dynamic derivation" + cp -r ${builtins.outputOf producingDrv.outPath "out"} $out + ''; + }; } diff --git a/tests/dyn-drv/text-hashed-output.sh b/tests/functional/dyn-drv/text-hashed-output.sh similarity index 100% rename from tests/dyn-drv/text-hashed-output.sh rename to tests/functional/dyn-drv/text-hashed-output.sh diff --git a/tests/eval-store.sh b/tests/functional/eval-store.sh similarity index 100% rename from tests/eval-store.sh rename to tests/functional/eval-store.sh diff --git a/tests/eval.nix b/tests/functional/eval.nix similarity index 100% rename from tests/eval.nix rename to tests/functional/eval.nix diff --git a/tests/eval.sh b/tests/functional/eval.sh similarity index 100% rename from tests/eval.sh rename to tests/functional/eval.sh diff --git a/tests/experimental-features.sh b/tests/functional/experimental-features.sh similarity index 100% rename from tests/experimental-features.sh rename to 
tests/functional/experimental-features.sh diff --git a/tests/export-graph.nix b/tests/functional/export-graph.nix similarity index 81% rename from tests/export-graph.nix rename to tests/functional/export-graph.nix index fdac9583d..64fe36bd1 100644 --- a/tests/export-graph.nix +++ b/tests/functional/export-graph.nix @@ -17,13 +17,13 @@ rec { foo."bar.runtimeGraph" = mkDerivation { name = "dependencies"; builder = builtins.toFile "build-graph-builder" "${printRefs}"; - exportReferencesGraph = ["refs" (import ./dependencies.nix)]; + exportReferencesGraph = ["refs" (import ./dependencies.nix {})]; }; foo."bar.buildGraph" = mkDerivation { name = "dependencies"; builder = builtins.toFile "build-graph-builder" "${printRefs}"; - exportReferencesGraph = ["refs" (import ./dependencies.nix).drvPath]; + exportReferencesGraph = ["refs" (import ./dependencies.nix {}).drvPath]; }; } diff --git a/tests/export-graph.sh b/tests/functional/export-graph.sh similarity index 100% rename from tests/export-graph.sh rename to tests/functional/export-graph.sh diff --git a/tests/export.sh b/tests/functional/export.sh similarity index 100% rename from tests/export.sh rename to tests/functional/export.sh diff --git a/tests/failing.nix b/tests/functional/failing.nix similarity index 74% rename from tests/failing.nix rename to tests/functional/failing.nix index 2a0350d4d..d25e2d6b6 100644 --- a/tests/failing.nix +++ b/tests/functional/failing.nix @@ -6,7 +6,10 @@ let derivation ({ inherit system; builder = busybox; - args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")]; + args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" '' + if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; + eval "$buildCommand" + '')]; } // removeAttrs args ["builder" "meta"]) // { meta = args.meta or {}; }; in diff --git a/tests/fetchClosure.sh b/tests/functional/fetchClosure.sh similarity index 100% rename from tests/fetchClosure.sh rename to tests/functional/fetchClosure.sh diff --git a/tests/fetchGit.sh b/tests/functional/fetchGit.sh similarity index 98% rename from tests/fetchGit.sh rename to tests/functional/fetchGit.sh index 418b4f63f..fc89f2040 100644 --- a/tests/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -35,6 +35,8 @@ unset _NIX_FORCE_HTTP path0=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$TEST_ROOT/worktree).outPath") path0_=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; url = file://$TEST_ROOT/worktree; }).outPath") [[ $path0 = $path0_ ]] +path0_=$(nix eval --impure --raw --expr "(builtins.fetchTree git+file://$TEST_ROOT/worktree).outPath") +[[ $path0 = $path0_ ]] export _NIX_FORCE_HTTP=1 [[ $(tail -n 1 $path0/hello) = "hello" ]] diff --git a/tests/fetchGitRefs.sh b/tests/functional/fetchGitRefs.sh similarity index 100% rename from tests/fetchGitRefs.sh rename to tests/functional/fetchGitRefs.sh diff --git a/tests/fetchGitSubmodules.sh b/tests/functional/fetchGitSubmodules.sh similarity index 100% rename from tests/fetchGitSubmodules.sh rename to tests/functional/fetchGitSubmodules.sh diff --git a/tests/fetchMercurial.sh b/tests/functional/fetchMercurial.sh similarity index 100% rename from tests/fetchMercurial.sh rename to tests/functional/fetchMercurial.sh diff --git a/tests/fetchPath.sh b/tests/functional/fetchPath.sh similarity index 100% rename from tests/fetchPath.sh rename to tests/functional/fetchPath.sh diff --git a/tests/fetchTree-file.sh 
b/tests/functional/fetchTree-file.sh similarity index 100% rename from tests/fetchTree-file.sh rename to tests/functional/fetchTree-file.sh diff --git a/tests/fetchurl.sh b/tests/functional/fetchurl.sh similarity index 100% rename from tests/fetchurl.sh rename to tests/functional/fetchurl.sh diff --git a/tests/filter-source.nix b/tests/functional/filter-source.nix similarity index 100% rename from tests/filter-source.nix rename to tests/functional/filter-source.nix diff --git a/tests/filter-source.sh b/tests/functional/filter-source.sh similarity index 100% rename from tests/filter-source.sh rename to tests/functional/filter-source.sh diff --git a/tests/fixed.builder1.sh b/tests/functional/fixed.builder1.sh similarity index 100% rename from tests/fixed.builder1.sh rename to tests/functional/fixed.builder1.sh diff --git a/tests/fixed.builder2.sh b/tests/functional/fixed.builder2.sh similarity index 100% rename from tests/fixed.builder2.sh rename to tests/functional/fixed.builder2.sh diff --git a/tests/fixed.nix b/tests/functional/fixed.nix similarity index 100% rename from tests/fixed.nix rename to tests/functional/fixed.nix diff --git a/tests/fixed.sh b/tests/functional/fixed.sh similarity index 100% rename from tests/fixed.sh rename to tests/functional/fixed.sh diff --git a/tests/functional/flakes/absolute-attr-paths.sh b/tests/functional/flakes/absolute-attr-paths.sh new file mode 100644 index 000000000..491adceb7 --- /dev/null +++ b/tests/functional/flakes/absolute-attr-paths.sh @@ -0,0 +1,17 @@ +source ./common.sh + +flake1Dir=$TEST_ROOT/flake1 + +mkdir -p $flake1Dir +cat > $flake1Dir/flake.nix < $flake2Dir/flake.nix < "$flake2Dir/flake.nix" < $flake2Dir/flake.nix < $flake3Dir/flake.nix < "$flake3Dir/flake.nix" < $flake3Dir/flake.nix < $flake3Dir/default.nix < "$flake3Dir/default.nix" < $nonFlakeDir/README.md < "$nonFlakeDir/README.md" < $flake1Dir/foo -git -C $flake1Dir add $flake1Dir/foo +echo foo > "$flake1Dir/foo" +git -C "$flake1Dir" add $flake1Dir/foo [[ $(nix flake metadata flake1 --json --refresh | jq -r .dirtyRevision) == "$hash1-dirty" ]] -echo -n '# foo' >> $flake1Dir/flake.nix -flake1OriginalCommit=$(git -C $flake1Dir rev-parse HEAD) -git -C $flake1Dir commit -a -m 'Foo' -flake1NewCommit=$(git -C $flake1Dir rev-parse HEAD) +echo -n '# foo' >> "$flake1Dir/flake.nix" +flake1OriginalCommit=$(git -C "$flake1Dir" rev-parse HEAD) +git -C "$flake1Dir" commit -a -m 'Foo' +flake1NewCommit=$(git -C "$flake1Dir" rev-parse HEAD) hash2=$(nix flake metadata flake1 --json --refresh | jq -r .revision) [[ $(nix flake metadata flake1 --json --refresh | jq -r .dirtyRevision) == "null" ]] [[ $hash1 != $hash2 ]] # Test 'nix build' on a flake. -nix build -o $TEST_ROOT/result flake1#foo -[[ -e $TEST_ROOT/result/hello ]] +nix build -o "$TEST_ROOT/result" flake1#foo +[[ -e "$TEST_ROOT/result/hello" ]] # Test packages.default. -nix build -o $TEST_ROOT/result flake1 -[[ -e $TEST_ROOT/result/hello ]] +nix build -o "$TEST_ROOT/result" flake1 +[[ -e "$TEST_ROOT/result/hello" ]] -nix build -o $TEST_ROOT/result $flake1Dir -nix build -o $TEST_ROOT/result git+file://$flake1Dir +nix build -o "$TEST_ROOT/result" "$flake1Dir" +nix build -o "$TEST_ROOT/result" "git+file://$flake1Dir" # Check that store symlinks inside a flake are not interpreted as flakes. 
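
Spelled out, that symlink check amounts to the two commands below; a sketch assuming flake1 has already been built from $flake1Dir as above.

    # Sketch: a result symlink created inside the flake's own source tree
    # points into /nix/store; path-info must treat it as a plain store path
    # rather than trying to interpret $flake1Dir/result as a flake.
    nix build -o "$flake1Dir/result" "git+file://$flake1Dir"
    nix path-info "$flake1Dir/result"
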
-nix build -o $flake1Dir/result git+file://$flake1Dir -nix path-info $flake1Dir/result +nix build -o "$flake1Dir/result" "git+file://$flake1Dir" +nix path-info "$flake1Dir/result" # 'getFlake' on an unlocked flakeref should fail in pure mode, but # succeed in impure mode. -(! nix build -o $TEST_ROOT/result --expr "(builtins.getFlake \"$flake1Dir\").packages.$system.default") -nix build -o $TEST_ROOT/result --expr "(builtins.getFlake \"$flake1Dir\").packages.$system.default" --impure +(! nix build -o "$TEST_ROOT/result" --expr "(builtins.getFlake \"$flake1Dir\").packages.$system.default") +nix build -o "$TEST_ROOT/result" --expr "(builtins.getFlake \"$flake1Dir\").packages.$system.default" --impure # 'getFlake' on a locked flakeref should succeed even in pure mode. -nix build -o $TEST_ROOT/result --expr "(builtins.getFlake \"git+file://$flake1Dir?rev=$hash2\").packages.$system.default" +nix build -o "$TEST_ROOT/result" --expr "(builtins.getFlake \"git+file://$flake1Dir?rev=$hash2\").packages.$system.default" # Building a flake with an unlocked dependency should fail in pure mode. -(! nix build -o $TEST_ROOT/result flake2#bar --no-registries) -(! nix build -o $TEST_ROOT/result flake2#bar --no-use-registries) +(! nix build -o "$TEST_ROOT/result" flake2#bar --no-registries) +(! nix build -o "$TEST_ROOT/result" flake2#bar --no-use-registries) (! nix eval --expr "builtins.getFlake \"$flake2Dir\"") # But should succeed in impure mode. -(! nix build -o $TEST_ROOT/result flake2#bar --impure) -nix build -o $TEST_ROOT/result flake2#bar --impure --no-write-lock-file +(! nix build -o "$TEST_ROOT/result" flake2#bar --impure) +nix build -o "$TEST_ROOT/result" flake2#bar --impure --no-write-lock-file nix eval --expr "builtins.getFlake \"$flake2Dir\"" --impure # Building a local flake with an unlocked dependency should fail with --no-update-lock-file. -expect 1 nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes' +expect 1 nix build -o "$TEST_ROOT/result" "$flake2Dir#bar" --no-update-lock-file 2>&1 | grep 'requires lock file changes' # But it should succeed without that flag. -nix build -o $TEST_ROOT/result $flake2Dir#bar --no-write-lock-file -expect 1 nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes' -nix build -o $TEST_ROOT/result $flake2Dir#bar --commit-lock-file -[[ -e $flake2Dir/flake.lock ]] -[[ -z $(git -C $flake2Dir diff main || echo failed) ]] +nix build -o "$TEST_ROOT/result" "$flake2Dir#bar" --no-write-lock-file +expect 1 nix build -o "$TEST_ROOT/result" "$flake2Dir#bar" --no-update-lock-file 2>&1 | grep 'requires lock file changes' +nix build -o "$TEST_ROOT/result" "$flake2Dir#bar" --commit-lock-file +[[ -e "$flake2Dir/flake.lock" ]] +[[ -z $(git -C "$flake2Dir" diff main || echo failed) ]] # Rerunning the build should not change the lockfile. -nix build -o $TEST_ROOT/result $flake2Dir#bar -[[ -z $(git -C $flake2Dir diff main || echo failed) ]] +nix build -o "$TEST_ROOT/result" "$flake2Dir#bar" +[[ -z $(git -C "$flake2Dir" diff main || echo failed) ]] # Building with a lockfile should not require a fetch of the registry. 
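
Before the registry-independence check, the lock-file flag behaviour exercised above condenses to a few lines; a sketch assuming $flake2Dir is a git work tree whose flake1 input is not yet locked, and using the `expect` helper from the test harness's common.sh.

    # --no-update-lock-file: refuse to build when the lock file would change.
    expect 1 nix build --no-link "$flake2Dir#bar" --no-update-lock-file
    # --no-write-lock-file: build, but leave the lock file untouched.
    nix build --no-link "$flake2Dir#bar" --no-write-lock-file
    # --commit-lock-file: write flake.lock and commit it in the flake's repo.
    nix build --no-link "$flake2Dir#bar" --commit-lock-file
    test -e "$flake2Dir/flake.lock"
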
-nix build -o $TEST_ROOT/result --flake-registry file:///no-registry.json $flake2Dir#bar --refresh -nix build -o $TEST_ROOT/result --no-registries $flake2Dir#bar --refresh -nix build -o $TEST_ROOT/result --no-use-registries $flake2Dir#bar --refresh +nix build -o "$TEST_ROOT/result" --flake-registry file:///no-registry.json "$flake2Dir#bar" --refresh +nix build -o "$TEST_ROOT/result" --no-registries "$flake2Dir#bar" --refresh +nix build -o "$TEST_ROOT/result" --no-use-registries "$flake2Dir#bar" --refresh # Updating the flake should not change the lockfile. -nix flake lock $flake2Dir -[[ -z $(git -C $flake2Dir diff main || echo failed) ]] +nix flake lock "$flake2Dir" +[[ -z $(git -C "$flake2Dir" diff main || echo failed) ]] # Now we should be able to build the flake in pure mode. -nix build -o $TEST_ROOT/result flake2#bar +nix build -o "$TEST_ROOT/result" flake2#bar # Or without a registry. -nix build -o $TEST_ROOT/result --no-registries git+file://$flake2Dir#bar --refresh -nix build -o $TEST_ROOT/result --no-use-registries git+file://$flake2Dir#bar --refresh +nix build -o "$TEST_ROOT/result" --no-registries "git+file://$percentEncodedFlake2Dir#bar" --refresh +nix build -o "$TEST_ROOT/result" --no-use-registries "git+file://$percentEncodedFlake2Dir#bar" --refresh # Test whether indirect dependencies work. -nix build -o $TEST_ROOT/result $flake3Dir#xyzzy -git -C $flake3Dir add flake.lock +nix build -o "$TEST_ROOT/result" "$flake3Dir#xyzzy" +git -C "$flake3Dir" add flake.lock # Add dependency to flake3. -rm $flake3Dir/flake.nix +rm "$flake3Dir/flake.nix" -cat > $flake3Dir/flake.nix < "$flake3Dir/flake.nix" < $flake3Dir/flake.nix < $flake3Dir/flake.nix < "$flake3Dir/flake.nix" < $flake3Dir/flake.nix < $flake3Dir/flake.nix < "$flake3Dir/flake.nix" < $flake3Dir/flake.nix < $flake3Dir/flake.nix < "$flake3Dir/flake.nix" < $flake3Dir/flake.nix < $flake3Dir/flake.nix < "$flake3Dir/flake.nix" < $flake3Dir/flake.nix < $flake3Dir/flake.nix < "$flake3Dir/flake.nix" < $flake3Dir/flake.nix < $flake3Dir/flake.nix < "$flake3Dir/flake.nix" < $flake3Dir/flake.nix < $flake3Dir/flake.nix < "$flake3Dir/flake.nix" < $flake3Dir/flake.nix <' -A x) = 123 ]] [[ $(NIX_PATH=flake3=flake:flake3 nix-instantiate --eval '' -A x) = 123 ]] # Test alternate lockfile paths. -nix flake lock $flake2Dir --output-lock-file $TEST_ROOT/flake2.lock -cmp $flake2Dir/flake.lock $TEST_ROOT/flake2.lock >/dev/null # lockfiles should be identical, since we're referencing flake2's original one +nix flake lock "$flake2Dir" --output-lock-file $TEST_ROOT/flake2.lock +cmp "$flake2Dir/flake.lock" $TEST_ROOT/flake2.lock >/dev/null # lockfiles should be identical, since we're referencing flake2's original one -nix flake lock $flake2Dir --output-lock-file $TEST_ROOT/flake2-overridden.lock --override-input flake1 git+file://$flake1Dir?rev=$flake1OriginalCommit -expectStderr 1 cmp $flake2Dir/flake.lock $TEST_ROOT/flake2-overridden.lock -nix flake metadata $flake2Dir --reference-lock-file $TEST_ROOT/flake2-overridden.lock | grepQuiet $flake1OriginalCommit +nix flake lock "$flake2Dir" --output-lock-file $TEST_ROOT/flake2-overridden.lock --override-input flake1 git+file://$flake1Dir?rev=$flake1OriginalCommit +expectStderr 1 cmp "$flake2Dir/flake.lock" $TEST_ROOT/flake2-overridden.lock +nix flake metadata "$flake2Dir" --reference-lock-file $TEST_ROOT/flake2-overridden.lock | grepQuiet $flake1OriginalCommit # reference-lock-file can only be used if allow-dirty is set. 
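
With dirty trees allowed (the default), the alternate-lock-file workflow tested above condenses to the following sketch; alt.lock and alt-pinned.lock are placeholder file names.

    # Write the lock file somewhere other than $flake2Dir/flake.lock ...
    nix flake lock "$flake2Dir" --output-lock-file "$TEST_ROOT/alt.lock"
    # ... optionally pinning an input to a known revision ...
    nix flake lock "$flake2Dir" --output-lock-file "$TEST_ROOT/alt-pinned.lock" \
        --override-input flake1 "git+file://$flake1Dir?rev=$flake1OriginalCommit"
    # ... then evaluate against that external lock file later on.
    nix flake metadata "$flake2Dir" --reference-lock-file "$TEST_ROOT/alt-pinned.lock"
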
-expectStderr 1 nix flake metadata $flake2Dir --no-allow-dirty --reference-lock-file $TEST_ROOT/flake2-overridden.lock +expectStderr 1 nix flake metadata "$flake2Dir" --no-allow-dirty --reference-lock-file $TEST_ROOT/flake2-overridden.lock diff --git a/tests/flakes/follow-paths.sh b/tests/functional/flakes/follow-paths.sh similarity index 51% rename from tests/flakes/follow-paths.sh rename to tests/functional/flakes/follow-paths.sh index fe9b51c65..8573b5511 100644 --- a/tests/flakes/follow-paths.sh +++ b/tests/functional/flakes/follow-paths.sh @@ -146,5 +146,117 @@ EOF git -C $flakeFollowsA add flake.nix -nix flake lock $flakeFollowsA 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid'" -nix flake lock $flakeFollowsA 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid2'" +nix flake lock "$flakeFollowsA" 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid'" +nix flake lock "$flakeFollowsA" 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid2'" + +# Now test follow path overloading +# This tests a lockfile checking regression https://github.com/NixOS/nix/pull/8819 +# +# We construct the following graph, where p->q means p has input q. +# A double edge means that the edge gets overridden using `follows`. +# +# A +# / \ +# / \ +# v v +# B ==> C --- follows declared in A +# \\ / +# \\/ --- follows declared in B +# v +# D +# +# The message was +# error: input 'B/D' follows a non-existent input 'B/C/D' +# +# Note that for `B` to resolve its follow for `D`, it needs `C/D`, for which it needs to resolve the follow on `C` first. +flakeFollowsOverloadA="$TEST_ROOT/follows/overload/flakeA" +flakeFollowsOverloadB="$TEST_ROOT/follows/overload/flakeA/flakeB" +flakeFollowsOverloadC="$TEST_ROOT/follows/overload/flakeA/flakeB/flakeC" +flakeFollowsOverloadD="$TEST_ROOT/follows/overload/flakeA/flakeB/flakeC/flakeD" + +# Test following path flakerefs. +createGitRepo "$flakeFollowsOverloadA" +mkdir -p "$flakeFollowsOverloadB" +mkdir -p "$flakeFollowsOverloadC" +mkdir -p "$flakeFollowsOverloadD" + +cat > "$flakeFollowsOverloadD/flake.nix" < "$flakeFollowsOverloadC/flake.nix" < "$flakeFollowsOverloadB/flake.nix" < "$flakeFollowsOverloadA/flake.nix" < baz +# The message was +# error: follow cycle detected: [baz -> foo -> bar -> baz] +flakeFollowCycle="$TEST_ROOT/follows/followCycle" + +# Test following path flakerefs. +mkdir -p "$flakeFollowCycle" + +cat > $flakeFollowCycle/flake.nix <&1 && fail "nix flake lock should have failed." 
|| true) +echo $checkRes | grep -F "error: follow cycle detected: [baz -> foo -> bar -> baz]" diff --git a/tests/flakes/init.sh b/tests/functional/flakes/init.sh similarity index 100% rename from tests/flakes/init.sh rename to tests/functional/flakes/init.sh diff --git a/tests/flakes/inputs.sh b/tests/functional/flakes/inputs.sh similarity index 100% rename from tests/flakes/inputs.sh rename to tests/functional/flakes/inputs.sh diff --git a/tests/flakes/mercurial.sh b/tests/functional/flakes/mercurial.sh similarity index 100% rename from tests/flakes/mercurial.sh rename to tests/functional/flakes/mercurial.sh diff --git a/tests/flakes/run.sh b/tests/functional/flakes/run.sh similarity index 100% rename from tests/flakes/run.sh rename to tests/functional/flakes/run.sh diff --git a/tests/flakes/search-root.sh b/tests/functional/flakes/search-root.sh similarity index 100% rename from tests/flakes/search-root.sh rename to tests/functional/flakes/search-root.sh diff --git a/tests/flakes/show.sh b/tests/functional/flakes/show.sh similarity index 100% rename from tests/flakes/show.sh rename to tests/functional/flakes/show.sh diff --git a/tests/flakes/unlocked-override.sh b/tests/functional/flakes/unlocked-override.sh similarity index 100% rename from tests/flakes/unlocked-override.sh rename to tests/functional/flakes/unlocked-override.sh diff --git a/tests/fmt.sh b/tests/functional/fmt.sh similarity index 100% rename from tests/fmt.sh rename to tests/functional/fmt.sh diff --git a/tests/fmt.simple.sh b/tests/functional/fmt.simple.sh similarity index 100% rename from tests/fmt.simple.sh rename to tests/functional/fmt.simple.sh diff --git a/tests/function-trace.sh b/tests/functional/function-trace.sh similarity index 100% rename from tests/function-trace.sh rename to tests/functional/function-trace.sh diff --git a/tests/gc-auto.sh b/tests/functional/gc-auto.sh similarity index 100% rename from tests/gc-auto.sh rename to tests/functional/gc-auto.sh diff --git a/tests/gc-concurrent.builder.sh b/tests/functional/gc-concurrent.builder.sh similarity index 100% rename from tests/gc-concurrent.builder.sh rename to tests/functional/gc-concurrent.builder.sh diff --git a/tests/gc-concurrent.nix b/tests/functional/gc-concurrent.nix similarity index 100% rename from tests/gc-concurrent.nix rename to tests/functional/gc-concurrent.nix diff --git a/tests/gc-concurrent.sh b/tests/functional/gc-concurrent.sh similarity index 100% rename from tests/gc-concurrent.sh rename to tests/functional/gc-concurrent.sh diff --git a/tests/gc-concurrent2.builder.sh b/tests/functional/gc-concurrent2.builder.sh similarity index 100% rename from tests/gc-concurrent2.builder.sh rename to tests/functional/gc-concurrent2.builder.sh diff --git a/tests/gc-non-blocking.sh b/tests/functional/gc-non-blocking.sh similarity index 100% rename from tests/gc-non-blocking.sh rename to tests/functional/gc-non-blocking.sh diff --git a/tests/gc-runtime.nix b/tests/functional/gc-runtime.nix similarity index 100% rename from tests/gc-runtime.nix rename to tests/functional/gc-runtime.nix diff --git a/tests/gc-runtime.sh b/tests/functional/gc-runtime.sh similarity index 100% rename from tests/gc-runtime.sh rename to tests/functional/gc-runtime.sh diff --git a/tests/gc.sh b/tests/functional/gc.sh similarity index 100% rename from tests/gc.sh rename to tests/functional/gc.sh diff --git a/tests/hash-check.nix b/tests/functional/hash-check.nix similarity index 100% rename from tests/hash-check.nix rename to tests/functional/hash-check.nix diff --git 
a/tests/hash.sh b/tests/functional/hash.sh similarity index 100% rename from tests/hash.sh rename to tests/functional/hash.sh diff --git a/tests/hermetic.nix b/tests/functional/hermetic.nix similarity index 90% rename from tests/hermetic.nix rename to tests/functional/hermetic.nix index c4fbbfa14..810180ac6 100644 --- a/tests/hermetic.nix +++ b/tests/functional/hermetic.nix @@ -14,7 +14,10 @@ let derivation ({ inherit system; builder = busybox; - args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")]; + args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" '' + if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; + eval "$buildCommand" + '')]; } // removeAttrs args ["builder" "meta" "passthru"] // caArgs) // { meta = args.meta or {}; passthru = args.passthru or {}; }; diff --git a/tests/import-derivation.nix b/tests/functional/import-derivation.nix similarity index 100% rename from tests/import-derivation.nix rename to tests/functional/import-derivation.nix diff --git a/tests/import-derivation.sh b/tests/functional/import-derivation.sh similarity index 100% rename from tests/import-derivation.sh rename to tests/functional/import-derivation.sh diff --git a/tests/impure-derivations.nix b/tests/functional/impure-derivations.nix similarity index 100% rename from tests/impure-derivations.nix rename to tests/functional/impure-derivations.nix diff --git a/tests/impure-derivations.sh b/tests/functional/impure-derivations.sh similarity index 100% rename from tests/impure-derivations.sh rename to tests/functional/impure-derivations.sh diff --git a/tests/functional/impure-env.nix b/tests/functional/impure-env.nix new file mode 100644 index 000000000..2b0380ed7 --- /dev/null +++ b/tests/functional/impure-env.nix @@ -0,0 +1,16 @@ +{ var, value }: + +with import ./config.nix; + +mkDerivation { + name = "test"; + buildCommand = '' + echo ${var} = "''$${var}" + echo -n "''$${var}" > "$out" + ''; + + impureEnvVars = [ var ]; + + outputHashAlgo = "sha256"; + outputHash = builtins.hashString "sha256" value; +} diff --git a/tests/functional/impure-env.sh b/tests/functional/impure-env.sh new file mode 100644 index 000000000..d9e4a34a2 --- /dev/null +++ b/tests/functional/impure-env.sh @@ -0,0 +1,33 @@ +source common.sh + +# Needs the config option 'impure-env' to work +requireDaemonNewerThan "2.18.0pre20230816" + +enableFeatures "configurable-impure-env" +restartDaemon + +varTest() { + local var="$1"; shift + local value="$1"; shift + nix build --no-link -vL --argstr var "$var" --argstr value "$value" --impure "$@" --file impure-env.nix + clearStore +} + +clearStore +startDaemon + +varTest env_name value --impure-env env_name=value + +echo 'impure-env = set_in_config=config_value' >> "$NIX_CONF_DIR/nix.conf" +set_in_config=daemon_value restartDaemon + +varTest set_in_config config_value +varTest set_in_config client_value --impure-env set_in_config=client_value + +sed -i -e '/^trusted-users =/d' "$NIX_CONF_DIR/nix.conf" + +env_name=daemon_value restartDaemon + +varTest env_name daemon_value --impure-env env_name=client_value + +killDaemon diff --git a/tests/init.sh b/tests/functional/init.sh similarity index 100% rename from tests/init.sh rename to tests/functional/init.sh diff --git a/tests/install-darwin.sh b/tests/functional/install-darwin.sh similarity index 100% rename from tests/install-darwin.sh rename to tests/functional/install-darwin.sh diff --git 
a/tests/lang-test-infra.sh b/tests/functional/lang-test-infra.sh similarity index 100% rename from tests/lang-test-infra.sh rename to tests/functional/lang-test-infra.sh diff --git a/tests/lang.sh b/tests/functional/lang.sh similarity index 96% rename from tests/lang.sh rename to tests/functional/lang.sh index 75dbbc38e..c3acef5ee 100755 --- a/tests/lang.sh +++ b/tests/functional/lang.sh @@ -68,7 +68,7 @@ for i in lang/eval-fail-*.nix; do echo "evaluating $i (should fail)"; i=$(basename "$i" .nix) if - expectStderr 1 nix-instantiate --show-trace "lang/$i.nix" \ + expectStderr 1 nix-instantiate --eval --strict --show-trace "lang/$i.nix" \ | sed "s!$(pwd)!/pwd!g" > "lang/$i.err" then diffAndAccept "$i" err err.exp @@ -134,7 +134,7 @@ else echo '' echo 'You can rerun this test with:' echo '' - echo ' _NIX_TEST_ACCEPT=1 make tests/lang.sh.test' + echo ' _NIX_TEST_ACCEPT=1 make tests/functional/lang.sh.test' echo '' echo 'to regenerate the files containing the expected output,' echo 'and then view the git diff to decide whether a change is' diff --git a/tests/lang/binary-data b/tests/functional/lang/binary-data similarity index 100% rename from tests/lang/binary-data rename to tests/functional/lang/binary-data diff --git a/tests/lang/data b/tests/functional/lang/data similarity index 100% rename from tests/lang/data rename to tests/functional/lang/data diff --git a/tests/lang/dir1/a.nix b/tests/functional/lang/dir1/a.nix similarity index 100% rename from tests/lang/dir1/a.nix rename to tests/functional/lang/dir1/a.nix diff --git a/tests/lang/dir2/a.nix b/tests/functional/lang/dir2/a.nix similarity index 100% rename from tests/lang/dir2/a.nix rename to tests/functional/lang/dir2/a.nix diff --git a/tests/lang/dir2/b.nix b/tests/functional/lang/dir2/b.nix similarity index 100% rename from tests/lang/dir2/b.nix rename to tests/functional/lang/dir2/b.nix diff --git a/tests/lang/dir3/a.nix b/tests/functional/lang/dir3/a.nix similarity index 100% rename from tests/lang/dir3/a.nix rename to tests/functional/lang/dir3/a.nix diff --git a/tests/lang/dir3/b.nix b/tests/functional/lang/dir3/b.nix similarity index 100% rename from tests/lang/dir3/b.nix rename to tests/functional/lang/dir3/b.nix diff --git a/tests/lang/dir3/c.nix b/tests/functional/lang/dir3/c.nix similarity index 100% rename from tests/lang/dir3/c.nix rename to tests/functional/lang/dir3/c.nix diff --git a/tests/lang/dir4/a.nix b/tests/functional/lang/dir4/a.nix similarity index 100% rename from tests/lang/dir4/a.nix rename to tests/functional/lang/dir4/a.nix diff --git a/tests/lang/dir4/c.nix b/tests/functional/lang/dir4/c.nix similarity index 100% rename from tests/lang/dir4/c.nix rename to tests/functional/lang/dir4/c.nix diff --git a/tests/lang/empty.exp b/tests/functional/lang/empty.exp similarity index 100% rename from tests/lang/empty.exp rename to tests/functional/lang/empty.exp diff --git a/tests/lang/eval-fail-abort.err.exp b/tests/functional/lang/eval-fail-abort.err.exp similarity index 100% rename from tests/lang/eval-fail-abort.err.exp rename to tests/functional/lang/eval-fail-abort.err.exp diff --git a/tests/lang/eval-fail-abort.nix b/tests/functional/lang/eval-fail-abort.nix similarity index 100% rename from tests/lang/eval-fail-abort.nix rename to tests/functional/lang/eval-fail-abort.nix diff --git a/tests/lang/eval-fail-assert.err.exp b/tests/functional/lang/eval-fail-assert.err.exp similarity index 100% rename from tests/lang/eval-fail-assert.err.exp rename to tests/functional/lang/eval-fail-assert.err.exp diff --git 
a/tests/lang/eval-fail-assert.nix b/tests/functional/lang/eval-fail-assert.nix similarity index 100% rename from tests/lang/eval-fail-assert.nix rename to tests/functional/lang/eval-fail-assert.nix diff --git a/tests/lang/eval-fail-bad-string-interpolation-1.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp similarity index 100% rename from tests/lang/eval-fail-bad-string-interpolation-1.err.exp rename to tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp diff --git a/tests/lang/eval-fail-bad-string-interpolation-1.nix b/tests/functional/lang/eval-fail-bad-string-interpolation-1.nix similarity index 100% rename from tests/lang/eval-fail-bad-string-interpolation-1.nix rename to tests/functional/lang/eval-fail-bad-string-interpolation-1.nix diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-2.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-2.err.exp new file mode 100644 index 000000000..dea119ae8 --- /dev/null +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-2.err.exp @@ -0,0 +1 @@ +error: getting status of '/pwd/lang/fnord': No such file or directory diff --git a/tests/lang/eval-fail-bad-string-interpolation-2.nix b/tests/functional/lang/eval-fail-bad-string-interpolation-2.nix similarity index 100% rename from tests/lang/eval-fail-bad-string-interpolation-2.nix rename to tests/functional/lang/eval-fail-bad-string-interpolation-2.nix diff --git a/tests/lang/eval-fail-bad-string-interpolation-3.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp similarity index 100% rename from tests/lang/eval-fail-bad-string-interpolation-3.err.exp rename to tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp diff --git a/tests/lang/eval-fail-bad-string-interpolation-3.nix b/tests/functional/lang/eval-fail-bad-string-interpolation-3.nix similarity index 100% rename from tests/lang/eval-fail-bad-string-interpolation-3.nix rename to tests/functional/lang/eval-fail-bad-string-interpolation-3.nix diff --git a/tests/lang/eval-fail-blackhole.err.exp b/tests/functional/lang/eval-fail-blackhole.err.exp similarity index 100% rename from tests/lang/eval-fail-blackhole.err.exp rename to tests/functional/lang/eval-fail-blackhole.err.exp diff --git a/tests/lang/eval-fail-blackhole.nix b/tests/functional/lang/eval-fail-blackhole.nix similarity index 100% rename from tests/lang/eval-fail-blackhole.nix rename to tests/functional/lang/eval-fail-blackhole.nix diff --git a/tests/lang/eval-fail-deepseq.err.exp b/tests/functional/lang/eval-fail-deepseq.err.exp similarity index 100% rename from tests/lang/eval-fail-deepseq.err.exp rename to tests/functional/lang/eval-fail-deepseq.err.exp diff --git a/tests/lang/eval-fail-deepseq.nix b/tests/functional/lang/eval-fail-deepseq.nix similarity index 100% rename from tests/lang/eval-fail-deepseq.nix rename to tests/functional/lang/eval-fail-deepseq.nix diff --git a/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp b/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp new file mode 100644 index 000000000..c5fa67523 --- /dev/null +++ b/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp @@ -0,0 +1,18 @@ +error: + … while evaluating the attribute 'set' + + at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:2:3: + + 1| { + 2| set = { "${"" + "b"}" = 1; }; + | ^ + 3| set = { "${"b" + ""}" = 2; }; + + error: dynamic attribute 'b' already defined at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:2:11 + + at 
/pwd/lang/eval-fail-dup-dynamic-attrs.nix:3:11: + + 2| set = { "${"" + "b"}" = 1; }; + 3| set = { "${"b" + ""}" = 2; }; + | ^ + 4| } diff --git a/tests/lang/eval-fail-dup-dynamic-attrs.nix b/tests/functional/lang/eval-fail-dup-dynamic-attrs.nix similarity index 100% rename from tests/lang/eval-fail-dup-dynamic-attrs.nix rename to tests/functional/lang/eval-fail-dup-dynamic-attrs.nix diff --git a/tests/lang/eval-fail-foldlStrict-strict-op-application.err.exp b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp similarity index 100% rename from tests/lang/eval-fail-foldlStrict-strict-op-application.err.exp rename to tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp diff --git a/tests/lang/eval-fail-foldlStrict-strict-op-application.nix b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.nix similarity index 100% rename from tests/lang/eval-fail-foldlStrict-strict-op-application.nix rename to tests/functional/lang/eval-fail-foldlStrict-strict-op-application.nix diff --git a/tests/lang/eval-fail-fromTOML-timestamps.err.exp b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp similarity index 85% rename from tests/lang/eval-fail-fromTOML-timestamps.err.exp rename to tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp index f6bd19f5a..5b60d253d 100644 --- a/tests/lang/eval-fail-fromTOML-timestamps.err.exp +++ b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp @@ -8,5 +8,3 @@ error: 2| key = "value" error: while parsing a TOML string: Dates and times are not supported - - at «none»:0: (source not available) diff --git a/tests/lang/eval-fail-fromTOML-timestamps.nix b/tests/functional/lang/eval-fail-fromTOML-timestamps.nix similarity index 100% rename from tests/lang/eval-fail-fromTOML-timestamps.nix rename to tests/functional/lang/eval-fail-fromTOML-timestamps.nix diff --git a/tests/lang/eval-fail-hashfile-missing.err.exp b/tests/functional/lang/eval-fail-hashfile-missing.err.exp similarity index 84% rename from tests/lang/eval-fail-hashfile-missing.err.exp rename to tests/functional/lang/eval-fail-hashfile-missing.err.exp index 8e77dec1e..6d38608c0 100644 --- a/tests/lang/eval-fail-hashfile-missing.err.exp +++ b/tests/functional/lang/eval-fail-hashfile-missing.err.exp @@ -10,10 +10,6 @@ error: … while evaluating the first argument passed to builtins.toString - at «none»:0: (source not available) - … while calling the 'hashFile' builtin - at «none»:0: (source not available) - error: opening file '/pwd/lang/this-file-is-definitely-not-there-7392097': No such file or directory diff --git a/tests/lang/eval-fail-hashfile-missing.nix b/tests/functional/lang/eval-fail-hashfile-missing.nix similarity index 100% rename from tests/lang/eval-fail-hashfile-missing.nix rename to tests/functional/lang/eval-fail-hashfile-missing.nix diff --git a/tests/lang/eval-fail-list.err.exp b/tests/functional/lang/eval-fail-list.err.exp similarity index 100% rename from tests/lang/eval-fail-list.err.exp rename to tests/functional/lang/eval-fail-list.err.exp diff --git a/tests/lang/eval-fail-list.nix b/tests/functional/lang/eval-fail-list.nix similarity index 100% rename from tests/lang/eval-fail-list.nix rename to tests/functional/lang/eval-fail-list.nix diff --git a/tests/lang/eval-fail-missing-arg.err.exp b/tests/functional/lang/eval-fail-missing-arg.err.exp similarity index 100% rename from tests/lang/eval-fail-missing-arg.err.exp rename to tests/functional/lang/eval-fail-missing-arg.err.exp diff --git 
a/tests/lang/eval-fail-missing-arg.nix b/tests/functional/lang/eval-fail-missing-arg.nix similarity index 100% rename from tests/lang/eval-fail-missing-arg.nix rename to tests/functional/lang/eval-fail-missing-arg.nix diff --git a/tests/functional/lang/eval-fail-nonexist-path.err.exp b/tests/functional/lang/eval-fail-nonexist-path.err.exp new file mode 100644 index 000000000..dea119ae8 --- /dev/null +++ b/tests/functional/lang/eval-fail-nonexist-path.err.exp @@ -0,0 +1 @@ +error: getting status of '/pwd/lang/fnord': No such file or directory diff --git a/tests/lang/eval-fail-nonexist-path.nix b/tests/functional/lang/eval-fail-nonexist-path.nix similarity index 100% rename from tests/lang/eval-fail-nonexist-path.nix rename to tests/functional/lang/eval-fail-nonexist-path.nix diff --git a/tests/lang/eval-fail-path-slash.err.exp b/tests/functional/lang/eval-fail-path-slash.err.exp similarity index 100% rename from tests/lang/eval-fail-path-slash.err.exp rename to tests/functional/lang/eval-fail-path-slash.err.exp diff --git a/tests/lang/eval-fail-path-slash.nix b/tests/functional/lang/eval-fail-path-slash.nix similarity index 100% rename from tests/lang/eval-fail-path-slash.nix rename to tests/functional/lang/eval-fail-path-slash.nix diff --git a/tests/lang/eval-fail-recursion.err.exp b/tests/functional/lang/eval-fail-recursion.err.exp similarity index 100% rename from tests/lang/eval-fail-recursion.err.exp rename to tests/functional/lang/eval-fail-recursion.err.exp diff --git a/tests/lang/eval-fail-recursion.nix b/tests/functional/lang/eval-fail-recursion.nix similarity index 100% rename from tests/lang/eval-fail-recursion.nix rename to tests/functional/lang/eval-fail-recursion.nix diff --git a/tests/lang/eval-fail-remove.err.exp b/tests/functional/lang/eval-fail-remove.err.exp similarity index 100% rename from tests/lang/eval-fail-remove.err.exp rename to tests/functional/lang/eval-fail-remove.err.exp diff --git a/tests/lang/eval-fail-remove.nix b/tests/functional/lang/eval-fail-remove.nix similarity index 100% rename from tests/lang/eval-fail-remove.nix rename to tests/functional/lang/eval-fail-remove.nix diff --git a/tests/lang/eval-fail-scope-5.err.exp b/tests/functional/lang/eval-fail-scope-5.err.exp similarity index 100% rename from tests/lang/eval-fail-scope-5.err.exp rename to tests/functional/lang/eval-fail-scope-5.err.exp diff --git a/tests/lang/eval-fail-scope-5.nix b/tests/functional/lang/eval-fail-scope-5.nix similarity index 100% rename from tests/lang/eval-fail-scope-5.nix rename to tests/functional/lang/eval-fail-scope-5.nix diff --git a/tests/lang/eval-fail-seq.err.exp b/tests/functional/lang/eval-fail-seq.err.exp similarity index 100% rename from tests/lang/eval-fail-seq.err.exp rename to tests/functional/lang/eval-fail-seq.err.exp diff --git a/tests/lang/eval-fail-seq.nix b/tests/functional/lang/eval-fail-seq.nix similarity index 100% rename from tests/lang/eval-fail-seq.nix rename to tests/functional/lang/eval-fail-seq.nix diff --git a/tests/lang/eval-fail-set-override.err.exp b/tests/functional/lang/eval-fail-set-override.err.exp similarity index 71% rename from tests/lang/eval-fail-set-override.err.exp rename to tests/functional/lang/eval-fail-set-override.err.exp index beb29d678..71481683d 100644 --- a/tests/lang/eval-fail-set-override.err.exp +++ b/tests/functional/lang/eval-fail-set-override.err.exp @@ -1,6 +1,4 @@ error: … while evaluating the `__overrides` attribute - at «none»:0: (source not available) - error: value is an integer while a set was expected diff 
--git a/tests/lang/eval-fail-set-override.nix b/tests/functional/lang/eval-fail-set-override.nix similarity index 100% rename from tests/lang/eval-fail-set-override.nix rename to tests/functional/lang/eval-fail-set-override.nix diff --git a/tests/lang/eval-fail-set.err.exp b/tests/functional/lang/eval-fail-set.err.exp similarity index 100% rename from tests/lang/eval-fail-set.err.exp rename to tests/functional/lang/eval-fail-set.err.exp diff --git a/tests/lang/eval-fail-set.nix b/tests/functional/lang/eval-fail-set.nix similarity index 100% rename from tests/lang/eval-fail-set.nix rename to tests/functional/lang/eval-fail-set.nix diff --git a/tests/lang/eval-fail-substring.err.exp b/tests/functional/lang/eval-fail-substring.err.exp similarity index 84% rename from tests/lang/eval-fail-substring.err.exp rename to tests/functional/lang/eval-fail-substring.err.exp index dc26a00bd..5c58be29a 100644 --- a/tests/lang/eval-fail-substring.err.exp +++ b/tests/functional/lang/eval-fail-substring.err.exp @@ -8,5 +8,3 @@ error: 2| error: negative start position in 'substring' - - at «none»:0: (source not available) diff --git a/tests/lang/eval-fail-substring.nix b/tests/functional/lang/eval-fail-substring.nix similarity index 100% rename from tests/lang/eval-fail-substring.nix rename to tests/functional/lang/eval-fail-substring.nix diff --git a/tests/lang/eval-fail-to-path.err.exp b/tests/functional/lang/eval-fail-to-path.err.exp similarity index 86% rename from tests/lang/eval-fail-to-path.err.exp rename to tests/functional/lang/eval-fail-to-path.err.exp index 43ed2bdfc..4ffa2cf6d 100644 --- a/tests/lang/eval-fail-to-path.err.exp +++ b/tests/functional/lang/eval-fail-to-path.err.exp @@ -9,6 +9,4 @@ error: … while evaluating the first argument passed to builtins.toPath - at «none»:0: (source not available) - error: string 'foo/bar' doesn't represent an absolute path diff --git a/tests/lang/eval-fail-to-path.nix b/tests/functional/lang/eval-fail-to-path.nix similarity index 100% rename from tests/lang/eval-fail-to-path.nix rename to tests/functional/lang/eval-fail-to-path.nix diff --git a/tests/lang/eval-fail-toJSON.err.exp b/tests/functional/lang/eval-fail-toJSON.err.exp similarity index 100% rename from tests/lang/eval-fail-toJSON.err.exp rename to tests/functional/lang/eval-fail-toJSON.err.exp diff --git a/tests/lang/eval-fail-toJSON.nix b/tests/functional/lang/eval-fail-toJSON.nix similarity index 100% rename from tests/lang/eval-fail-toJSON.nix rename to tests/functional/lang/eval-fail-toJSON.nix diff --git a/tests/lang/eval-fail-undeclared-arg.err.exp b/tests/functional/lang/eval-fail-undeclared-arg.err.exp similarity index 100% rename from tests/lang/eval-fail-undeclared-arg.err.exp rename to tests/functional/lang/eval-fail-undeclared-arg.err.exp diff --git a/tests/lang/eval-fail-undeclared-arg.nix b/tests/functional/lang/eval-fail-undeclared-arg.nix similarity index 100% rename from tests/lang/eval-fail-undeclared-arg.nix rename to tests/functional/lang/eval-fail-undeclared-arg.nix diff --git a/tests/lang/eval-okay-any-all.exp b/tests/functional/lang/eval-okay-any-all.exp similarity index 100% rename from tests/lang/eval-okay-any-all.exp rename to tests/functional/lang/eval-okay-any-all.exp diff --git a/tests/lang/eval-okay-any-all.nix b/tests/functional/lang/eval-okay-any-all.nix similarity index 100% rename from tests/lang/eval-okay-any-all.nix rename to tests/functional/lang/eval-okay-any-all.nix diff --git a/tests/lang/eval-okay-arithmetic.exp 
b/tests/functional/lang/eval-okay-arithmetic.exp similarity index 100% rename from tests/lang/eval-okay-arithmetic.exp rename to tests/functional/lang/eval-okay-arithmetic.exp diff --git a/tests/lang/eval-okay-arithmetic.nix b/tests/functional/lang/eval-okay-arithmetic.nix similarity index 100% rename from tests/lang/eval-okay-arithmetic.nix rename to tests/functional/lang/eval-okay-arithmetic.nix diff --git a/tests/lang/eval-okay-attrnames.exp b/tests/functional/lang/eval-okay-attrnames.exp similarity index 100% rename from tests/lang/eval-okay-attrnames.exp rename to tests/functional/lang/eval-okay-attrnames.exp diff --git a/tests/lang/eval-okay-attrnames.nix b/tests/functional/lang/eval-okay-attrnames.nix similarity index 100% rename from tests/lang/eval-okay-attrnames.nix rename to tests/functional/lang/eval-okay-attrnames.nix diff --git a/tests/lang/eval-okay-attrs.exp b/tests/functional/lang/eval-okay-attrs.exp similarity index 100% rename from tests/lang/eval-okay-attrs.exp rename to tests/functional/lang/eval-okay-attrs.exp diff --git a/tests/lang/eval-okay-attrs.nix b/tests/functional/lang/eval-okay-attrs.nix similarity index 100% rename from tests/lang/eval-okay-attrs.nix rename to tests/functional/lang/eval-okay-attrs.nix diff --git a/tests/lang/eval-okay-attrs2.exp b/tests/functional/lang/eval-okay-attrs2.exp similarity index 100% rename from tests/lang/eval-okay-attrs2.exp rename to tests/functional/lang/eval-okay-attrs2.exp diff --git a/tests/lang/eval-okay-attrs2.nix b/tests/functional/lang/eval-okay-attrs2.nix similarity index 100% rename from tests/lang/eval-okay-attrs2.nix rename to tests/functional/lang/eval-okay-attrs2.nix diff --git a/tests/lang/eval-okay-attrs3.exp b/tests/functional/lang/eval-okay-attrs3.exp similarity index 100% rename from tests/lang/eval-okay-attrs3.exp rename to tests/functional/lang/eval-okay-attrs3.exp diff --git a/tests/lang/eval-okay-attrs3.nix b/tests/functional/lang/eval-okay-attrs3.nix similarity index 100% rename from tests/lang/eval-okay-attrs3.nix rename to tests/functional/lang/eval-okay-attrs3.nix diff --git a/tests/lang/eval-okay-attrs4.exp b/tests/functional/lang/eval-okay-attrs4.exp similarity index 100% rename from tests/lang/eval-okay-attrs4.exp rename to tests/functional/lang/eval-okay-attrs4.exp diff --git a/tests/lang/eval-okay-attrs4.nix b/tests/functional/lang/eval-okay-attrs4.nix similarity index 100% rename from tests/lang/eval-okay-attrs4.nix rename to tests/functional/lang/eval-okay-attrs4.nix diff --git a/tests/lang/eval-okay-attrs5.exp b/tests/functional/lang/eval-okay-attrs5.exp similarity index 100% rename from tests/lang/eval-okay-attrs5.exp rename to tests/functional/lang/eval-okay-attrs5.exp diff --git a/tests/lang/eval-okay-attrs5.nix b/tests/functional/lang/eval-okay-attrs5.nix similarity index 100% rename from tests/lang/eval-okay-attrs5.nix rename to tests/functional/lang/eval-okay-attrs5.nix diff --git a/tests/lang/eval-okay-attrs6.exp b/tests/functional/lang/eval-okay-attrs6.exp similarity index 100% rename from tests/lang/eval-okay-attrs6.exp rename to tests/functional/lang/eval-okay-attrs6.exp diff --git a/tests/lang/eval-okay-attrs6.nix b/tests/functional/lang/eval-okay-attrs6.nix similarity index 100% rename from tests/lang/eval-okay-attrs6.nix rename to tests/functional/lang/eval-okay-attrs6.nix diff --git a/tests/lang/eval-okay-autoargs.exp b/tests/functional/lang/eval-okay-autoargs.exp similarity index 100% rename from tests/lang/eval-okay-autoargs.exp rename to 
tests/functional/lang/eval-okay-autoargs.exp diff --git a/tests/lang/eval-okay-autoargs.flags b/tests/functional/lang/eval-okay-autoargs.flags similarity index 100% rename from tests/lang/eval-okay-autoargs.flags rename to tests/functional/lang/eval-okay-autoargs.flags diff --git a/tests/lang/eval-okay-autoargs.nix b/tests/functional/lang/eval-okay-autoargs.nix similarity index 100% rename from tests/lang/eval-okay-autoargs.nix rename to tests/functional/lang/eval-okay-autoargs.nix diff --git a/tests/lang/eval-okay-backslash-newline-1.exp b/tests/functional/lang/eval-okay-backslash-newline-1.exp similarity index 100% rename from tests/lang/eval-okay-backslash-newline-1.exp rename to tests/functional/lang/eval-okay-backslash-newline-1.exp diff --git a/tests/lang/eval-okay-backslash-newline-1.nix b/tests/functional/lang/eval-okay-backslash-newline-1.nix similarity index 100% rename from tests/lang/eval-okay-backslash-newline-1.nix rename to tests/functional/lang/eval-okay-backslash-newline-1.nix diff --git a/tests/lang/eval-okay-backslash-newline-2.exp b/tests/functional/lang/eval-okay-backslash-newline-2.exp similarity index 100% rename from tests/lang/eval-okay-backslash-newline-2.exp rename to tests/functional/lang/eval-okay-backslash-newline-2.exp diff --git a/tests/lang/eval-okay-backslash-newline-2.nix b/tests/functional/lang/eval-okay-backslash-newline-2.nix similarity index 100% rename from tests/lang/eval-okay-backslash-newline-2.nix rename to tests/functional/lang/eval-okay-backslash-newline-2.nix diff --git a/tests/lang/eval-okay-builtins-add.exp b/tests/functional/lang/eval-okay-builtins-add.exp similarity index 100% rename from tests/lang/eval-okay-builtins-add.exp rename to tests/functional/lang/eval-okay-builtins-add.exp diff --git a/tests/lang/eval-okay-builtins-add.nix b/tests/functional/lang/eval-okay-builtins-add.nix similarity index 100% rename from tests/lang/eval-okay-builtins-add.nix rename to tests/functional/lang/eval-okay-builtins-add.nix diff --git a/tests/lang/eval-okay-builtins.exp b/tests/functional/lang/eval-okay-builtins.exp similarity index 100% rename from tests/lang/eval-okay-builtins.exp rename to tests/functional/lang/eval-okay-builtins.exp diff --git a/tests/lang/eval-okay-builtins.nix b/tests/functional/lang/eval-okay-builtins.nix similarity index 100% rename from tests/lang/eval-okay-builtins.nix rename to tests/functional/lang/eval-okay-builtins.nix diff --git a/tests/lang/eval-okay-callable-attrs.exp b/tests/functional/lang/eval-okay-callable-attrs.exp similarity index 100% rename from tests/lang/eval-okay-callable-attrs.exp rename to tests/functional/lang/eval-okay-callable-attrs.exp diff --git a/tests/lang/eval-okay-callable-attrs.nix b/tests/functional/lang/eval-okay-callable-attrs.nix similarity index 100% rename from tests/lang/eval-okay-callable-attrs.nix rename to tests/functional/lang/eval-okay-callable-attrs.nix diff --git a/tests/lang/eval-okay-catattrs.exp b/tests/functional/lang/eval-okay-catattrs.exp similarity index 100% rename from tests/lang/eval-okay-catattrs.exp rename to tests/functional/lang/eval-okay-catattrs.exp diff --git a/tests/lang/eval-okay-catattrs.nix b/tests/functional/lang/eval-okay-catattrs.nix similarity index 100% rename from tests/lang/eval-okay-catattrs.nix rename to tests/functional/lang/eval-okay-catattrs.nix diff --git a/tests/lang/eval-okay-closure.exp b/tests/functional/lang/eval-okay-closure.exp similarity index 100% rename from tests/lang/eval-okay-closure.exp rename to 
tests/functional/lang/eval-okay-closure.exp diff --git a/tests/lang/eval-okay-closure.exp.xml b/tests/functional/lang/eval-okay-closure.exp.xml similarity index 100% rename from tests/lang/eval-okay-closure.exp.xml rename to tests/functional/lang/eval-okay-closure.exp.xml diff --git a/tests/lang/eval-okay-closure.nix b/tests/functional/lang/eval-okay-closure.nix similarity index 100% rename from tests/lang/eval-okay-closure.nix rename to tests/functional/lang/eval-okay-closure.nix diff --git a/tests/lang/eval-okay-comments.exp b/tests/functional/lang/eval-okay-comments.exp similarity index 100% rename from tests/lang/eval-okay-comments.exp rename to tests/functional/lang/eval-okay-comments.exp diff --git a/tests/lang/eval-okay-comments.nix b/tests/functional/lang/eval-okay-comments.nix similarity index 100% rename from tests/lang/eval-okay-comments.nix rename to tests/functional/lang/eval-okay-comments.nix diff --git a/tests/lang/eval-okay-concat.exp b/tests/functional/lang/eval-okay-concat.exp similarity index 100% rename from tests/lang/eval-okay-concat.exp rename to tests/functional/lang/eval-okay-concat.exp diff --git a/tests/lang/eval-okay-concat.nix b/tests/functional/lang/eval-okay-concat.nix similarity index 100% rename from tests/lang/eval-okay-concat.nix rename to tests/functional/lang/eval-okay-concat.nix diff --git a/tests/lang/eval-okay-concatmap.exp b/tests/functional/lang/eval-okay-concatmap.exp similarity index 100% rename from tests/lang/eval-okay-concatmap.exp rename to tests/functional/lang/eval-okay-concatmap.exp diff --git a/tests/lang/eval-okay-concatmap.nix b/tests/functional/lang/eval-okay-concatmap.nix similarity index 100% rename from tests/lang/eval-okay-concatmap.nix rename to tests/functional/lang/eval-okay-concatmap.nix diff --git a/tests/lang/eval-okay-concatstringssep.exp b/tests/functional/lang/eval-okay-concatstringssep.exp similarity index 100% rename from tests/lang/eval-okay-concatstringssep.exp rename to tests/functional/lang/eval-okay-concatstringssep.exp diff --git a/tests/lang/eval-okay-concatstringssep.nix b/tests/functional/lang/eval-okay-concatstringssep.nix similarity index 100% rename from tests/lang/eval-okay-concatstringssep.nix rename to tests/functional/lang/eval-okay-concatstringssep.nix diff --git a/tests/lang/eval-okay-context-introspection.exp b/tests/functional/lang/eval-okay-context-introspection.exp similarity index 100% rename from tests/lang/eval-okay-context-introspection.exp rename to tests/functional/lang/eval-okay-context-introspection.exp diff --git a/tests/lang/eval-okay-context-introspection.nix b/tests/functional/lang/eval-okay-context-introspection.nix similarity index 100% rename from tests/lang/eval-okay-context-introspection.nix rename to tests/functional/lang/eval-okay-context-introspection.nix diff --git a/tests/lang/eval-okay-context.exp b/tests/functional/lang/eval-okay-context.exp similarity index 100% rename from tests/lang/eval-okay-context.exp rename to tests/functional/lang/eval-okay-context.exp diff --git a/tests/lang/eval-okay-context.nix b/tests/functional/lang/eval-okay-context.nix similarity index 100% rename from tests/lang/eval-okay-context.nix rename to tests/functional/lang/eval-okay-context.nix diff --git a/tests/functional/lang/eval-okay-convertHash.exp b/tests/functional/lang/eval-okay-convertHash.exp new file mode 100644 index 000000000..60e0a3c49 --- /dev/null +++ b/tests/functional/lang/eval-okay-convertHash.exp @@ -0,0 +1 @@ +{ hashesBase16 = [ "d41d8cd98f00b204e9800998ecf8427e" 
"6c69ee7f211c640419d5366cc076ae46" "bb3438fbabd460ea6dbd27d153e2233b" "da39a3ee5e6b4b0d3255bfef95601890afd80709" "cd54e8568c1b37cf1e5badb0779bcbf382212189" "6d12e10b1d331dad210e47fd25d4f260802b7e77" "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" "900a4469df00ccbfd0c145c6d1e4b7953dd0afafadd7534e3a4019e8d38fc663" "ad0387b3bd8652f730ca46d25f9c170af0fd589f42e7f23f5a9e6412d97d7e56" "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e" "9d0886f8c6b389398a16257bc79780fab9831c7fc11c8ab07fa732cb7b348feade382f92617c9c5305fefba0af02ab5fd39a587d330997ff5bd0db19f7666653" "21644b72aa259e5a588cd3afbafb1d4310f4889680f6c83b9d531596a5a284f34dbebff409d23bcc86aee6bad10c891606f075c6f4755cb536da27db5693f3a7" ]; hashesBase32 = [ "3y8bwfr609h3lh9ch0izcqq7fl" "26mrvc0v1nslch8r0w45zywsbc" "1v4gi57l97pmnylq6lmgxkhd5v" "143xibwh31h9bvxzalr0sjvbbvpa6ffs" "i4hj30pkrfdpgc5dbcgcydqviibfhm6d" "fxz2p030yba2bza71qhss79k3l5y24kd" "0mdqa9w1p6cmli6976v4wi0sw9r4p5prkj7lzfd1877wk11c9c73" "0qy6iz9yh6a079757mxdmypx0gcmnzjd3ij5q78bzk00vxll82lh" "0mkygpci4r4yb8zz5rs2kxcgvw0a2yf5zlj6r8qgfll6pnrqf0xd" "0zdl9zrg8r3i9c1g90lgg9ip5ijzv3yhz91i0zzn3r8ap9ws784gkp9dk9j3aglhgf1amqb0pj21mh7h1nxcl18akqvvf7ggqsy30yg" "19ncrpp37dx0nzzjw4k6zaqkb9mzaq2myhgpzh5aff7qqcj5wwdxslg6ixwncm7gyq8l761gwf87fgsh2bwfyr52s53k2dkqvw8c24x" "2kz74snvckxldmmbisz9ikmy031d28cs6xfdbl6rhxx42glpyz4vww4lajrc5akklxwixl0js4g84233pxvmbykiic5m7i5m9r4nr11" ]; hashesBase64 = [ "1B2M2Y8AsgTpgAmY7PhCfg==" "bGnufyEcZAQZ1TZswHauRg==" "uzQ4+6vUYOptvSfRU+IjOw==" "2jmj7l5rSw0yVb/vlWAYkK/YBwk=" "zVToVowbN88eW62wd5vL84IhIYk=" "bRLhCx0zHa0hDkf9JdTyYIArfnc=" "47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; hashesSRI = [ "md5-1B2M2Y8AsgTpgAmY7PhCfg==" "md5-bGnufyEcZAQZ1TZswHauRg==" "md5-uzQ4+6vUYOptvSfRU+IjOw==" "sha1-2jmj7l5rSw0yVb/vlWAYkK/YBwk=" "sha1-zVToVowbN88eW62wd5vL84IhIYk=" "sha1-bRLhCx0zHa0hDkf9JdTyYIArfnc=" "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "sha256-kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "sha256-rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "sha512-z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "sha512-nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "sha512-IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; } diff --git a/tests/functional/lang/eval-okay-convertHash.nix b/tests/functional/lang/eval-okay-convertHash.nix new file mode 100644 index 000000000..cf4909aaf --- /dev/null +++ b/tests/functional/lang/eval-okay-convertHash.nix @@ -0,0 +1,31 @@ +let + hashAlgos = [ "md5" "md5" "md5" "sha1" "sha1" "sha1" "sha256" "sha256" "sha256" "sha512" "sha512" "sha512" ]; + hashesBase16 = import ./eval-okay-hashstring.exp; + map2 = f: { fsts, snds }: if fsts == [ ] then [ ] else [ (f (builtins.head fsts) (builtins.head snds)) ] ++ map2 f { fsts = builtins.tail fsts; snds = builtins.tail snds; }; + map2' = f: fsts: snds: map2 f { inherit fsts snds; }; + getOutputHashes = hashes: { + hashesBase16 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = 
"base16";}) hashAlgos hashes; + hashesBase32 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";}) hashAlgos hashes; + hashesBase64 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base64";}) hashAlgos hashes; + hashesSRI = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "sri" ;}) hashAlgos hashes; + }; + getOutputHashesColon = hashes: { + hashesBase16 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base16";}) hashAlgos hashes; + hashesBase32 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base32";}) hashAlgos hashes; + hashesBase64 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base64";}) hashAlgos hashes; + hashesSRI = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "sri" ;}) hashAlgos hashes; + }; + outputHashes = getOutputHashes hashesBase16; +in +# map2'` +assert map2' (s1: s2: s1 + s2) [ "a" "b" ] [ "c" "d" ] == [ "ac" "bd" ]; +# hashesBase16 +assert outputHashes.hashesBase16 == hashesBase16; +# standard SRI hashes +assert outputHashes.hashesSRI == (map2' (hashAlgo: hashBody: hashAlgo + "-" + hashBody) hashAlgos outputHashes.hashesBase64); +# without prefix +assert builtins.all (x: getOutputHashes x == outputHashes) (builtins.attrValues outputHashes); +# colon-separated. +# Note that colon prefix must not be applied to the standard SRI. e.g. "sha256:sha256-..." is illegal. +assert builtins.all (x: getOutputHashesColon x == outputHashes) (with outputHashes; [ hashesBase16 hashesBase32 hashesBase64 ]); +outputHashes diff --git a/tests/lang/eval-okay-curpos.exp b/tests/functional/lang/eval-okay-curpos.exp similarity index 100% rename from tests/lang/eval-okay-curpos.exp rename to tests/functional/lang/eval-okay-curpos.exp diff --git a/tests/lang/eval-okay-curpos.nix b/tests/functional/lang/eval-okay-curpos.nix similarity index 100% rename from tests/lang/eval-okay-curpos.nix rename to tests/functional/lang/eval-okay-curpos.nix diff --git a/tests/lang/eval-okay-deepseq.exp b/tests/functional/lang/eval-okay-deepseq.exp similarity index 100% rename from tests/lang/eval-okay-deepseq.exp rename to tests/functional/lang/eval-okay-deepseq.exp diff --git a/tests/lang/eval-okay-deepseq.nix b/tests/functional/lang/eval-okay-deepseq.nix similarity index 100% rename from tests/lang/eval-okay-deepseq.nix rename to tests/functional/lang/eval-okay-deepseq.nix diff --git a/tests/lang/eval-okay-delayed-with-inherit.exp b/tests/functional/lang/eval-okay-delayed-with-inherit.exp similarity index 100% rename from tests/lang/eval-okay-delayed-with-inherit.exp rename to tests/functional/lang/eval-okay-delayed-with-inherit.exp diff --git a/tests/lang/eval-okay-delayed-with-inherit.nix b/tests/functional/lang/eval-okay-delayed-with-inherit.nix similarity index 100% rename from tests/lang/eval-okay-delayed-with-inherit.nix rename to tests/functional/lang/eval-okay-delayed-with-inherit.nix diff --git a/tests/lang/eval-okay-delayed-with.exp b/tests/functional/lang/eval-okay-delayed-with.exp similarity index 100% rename from tests/lang/eval-okay-delayed-with.exp rename to tests/functional/lang/eval-okay-delayed-with.exp diff --git a/tests/lang/eval-okay-delayed-with.nix b/tests/functional/lang/eval-okay-delayed-with.nix similarity index 100% rename from 
tests/lang/eval-okay-delayed-with.nix rename to tests/functional/lang/eval-okay-delayed-with.nix diff --git a/tests/lang/eval-okay-dynamic-attrs-2.exp b/tests/functional/lang/eval-okay-dynamic-attrs-2.exp similarity index 100% rename from tests/lang/eval-okay-dynamic-attrs-2.exp rename to tests/functional/lang/eval-okay-dynamic-attrs-2.exp diff --git a/tests/lang/eval-okay-dynamic-attrs-2.nix b/tests/functional/lang/eval-okay-dynamic-attrs-2.nix similarity index 100% rename from tests/lang/eval-okay-dynamic-attrs-2.nix rename to tests/functional/lang/eval-okay-dynamic-attrs-2.nix diff --git a/tests/lang/eval-okay-dynamic-attrs-bare.exp b/tests/functional/lang/eval-okay-dynamic-attrs-bare.exp similarity index 100% rename from tests/lang/eval-okay-dynamic-attrs-bare.exp rename to tests/functional/lang/eval-okay-dynamic-attrs-bare.exp diff --git a/tests/lang/eval-okay-dynamic-attrs-bare.nix b/tests/functional/lang/eval-okay-dynamic-attrs-bare.nix similarity index 100% rename from tests/lang/eval-okay-dynamic-attrs-bare.nix rename to tests/functional/lang/eval-okay-dynamic-attrs-bare.nix diff --git a/tests/lang/eval-okay-dynamic-attrs.exp b/tests/functional/lang/eval-okay-dynamic-attrs.exp similarity index 100% rename from tests/lang/eval-okay-dynamic-attrs.exp rename to tests/functional/lang/eval-okay-dynamic-attrs.exp diff --git a/tests/lang/eval-okay-dynamic-attrs.nix b/tests/functional/lang/eval-okay-dynamic-attrs.nix similarity index 100% rename from tests/lang/eval-okay-dynamic-attrs.nix rename to tests/functional/lang/eval-okay-dynamic-attrs.nix diff --git a/tests/lang/eval-okay-elem.exp b/tests/functional/lang/eval-okay-elem.exp similarity index 100% rename from tests/lang/eval-okay-elem.exp rename to tests/functional/lang/eval-okay-elem.exp diff --git a/tests/lang/eval-okay-elem.nix b/tests/functional/lang/eval-okay-elem.nix similarity index 100% rename from tests/lang/eval-okay-elem.nix rename to tests/functional/lang/eval-okay-elem.nix diff --git a/tests/lang/eval-okay-empty-args.exp b/tests/functional/lang/eval-okay-empty-args.exp similarity index 100% rename from tests/lang/eval-okay-empty-args.exp rename to tests/functional/lang/eval-okay-empty-args.exp diff --git a/tests/lang/eval-okay-empty-args.nix b/tests/functional/lang/eval-okay-empty-args.nix similarity index 100% rename from tests/lang/eval-okay-empty-args.nix rename to tests/functional/lang/eval-okay-empty-args.nix diff --git a/tests/lang/eval-okay-eq-derivations.exp b/tests/functional/lang/eval-okay-eq-derivations.exp similarity index 100% rename from tests/lang/eval-okay-eq-derivations.exp rename to tests/functional/lang/eval-okay-eq-derivations.exp diff --git a/tests/lang/eval-okay-eq-derivations.nix b/tests/functional/lang/eval-okay-eq-derivations.nix similarity index 100% rename from tests/lang/eval-okay-eq-derivations.nix rename to tests/functional/lang/eval-okay-eq-derivations.nix diff --git a/tests/lang/eval-okay-eq.exp b/tests/functional/lang/eval-okay-eq.exp similarity index 100% rename from tests/lang/eval-okay-eq.exp rename to tests/functional/lang/eval-okay-eq.exp diff --git a/tests/lang/eval-okay-eq.nix b/tests/functional/lang/eval-okay-eq.nix similarity index 100% rename from tests/lang/eval-okay-eq.nix rename to tests/functional/lang/eval-okay-eq.nix diff --git a/tests/lang/eval-okay-filter.exp b/tests/functional/lang/eval-okay-filter.exp similarity index 100% rename from tests/lang/eval-okay-filter.exp rename to tests/functional/lang/eval-okay-filter.exp diff --git a/tests/lang/eval-okay-filter.nix 
b/tests/functional/lang/eval-okay-filter.nix similarity index 100% rename from tests/lang/eval-okay-filter.nix rename to tests/functional/lang/eval-okay-filter.nix diff --git a/tests/lang/eval-okay-flake-ref-to-string.exp b/tests/functional/lang/eval-okay-flake-ref-to-string.exp similarity index 100% rename from tests/lang/eval-okay-flake-ref-to-string.exp rename to tests/functional/lang/eval-okay-flake-ref-to-string.exp diff --git a/tests/lang/eval-okay-flake-ref-to-string.nix b/tests/functional/lang/eval-okay-flake-ref-to-string.nix similarity index 100% rename from tests/lang/eval-okay-flake-ref-to-string.nix rename to tests/functional/lang/eval-okay-flake-ref-to-string.nix diff --git a/tests/lang/eval-okay-flatten.exp b/tests/functional/lang/eval-okay-flatten.exp similarity index 100% rename from tests/lang/eval-okay-flatten.exp rename to tests/functional/lang/eval-okay-flatten.exp diff --git a/tests/lang/eval-okay-flatten.nix b/tests/functional/lang/eval-okay-flatten.nix similarity index 100% rename from tests/lang/eval-okay-flatten.nix rename to tests/functional/lang/eval-okay-flatten.nix diff --git a/tests/lang/eval-okay-float.exp b/tests/functional/lang/eval-okay-float.exp similarity index 100% rename from tests/lang/eval-okay-float.exp rename to tests/functional/lang/eval-okay-float.exp diff --git a/tests/lang/eval-okay-float.nix b/tests/functional/lang/eval-okay-float.nix similarity index 100% rename from tests/lang/eval-okay-float.nix rename to tests/functional/lang/eval-okay-float.nix diff --git a/tests/lang/eval-okay-floor-ceil.exp b/tests/functional/lang/eval-okay-floor-ceil.exp similarity index 100% rename from tests/lang/eval-okay-floor-ceil.exp rename to tests/functional/lang/eval-okay-floor-ceil.exp diff --git a/tests/lang/eval-okay-floor-ceil.nix b/tests/functional/lang/eval-okay-floor-ceil.nix similarity index 100% rename from tests/lang/eval-okay-floor-ceil.nix rename to tests/functional/lang/eval-okay-floor-ceil.nix diff --git a/tests/lang/eval-okay-foldlStrict-lazy-elements.exp b/tests/functional/lang/eval-okay-foldlStrict-lazy-elements.exp similarity index 100% rename from tests/lang/eval-okay-foldlStrict-lazy-elements.exp rename to tests/functional/lang/eval-okay-foldlStrict-lazy-elements.exp diff --git a/tests/lang/eval-okay-foldlStrict-lazy-elements.nix b/tests/functional/lang/eval-okay-foldlStrict-lazy-elements.nix similarity index 100% rename from tests/lang/eval-okay-foldlStrict-lazy-elements.nix rename to tests/functional/lang/eval-okay-foldlStrict-lazy-elements.nix diff --git a/tests/lang/eval-okay-foldlStrict-lazy-initial-accumulator.exp b/tests/functional/lang/eval-okay-foldlStrict-lazy-initial-accumulator.exp similarity index 100% rename from tests/lang/eval-okay-foldlStrict-lazy-initial-accumulator.exp rename to tests/functional/lang/eval-okay-foldlStrict-lazy-initial-accumulator.exp diff --git a/tests/lang/eval-okay-foldlStrict-lazy-initial-accumulator.nix b/tests/functional/lang/eval-okay-foldlStrict-lazy-initial-accumulator.nix similarity index 100% rename from tests/lang/eval-okay-foldlStrict-lazy-initial-accumulator.nix rename to tests/functional/lang/eval-okay-foldlStrict-lazy-initial-accumulator.nix diff --git a/tests/lang/eval-okay-foldlStrict.exp b/tests/functional/lang/eval-okay-foldlStrict.exp similarity index 100% rename from tests/lang/eval-okay-foldlStrict.exp rename to tests/functional/lang/eval-okay-foldlStrict.exp diff --git a/tests/lang/eval-okay-foldlStrict.nix b/tests/functional/lang/eval-okay-foldlStrict.nix similarity index 100% 
rename from tests/lang/eval-okay-foldlStrict.nix rename to tests/functional/lang/eval-okay-foldlStrict.nix diff --git a/tests/lang/eval-okay-fromTOML-timestamps.exp b/tests/functional/lang/eval-okay-fromTOML-timestamps.exp similarity index 100% rename from tests/lang/eval-okay-fromTOML-timestamps.exp rename to tests/functional/lang/eval-okay-fromTOML-timestamps.exp diff --git a/tests/lang/eval-okay-fromTOML-timestamps.flags b/tests/functional/lang/eval-okay-fromTOML-timestamps.flags similarity index 100% rename from tests/lang/eval-okay-fromTOML-timestamps.flags rename to tests/functional/lang/eval-okay-fromTOML-timestamps.flags diff --git a/tests/lang/eval-okay-fromTOML-timestamps.nix b/tests/functional/lang/eval-okay-fromTOML-timestamps.nix similarity index 100% rename from tests/lang/eval-okay-fromTOML-timestamps.nix rename to tests/functional/lang/eval-okay-fromTOML-timestamps.nix diff --git a/tests/lang/eval-okay-fromTOML.exp b/tests/functional/lang/eval-okay-fromTOML.exp similarity index 100% rename from tests/lang/eval-okay-fromTOML.exp rename to tests/functional/lang/eval-okay-fromTOML.exp diff --git a/tests/lang/eval-okay-fromTOML.nix b/tests/functional/lang/eval-okay-fromTOML.nix similarity index 100% rename from tests/lang/eval-okay-fromTOML.nix rename to tests/functional/lang/eval-okay-fromTOML.nix diff --git a/tests/lang/eval-okay-fromjson-escapes.exp b/tests/functional/lang/eval-okay-fromjson-escapes.exp similarity index 100% rename from tests/lang/eval-okay-fromjson-escapes.exp rename to tests/functional/lang/eval-okay-fromjson-escapes.exp diff --git a/tests/lang/eval-okay-fromjson-escapes.nix b/tests/functional/lang/eval-okay-fromjson-escapes.nix similarity index 100% rename from tests/lang/eval-okay-fromjson-escapes.nix rename to tests/functional/lang/eval-okay-fromjson-escapes.nix diff --git a/tests/lang/eval-okay-fromjson.exp b/tests/functional/lang/eval-okay-fromjson.exp similarity index 100% rename from tests/lang/eval-okay-fromjson.exp rename to tests/functional/lang/eval-okay-fromjson.exp diff --git a/tests/lang/eval-okay-fromjson.nix b/tests/functional/lang/eval-okay-fromjson.nix similarity index 100% rename from tests/lang/eval-okay-fromjson.nix rename to tests/functional/lang/eval-okay-fromjson.nix diff --git a/tests/lang/eval-okay-functionargs.exp b/tests/functional/lang/eval-okay-functionargs.exp similarity index 100% rename from tests/lang/eval-okay-functionargs.exp rename to tests/functional/lang/eval-okay-functionargs.exp diff --git a/tests/lang/eval-okay-functionargs.exp.xml b/tests/functional/lang/eval-okay-functionargs.exp.xml similarity index 100% rename from tests/lang/eval-okay-functionargs.exp.xml rename to tests/functional/lang/eval-okay-functionargs.exp.xml diff --git a/tests/lang/eval-okay-functionargs.nix b/tests/functional/lang/eval-okay-functionargs.nix similarity index 100% rename from tests/lang/eval-okay-functionargs.nix rename to tests/functional/lang/eval-okay-functionargs.nix diff --git a/tests/lang/eval-okay-getattrpos-functionargs.exp b/tests/functional/lang/eval-okay-getattrpos-functionargs.exp similarity index 100% rename from tests/lang/eval-okay-getattrpos-functionargs.exp rename to tests/functional/lang/eval-okay-getattrpos-functionargs.exp diff --git a/tests/lang/eval-okay-getattrpos-functionargs.nix b/tests/functional/lang/eval-okay-getattrpos-functionargs.nix similarity index 100% rename from tests/lang/eval-okay-getattrpos-functionargs.nix rename to tests/functional/lang/eval-okay-getattrpos-functionargs.nix diff --git 
a/tests/lang/eval-okay-getattrpos-undefined.exp b/tests/functional/lang/eval-okay-getattrpos-undefined.exp similarity index 100% rename from tests/lang/eval-okay-getattrpos-undefined.exp rename to tests/functional/lang/eval-okay-getattrpos-undefined.exp diff --git a/tests/lang/eval-okay-getattrpos-undefined.nix b/tests/functional/lang/eval-okay-getattrpos-undefined.nix similarity index 100% rename from tests/lang/eval-okay-getattrpos-undefined.nix rename to tests/functional/lang/eval-okay-getattrpos-undefined.nix diff --git a/tests/lang/eval-okay-getattrpos.exp b/tests/functional/lang/eval-okay-getattrpos.exp similarity index 100% rename from tests/lang/eval-okay-getattrpos.exp rename to tests/functional/lang/eval-okay-getattrpos.exp diff --git a/tests/lang/eval-okay-getattrpos.nix b/tests/functional/lang/eval-okay-getattrpos.nix similarity index 100% rename from tests/lang/eval-okay-getattrpos.nix rename to tests/functional/lang/eval-okay-getattrpos.nix diff --git a/tests/lang/eval-okay-getenv.exp b/tests/functional/lang/eval-okay-getenv.exp similarity index 100% rename from tests/lang/eval-okay-getenv.exp rename to tests/functional/lang/eval-okay-getenv.exp diff --git a/tests/lang/eval-okay-getenv.nix b/tests/functional/lang/eval-okay-getenv.nix similarity index 100% rename from tests/lang/eval-okay-getenv.nix rename to tests/functional/lang/eval-okay-getenv.nix diff --git a/tests/lang/eval-okay-groupBy.exp b/tests/functional/lang/eval-okay-groupBy.exp similarity index 100% rename from tests/lang/eval-okay-groupBy.exp rename to tests/functional/lang/eval-okay-groupBy.exp diff --git a/tests/lang/eval-okay-groupBy.nix b/tests/functional/lang/eval-okay-groupBy.nix similarity index 100% rename from tests/lang/eval-okay-groupBy.nix rename to tests/functional/lang/eval-okay-groupBy.nix diff --git a/tests/lang/eval-okay-hash.exp b/tests/functional/lang/eval-okay-hash.exp similarity index 100% rename from tests/lang/eval-okay-hash.exp rename to tests/functional/lang/eval-okay-hash.exp diff --git a/tests/lang/eval-okay-hashfile.exp b/tests/functional/lang/eval-okay-hashfile.exp similarity index 100% rename from tests/lang/eval-okay-hashfile.exp rename to tests/functional/lang/eval-okay-hashfile.exp diff --git a/tests/lang/eval-okay-hashfile.nix b/tests/functional/lang/eval-okay-hashfile.nix similarity index 100% rename from tests/lang/eval-okay-hashfile.nix rename to tests/functional/lang/eval-okay-hashfile.nix diff --git a/tests/lang/eval-okay-hashstring.exp b/tests/functional/lang/eval-okay-hashstring.exp similarity index 100% rename from tests/lang/eval-okay-hashstring.exp rename to tests/functional/lang/eval-okay-hashstring.exp diff --git a/tests/lang/eval-okay-hashstring.nix b/tests/functional/lang/eval-okay-hashstring.nix similarity index 100% rename from tests/lang/eval-okay-hashstring.nix rename to tests/functional/lang/eval-okay-hashstring.nix diff --git a/tests/lang/eval-okay-if.exp b/tests/functional/lang/eval-okay-if.exp similarity index 100% rename from tests/lang/eval-okay-if.exp rename to tests/functional/lang/eval-okay-if.exp diff --git a/tests/lang/eval-okay-if.nix b/tests/functional/lang/eval-okay-if.nix similarity index 100% rename from tests/lang/eval-okay-if.nix rename to tests/functional/lang/eval-okay-if.nix diff --git a/tests/lang/eval-okay-import.exp b/tests/functional/lang/eval-okay-import.exp similarity index 100% rename from tests/lang/eval-okay-import.exp rename to tests/functional/lang/eval-okay-import.exp diff --git a/tests/lang/eval-okay-import.nix 
b/tests/functional/lang/eval-okay-import.nix similarity index 100% rename from tests/lang/eval-okay-import.nix rename to tests/functional/lang/eval-okay-import.nix diff --git a/tests/lang/eval-okay-ind-string.exp b/tests/functional/lang/eval-okay-ind-string.exp similarity index 100% rename from tests/lang/eval-okay-ind-string.exp rename to tests/functional/lang/eval-okay-ind-string.exp diff --git a/tests/lang/eval-okay-ind-string.nix b/tests/functional/lang/eval-okay-ind-string.nix similarity index 100% rename from tests/lang/eval-okay-ind-string.nix rename to tests/functional/lang/eval-okay-ind-string.nix diff --git a/tests/lang/eval-okay-intersectAttrs.exp b/tests/functional/lang/eval-okay-intersectAttrs.exp similarity index 100% rename from tests/lang/eval-okay-intersectAttrs.exp rename to tests/functional/lang/eval-okay-intersectAttrs.exp diff --git a/tests/lang/eval-okay-intersectAttrs.nix b/tests/functional/lang/eval-okay-intersectAttrs.nix similarity index 100% rename from tests/lang/eval-okay-intersectAttrs.nix rename to tests/functional/lang/eval-okay-intersectAttrs.nix diff --git a/tests/lang/eval-okay-let.exp b/tests/functional/lang/eval-okay-let.exp similarity index 100% rename from tests/lang/eval-okay-let.exp rename to tests/functional/lang/eval-okay-let.exp diff --git a/tests/lang/eval-okay-let.nix b/tests/functional/lang/eval-okay-let.nix similarity index 100% rename from tests/lang/eval-okay-let.nix rename to tests/functional/lang/eval-okay-let.nix diff --git a/tests/lang/eval-okay-list.exp b/tests/functional/lang/eval-okay-list.exp similarity index 100% rename from tests/lang/eval-okay-list.exp rename to tests/functional/lang/eval-okay-list.exp diff --git a/tests/lang/eval-okay-list.nix b/tests/functional/lang/eval-okay-list.nix similarity index 100% rename from tests/lang/eval-okay-list.nix rename to tests/functional/lang/eval-okay-list.nix diff --git a/tests/lang/eval-okay-listtoattrs.exp b/tests/functional/lang/eval-okay-listtoattrs.exp similarity index 100% rename from tests/lang/eval-okay-listtoattrs.exp rename to tests/functional/lang/eval-okay-listtoattrs.exp diff --git a/tests/lang/eval-okay-listtoattrs.nix b/tests/functional/lang/eval-okay-listtoattrs.nix similarity index 100% rename from tests/lang/eval-okay-listtoattrs.nix rename to tests/functional/lang/eval-okay-listtoattrs.nix diff --git a/tests/lang/eval-okay-logic.exp b/tests/functional/lang/eval-okay-logic.exp similarity index 100% rename from tests/lang/eval-okay-logic.exp rename to tests/functional/lang/eval-okay-logic.exp diff --git a/tests/lang/eval-okay-logic.nix b/tests/functional/lang/eval-okay-logic.nix similarity index 100% rename from tests/lang/eval-okay-logic.nix rename to tests/functional/lang/eval-okay-logic.nix diff --git a/tests/lang/eval-okay-map.exp b/tests/functional/lang/eval-okay-map.exp similarity index 100% rename from tests/lang/eval-okay-map.exp rename to tests/functional/lang/eval-okay-map.exp diff --git a/tests/lang/eval-okay-map.nix b/tests/functional/lang/eval-okay-map.nix similarity index 100% rename from tests/lang/eval-okay-map.nix rename to tests/functional/lang/eval-okay-map.nix diff --git a/tests/lang/eval-okay-mapattrs.exp b/tests/functional/lang/eval-okay-mapattrs.exp similarity index 100% rename from tests/lang/eval-okay-mapattrs.exp rename to tests/functional/lang/eval-okay-mapattrs.exp diff --git a/tests/lang/eval-okay-mapattrs.nix b/tests/functional/lang/eval-okay-mapattrs.nix similarity index 100% rename from tests/lang/eval-okay-mapattrs.nix rename to 
tests/functional/lang/eval-okay-mapattrs.nix diff --git a/tests/lang/eval-okay-merge-dynamic-attrs.exp b/tests/functional/lang/eval-okay-merge-dynamic-attrs.exp similarity index 100% rename from tests/lang/eval-okay-merge-dynamic-attrs.exp rename to tests/functional/lang/eval-okay-merge-dynamic-attrs.exp diff --git a/tests/lang/eval-okay-merge-dynamic-attrs.nix b/tests/functional/lang/eval-okay-merge-dynamic-attrs.nix similarity index 100% rename from tests/lang/eval-okay-merge-dynamic-attrs.nix rename to tests/functional/lang/eval-okay-merge-dynamic-attrs.nix diff --git a/tests/lang/eval-okay-nested-with.exp b/tests/functional/lang/eval-okay-nested-with.exp similarity index 100% rename from tests/lang/eval-okay-nested-with.exp rename to tests/functional/lang/eval-okay-nested-with.exp diff --git a/tests/lang/eval-okay-nested-with.nix b/tests/functional/lang/eval-okay-nested-with.nix similarity index 100% rename from tests/lang/eval-okay-nested-with.nix rename to tests/functional/lang/eval-okay-nested-with.nix diff --git a/tests/lang/eval-okay-new-let.exp b/tests/functional/lang/eval-okay-new-let.exp similarity index 100% rename from tests/lang/eval-okay-new-let.exp rename to tests/functional/lang/eval-okay-new-let.exp diff --git a/tests/lang/eval-okay-new-let.nix b/tests/functional/lang/eval-okay-new-let.nix similarity index 100% rename from tests/lang/eval-okay-new-let.nix rename to tests/functional/lang/eval-okay-new-let.nix diff --git a/tests/lang/eval-okay-null-dynamic-attrs.exp b/tests/functional/lang/eval-okay-null-dynamic-attrs.exp similarity index 100% rename from tests/lang/eval-okay-null-dynamic-attrs.exp rename to tests/functional/lang/eval-okay-null-dynamic-attrs.exp diff --git a/tests/lang/eval-okay-null-dynamic-attrs.nix b/tests/functional/lang/eval-okay-null-dynamic-attrs.nix similarity index 100% rename from tests/lang/eval-okay-null-dynamic-attrs.nix rename to tests/functional/lang/eval-okay-null-dynamic-attrs.nix diff --git a/tests/lang/eval-okay-overrides.exp b/tests/functional/lang/eval-okay-overrides.exp similarity index 100% rename from tests/lang/eval-okay-overrides.exp rename to tests/functional/lang/eval-okay-overrides.exp diff --git a/tests/lang/eval-okay-overrides.nix b/tests/functional/lang/eval-okay-overrides.nix similarity index 100% rename from tests/lang/eval-okay-overrides.nix rename to tests/functional/lang/eval-okay-overrides.nix diff --git a/tests/lang/eval-okay-parse-flake-ref.exp b/tests/functional/lang/eval-okay-parse-flake-ref.exp similarity index 100% rename from tests/lang/eval-okay-parse-flake-ref.exp rename to tests/functional/lang/eval-okay-parse-flake-ref.exp diff --git a/tests/lang/eval-okay-parse-flake-ref.nix b/tests/functional/lang/eval-okay-parse-flake-ref.nix similarity index 100% rename from tests/lang/eval-okay-parse-flake-ref.nix rename to tests/functional/lang/eval-okay-parse-flake-ref.nix diff --git a/tests/lang/eval-okay-partition.exp b/tests/functional/lang/eval-okay-partition.exp similarity index 100% rename from tests/lang/eval-okay-partition.exp rename to tests/functional/lang/eval-okay-partition.exp diff --git a/tests/lang/eval-okay-partition.nix b/tests/functional/lang/eval-okay-partition.nix similarity index 100% rename from tests/lang/eval-okay-partition.nix rename to tests/functional/lang/eval-okay-partition.nix diff --git a/tests/lang/eval-okay-path-string-interpolation.exp b/tests/functional/lang/eval-okay-path-string-interpolation.exp similarity index 100% rename from tests/lang/eval-okay-path-string-interpolation.exp 
rename to tests/functional/lang/eval-okay-path-string-interpolation.exp
diff --git a/tests/lang/eval-okay-path-string-interpolation.nix b/tests/functional/lang/eval-okay-path-string-interpolation.nix
similarity index 100%
rename from tests/lang/eval-okay-path-string-interpolation.nix
rename to tests/functional/lang/eval-okay-path-string-interpolation.nix
diff --git a/tests/functional/lang/eval-okay-path.exp b/tests/functional/lang/eval-okay-path.exp
new file mode 100644
index 000000000..635e2243a
--- /dev/null
+++ b/tests/functional/lang/eval-okay-path.exp
@@ -0,0 +1 @@
+[ "/nix/store/ya937r4ydw0l6kayq8jkyqaips9c75jm-output" "/nix/store/m7y372g6jb0g4hh1dzmj847rd356fhnz-output" ]
diff --git a/tests/functional/lang/eval-okay-path.nix b/tests/functional/lang/eval-okay-path.nix
new file mode 100644
index 000000000..599b33541
--- /dev/null
+++ b/tests/functional/lang/eval-okay-path.nix
@@ -0,0 +1,15 @@
+[
+  (builtins.path
+    { path = ./.;
+      filter = path: _: baseNameOf path == "data";
+      recursive = true;
+      sha256 = "1yhm3gwvg5a41yylymgblsclk95fs6jy72w0wv925mmidlhcq4sw";
+      name = "output";
+    })
+  (builtins.path
+    { path = ./data;
+      recursive = false;
+      sha256 = "0k4lwj58f2w5yh92ilrwy9917pycipbrdrr13vbb3yd02j09vfxm";
+      name = "output";
+    })
+]
diff --git a/tests/lang/eval-okay-pathexists.exp b/tests/functional/lang/eval-okay-pathexists.exp
similarity index 100%
rename from tests/lang/eval-okay-pathexists.exp
rename to tests/functional/lang/eval-okay-pathexists.exp
diff --git a/tests/functional/lang/eval-okay-pathexists.nix b/tests/functional/lang/eval-okay-pathexists.nix
new file mode 100644
index 000000000..31697f66a
--- /dev/null
+++ b/tests/functional/lang/eval-okay-pathexists.nix
@@ -0,0 +1,31 @@
+builtins.pathExists (./lib.nix)
+&& builtins.pathExists (builtins.toPath ./lib.nix)
+&& builtins.pathExists (builtins.toString ./lib.nix)
+&& !builtins.pathExists (builtins.toString ./lib.nix + "/")
+&& !builtins.pathExists (builtins.toString ./lib.nix + "/.")
+# FIXME
+# && !builtins.pathExists (builtins.toString ./lib.nix + "/..")
+# && !builtins.pathExists (builtins.toString ./lib.nix + "/a/..")
+# && !builtins.pathExists (builtins.toString ./lib.nix + "/../lib.nix")
+&& !builtins.pathExists (builtins.toString ./lib.nix + "/./")
+&& !builtins.pathExists (builtins.toString ./lib.nix + "/./.")
+&& builtins.pathExists (builtins.toString ./.. + "/lang/lib.nix")
+&& !builtins.pathExists (builtins.toString ./.. + "lang/lib.nix")
+&& builtins.pathExists (builtins.toString ./. + "/../lang/lib.nix")
+&& builtins.pathExists (builtins.toString ./. + "/../lang/./lib.nix")
+&& builtins.pathExists (builtins.toString ./.)
+&& builtins.pathExists (builtins.toString ./. + "/")
+&& builtins.pathExists (builtins.toString ./. + "/../lang")
+&& builtins.pathExists (builtins.toString ./. + "/../lang/")
+&& builtins.pathExists (builtins.toString ./. + "/../lang/.")
+&& builtins.pathExists (builtins.toString ./. + "/../lang/./")
+&& builtins.pathExists (builtins.toString ./. + "/../lang//./")
+&& builtins.pathExists (builtins.toString ./. + "/../lang/..")
+&& builtins.pathExists (builtins.toString ./. + "/../lang/../")
+&& builtins.pathExists (builtins.toString ./. + "/../lang/..//")
+&& builtins.pathExists (builtins.toPath (builtins.toString ./lib.nix))
+&& !builtins.pathExists (builtins.toPath (builtins.toString ./bla.nix))
+&& builtins.pathExists (builtins.toPath { __toString = x: builtins.toString ./lib.nix; })
+&& builtins.pathExists (builtins.toPath { outPath = builtins.toString ./lib.nix; })
+&& builtins.pathExists ./lib.nix
+&& !builtins.pathExists ./bla.nix
diff --git a/tests/lang/eval-okay-patterns.exp b/tests/functional/lang/eval-okay-patterns.exp
similarity index 100%
rename from tests/lang/eval-okay-patterns.exp
rename to tests/functional/lang/eval-okay-patterns.exp
diff --git a/tests/lang/eval-okay-patterns.nix b/tests/functional/lang/eval-okay-patterns.nix
similarity index 100%
rename from tests/lang/eval-okay-patterns.nix
rename to tests/functional/lang/eval-okay-patterns.nix
diff --git a/tests/lang/eval-okay-print.err.exp b/tests/functional/lang/eval-okay-print.err.exp
similarity index 100%
rename from tests/lang/eval-okay-print.err.exp
rename to tests/functional/lang/eval-okay-print.err.exp
diff --git a/tests/lang/eval-okay-print.exp b/tests/functional/lang/eval-okay-print.exp
similarity index 100%
rename from tests/lang/eval-okay-print.exp
rename to tests/functional/lang/eval-okay-print.exp
diff --git a/tests/lang/eval-okay-print.nix b/tests/functional/lang/eval-okay-print.nix
similarity index 100%
rename from tests/lang/eval-okay-print.nix
rename to tests/functional/lang/eval-okay-print.nix
diff --git a/tests/lang/eval-okay-readDir.exp b/tests/functional/lang/eval-okay-readDir.exp
similarity index 100%
rename from tests/lang/eval-okay-readDir.exp
rename to tests/functional/lang/eval-okay-readDir.exp
diff --git a/tests/lang/eval-okay-readDir.nix b/tests/functional/lang/eval-okay-readDir.nix
similarity index 100%
rename from tests/lang/eval-okay-readDir.nix
rename to tests/functional/lang/eval-okay-readDir.nix
diff --git a/tests/lang/eval-okay-readFileType.exp b/tests/functional/lang/eval-okay-readFileType.exp
similarity index 100%
rename from tests/lang/eval-okay-readFileType.exp
rename to tests/functional/lang/eval-okay-readFileType.exp
diff --git a/tests/lang/eval-okay-readFileType.nix b/tests/functional/lang/eval-okay-readFileType.nix
similarity index 100%
rename from tests/lang/eval-okay-readFileType.nix
rename to tests/functional/lang/eval-okay-readFileType.nix
diff --git a/tests/lang/eval-okay-readfile.exp b/tests/functional/lang/eval-okay-readfile.exp
similarity index 100%
rename from tests/lang/eval-okay-readfile.exp
rename to tests/functional/lang/eval-okay-readfile.exp
diff --git a/tests/lang/eval-okay-readfile.nix b/tests/functional/lang/eval-okay-readfile.nix
similarity index 100%
rename from tests/lang/eval-okay-readfile.nix
rename to tests/functional/lang/eval-okay-readfile.nix
diff --git a/tests/lang/eval-okay-redefine-builtin.exp b/tests/functional/lang/eval-okay-redefine-builtin.exp
similarity index 100%
rename from tests/lang/eval-okay-redefine-builtin.exp
rename to tests/functional/lang/eval-okay-redefine-builtin.exp
diff --git a/tests/lang/eval-okay-redefine-builtin.nix b/tests/functional/lang/eval-okay-redefine-builtin.nix
similarity index 100%
rename from tests/lang/eval-okay-redefine-builtin.nix
rename to tests/functional/lang/eval-okay-redefine-builtin.nix
diff --git a/tests/lang/eval-okay-regex-match.exp b/tests/functional/lang/eval-okay-regex-match.exp
similarity index 100%
rename from tests/lang/eval-okay-regex-match.exp
rename to tests/functional/lang/eval-okay-regex-match.exp
diff --git
a/tests/lang/eval-okay-regex-match.nix b/tests/functional/lang/eval-okay-regex-match.nix similarity index 100% rename from tests/lang/eval-okay-regex-match.nix rename to tests/functional/lang/eval-okay-regex-match.nix diff --git a/tests/lang/eval-okay-regex-split.exp b/tests/functional/lang/eval-okay-regex-split.exp similarity index 100% rename from tests/lang/eval-okay-regex-split.exp rename to tests/functional/lang/eval-okay-regex-split.exp diff --git a/tests/lang/eval-okay-regex-split.nix b/tests/functional/lang/eval-okay-regex-split.nix similarity index 100% rename from tests/lang/eval-okay-regex-split.nix rename to tests/functional/lang/eval-okay-regex-split.nix diff --git a/tests/lang/eval-okay-regression-20220122.exp b/tests/functional/lang/eval-okay-regression-20220122.exp similarity index 100% rename from tests/lang/eval-okay-regression-20220122.exp rename to tests/functional/lang/eval-okay-regression-20220122.exp diff --git a/tests/lang/eval-okay-regression-20220122.nix b/tests/functional/lang/eval-okay-regression-20220122.nix similarity index 100% rename from tests/lang/eval-okay-regression-20220122.nix rename to tests/functional/lang/eval-okay-regression-20220122.nix diff --git a/tests/lang/eval-okay-regression-20220125.exp b/tests/functional/lang/eval-okay-regression-20220125.exp similarity index 100% rename from tests/lang/eval-okay-regression-20220125.exp rename to tests/functional/lang/eval-okay-regression-20220125.exp diff --git a/tests/lang/eval-okay-regression-20220125.nix b/tests/functional/lang/eval-okay-regression-20220125.nix similarity index 100% rename from tests/lang/eval-okay-regression-20220125.nix rename to tests/functional/lang/eval-okay-regression-20220125.nix diff --git a/tests/lang/eval-okay-remove.exp b/tests/functional/lang/eval-okay-remove.exp similarity index 100% rename from tests/lang/eval-okay-remove.exp rename to tests/functional/lang/eval-okay-remove.exp diff --git a/tests/lang/eval-okay-remove.nix b/tests/functional/lang/eval-okay-remove.nix similarity index 100% rename from tests/lang/eval-okay-remove.nix rename to tests/functional/lang/eval-okay-remove.nix diff --git a/tests/lang/eval-okay-replacestrings.exp b/tests/functional/lang/eval-okay-replacestrings.exp similarity index 100% rename from tests/lang/eval-okay-replacestrings.exp rename to tests/functional/lang/eval-okay-replacestrings.exp diff --git a/tests/lang/eval-okay-replacestrings.nix b/tests/functional/lang/eval-okay-replacestrings.nix similarity index 100% rename from tests/lang/eval-okay-replacestrings.nix rename to tests/functional/lang/eval-okay-replacestrings.nix diff --git a/tests/lang/eval-okay-scope-1.exp b/tests/functional/lang/eval-okay-scope-1.exp similarity index 100% rename from tests/lang/eval-okay-scope-1.exp rename to tests/functional/lang/eval-okay-scope-1.exp diff --git a/tests/lang/eval-okay-scope-1.nix b/tests/functional/lang/eval-okay-scope-1.nix similarity index 100% rename from tests/lang/eval-okay-scope-1.nix rename to tests/functional/lang/eval-okay-scope-1.nix diff --git a/tests/lang/eval-okay-scope-2.exp b/tests/functional/lang/eval-okay-scope-2.exp similarity index 100% rename from tests/lang/eval-okay-scope-2.exp rename to tests/functional/lang/eval-okay-scope-2.exp diff --git a/tests/lang/eval-okay-scope-2.nix b/tests/functional/lang/eval-okay-scope-2.nix similarity index 100% rename from tests/lang/eval-okay-scope-2.nix rename to tests/functional/lang/eval-okay-scope-2.nix diff --git a/tests/lang/eval-okay-scope-3.exp 
b/tests/functional/lang/eval-okay-scope-3.exp similarity index 100% rename from tests/lang/eval-okay-scope-3.exp rename to tests/functional/lang/eval-okay-scope-3.exp diff --git a/tests/lang/eval-okay-scope-3.nix b/tests/functional/lang/eval-okay-scope-3.nix similarity index 100% rename from tests/lang/eval-okay-scope-3.nix rename to tests/functional/lang/eval-okay-scope-3.nix diff --git a/tests/lang/eval-okay-scope-4.exp b/tests/functional/lang/eval-okay-scope-4.exp similarity index 100% rename from tests/lang/eval-okay-scope-4.exp rename to tests/functional/lang/eval-okay-scope-4.exp diff --git a/tests/lang/eval-okay-scope-4.nix b/tests/functional/lang/eval-okay-scope-4.nix similarity index 100% rename from tests/lang/eval-okay-scope-4.nix rename to tests/functional/lang/eval-okay-scope-4.nix diff --git a/tests/lang/eval-okay-scope-6.exp b/tests/functional/lang/eval-okay-scope-6.exp similarity index 100% rename from tests/lang/eval-okay-scope-6.exp rename to tests/functional/lang/eval-okay-scope-6.exp diff --git a/tests/lang/eval-okay-scope-6.nix b/tests/functional/lang/eval-okay-scope-6.nix similarity index 100% rename from tests/lang/eval-okay-scope-6.nix rename to tests/functional/lang/eval-okay-scope-6.nix diff --git a/tests/lang/eval-okay-scope-7.exp b/tests/functional/lang/eval-okay-scope-7.exp similarity index 100% rename from tests/lang/eval-okay-scope-7.exp rename to tests/functional/lang/eval-okay-scope-7.exp diff --git a/tests/lang/eval-okay-scope-7.nix b/tests/functional/lang/eval-okay-scope-7.nix similarity index 100% rename from tests/lang/eval-okay-scope-7.nix rename to tests/functional/lang/eval-okay-scope-7.nix diff --git a/tests/lang/eval-okay-search-path.exp b/tests/functional/lang/eval-okay-search-path.exp similarity index 100% rename from tests/lang/eval-okay-search-path.exp rename to tests/functional/lang/eval-okay-search-path.exp diff --git a/tests/lang/eval-okay-search-path.flags b/tests/functional/lang/eval-okay-search-path.flags similarity index 100% rename from tests/lang/eval-okay-search-path.flags rename to tests/functional/lang/eval-okay-search-path.flags diff --git a/tests/lang/eval-okay-search-path.nix b/tests/functional/lang/eval-okay-search-path.nix similarity index 100% rename from tests/lang/eval-okay-search-path.nix rename to tests/functional/lang/eval-okay-search-path.nix diff --git a/tests/lang/eval-okay-seq.exp b/tests/functional/lang/eval-okay-seq.exp similarity index 100% rename from tests/lang/eval-okay-seq.exp rename to tests/functional/lang/eval-okay-seq.exp diff --git a/tests/lang/eval-okay-seq.nix b/tests/functional/lang/eval-okay-seq.nix similarity index 100% rename from tests/lang/eval-okay-seq.nix rename to tests/functional/lang/eval-okay-seq.nix diff --git a/tests/lang/eval-okay-sort.exp b/tests/functional/lang/eval-okay-sort.exp similarity index 100% rename from tests/lang/eval-okay-sort.exp rename to tests/functional/lang/eval-okay-sort.exp diff --git a/tests/lang/eval-okay-sort.nix b/tests/functional/lang/eval-okay-sort.nix similarity index 100% rename from tests/lang/eval-okay-sort.nix rename to tests/functional/lang/eval-okay-sort.nix diff --git a/tests/lang/eval-okay-splitversion.exp b/tests/functional/lang/eval-okay-splitversion.exp similarity index 100% rename from tests/lang/eval-okay-splitversion.exp rename to tests/functional/lang/eval-okay-splitversion.exp diff --git a/tests/lang/eval-okay-splitversion.nix b/tests/functional/lang/eval-okay-splitversion.nix similarity index 100% rename from 
tests/lang/eval-okay-splitversion.nix rename to tests/functional/lang/eval-okay-splitversion.nix diff --git a/tests/lang/eval-okay-string.exp b/tests/functional/lang/eval-okay-string.exp similarity index 100% rename from tests/lang/eval-okay-string.exp rename to tests/functional/lang/eval-okay-string.exp diff --git a/tests/lang/eval-okay-string.nix b/tests/functional/lang/eval-okay-string.nix similarity index 100% rename from tests/lang/eval-okay-string.nix rename to tests/functional/lang/eval-okay-string.nix diff --git a/tests/lang/eval-okay-strings-as-attrs-names.exp b/tests/functional/lang/eval-okay-strings-as-attrs-names.exp similarity index 100% rename from tests/lang/eval-okay-strings-as-attrs-names.exp rename to tests/functional/lang/eval-okay-strings-as-attrs-names.exp diff --git a/tests/lang/eval-okay-strings-as-attrs-names.nix b/tests/functional/lang/eval-okay-strings-as-attrs-names.nix similarity index 100% rename from tests/lang/eval-okay-strings-as-attrs-names.nix rename to tests/functional/lang/eval-okay-strings-as-attrs-names.nix diff --git a/tests/lang/eval-okay-substring.exp b/tests/functional/lang/eval-okay-substring.exp similarity index 100% rename from tests/lang/eval-okay-substring.exp rename to tests/functional/lang/eval-okay-substring.exp diff --git a/tests/lang/eval-okay-substring.nix b/tests/functional/lang/eval-okay-substring.nix similarity index 100% rename from tests/lang/eval-okay-substring.nix rename to tests/functional/lang/eval-okay-substring.nix diff --git a/tests/lang/eval-okay-tail-call-1.exp-disabled b/tests/functional/lang/eval-okay-tail-call-1.exp-disabled similarity index 100% rename from tests/lang/eval-okay-tail-call-1.exp-disabled rename to tests/functional/lang/eval-okay-tail-call-1.exp-disabled diff --git a/tests/lang/eval-okay-tail-call-1.nix b/tests/functional/lang/eval-okay-tail-call-1.nix similarity index 100% rename from tests/lang/eval-okay-tail-call-1.nix rename to tests/functional/lang/eval-okay-tail-call-1.nix diff --git a/tests/lang/eval-okay-tojson.exp b/tests/functional/lang/eval-okay-tojson.exp similarity index 100% rename from tests/lang/eval-okay-tojson.exp rename to tests/functional/lang/eval-okay-tojson.exp diff --git a/tests/lang/eval-okay-tojson.nix b/tests/functional/lang/eval-okay-tojson.nix similarity index 100% rename from tests/lang/eval-okay-tojson.nix rename to tests/functional/lang/eval-okay-tojson.nix diff --git a/tests/lang/eval-okay-toxml.exp b/tests/functional/lang/eval-okay-toxml.exp similarity index 100% rename from tests/lang/eval-okay-toxml.exp rename to tests/functional/lang/eval-okay-toxml.exp diff --git a/tests/lang/eval-okay-toxml.nix b/tests/functional/lang/eval-okay-toxml.nix similarity index 100% rename from tests/lang/eval-okay-toxml.nix rename to tests/functional/lang/eval-okay-toxml.nix diff --git a/tests/lang/eval-okay-toxml2.exp b/tests/functional/lang/eval-okay-toxml2.exp similarity index 100% rename from tests/lang/eval-okay-toxml2.exp rename to tests/functional/lang/eval-okay-toxml2.exp diff --git a/tests/lang/eval-okay-toxml2.nix b/tests/functional/lang/eval-okay-toxml2.nix similarity index 100% rename from tests/lang/eval-okay-toxml2.nix rename to tests/functional/lang/eval-okay-toxml2.nix diff --git a/tests/lang/eval-okay-tryeval.exp b/tests/functional/lang/eval-okay-tryeval.exp similarity index 100% rename from tests/lang/eval-okay-tryeval.exp rename to tests/functional/lang/eval-okay-tryeval.exp diff --git a/tests/lang/eval-okay-tryeval.nix b/tests/functional/lang/eval-okay-tryeval.nix 
similarity index 100% rename from tests/lang/eval-okay-tryeval.nix rename to tests/functional/lang/eval-okay-tryeval.nix diff --git a/tests/lang/eval-okay-types.exp b/tests/functional/lang/eval-okay-types.exp similarity index 100% rename from tests/lang/eval-okay-types.exp rename to tests/functional/lang/eval-okay-types.exp diff --git a/tests/lang/eval-okay-types.nix b/tests/functional/lang/eval-okay-types.nix similarity index 100% rename from tests/lang/eval-okay-types.nix rename to tests/functional/lang/eval-okay-types.nix diff --git a/tests/lang/eval-okay-versions.exp b/tests/functional/lang/eval-okay-versions.exp similarity index 100% rename from tests/lang/eval-okay-versions.exp rename to tests/functional/lang/eval-okay-versions.exp diff --git a/tests/lang/eval-okay-versions.nix b/tests/functional/lang/eval-okay-versions.nix similarity index 100% rename from tests/lang/eval-okay-versions.nix rename to tests/functional/lang/eval-okay-versions.nix diff --git a/tests/lang/eval-okay-with.exp b/tests/functional/lang/eval-okay-with.exp similarity index 100% rename from tests/lang/eval-okay-with.exp rename to tests/functional/lang/eval-okay-with.exp diff --git a/tests/lang/eval-okay-with.nix b/tests/functional/lang/eval-okay-with.nix similarity index 100% rename from tests/lang/eval-okay-with.nix rename to tests/functional/lang/eval-okay-with.nix diff --git a/tests/lang/eval-okay-xml.exp.xml b/tests/functional/lang/eval-okay-xml.exp.xml similarity index 100% rename from tests/lang/eval-okay-xml.exp.xml rename to tests/functional/lang/eval-okay-xml.exp.xml diff --git a/tests/lang/eval-okay-xml.nix b/tests/functional/lang/eval-okay-xml.nix similarity index 100% rename from tests/lang/eval-okay-xml.nix rename to tests/functional/lang/eval-okay-xml.nix diff --git a/tests/lang/eval-okay-zipAttrsWith.exp b/tests/functional/lang/eval-okay-zipAttrsWith.exp similarity index 100% rename from tests/lang/eval-okay-zipAttrsWith.exp rename to tests/functional/lang/eval-okay-zipAttrsWith.exp diff --git a/tests/lang/eval-okay-zipAttrsWith.nix b/tests/functional/lang/eval-okay-zipAttrsWith.nix similarity index 100% rename from tests/lang/eval-okay-zipAttrsWith.nix rename to tests/functional/lang/eval-okay-zipAttrsWith.nix diff --git a/tests/lang/framework.sh b/tests/functional/lang/framework.sh similarity index 100% rename from tests/lang/framework.sh rename to tests/functional/lang/framework.sh diff --git a/tests/lang/imported.nix b/tests/functional/lang/imported.nix similarity index 100% rename from tests/lang/imported.nix rename to tests/functional/lang/imported.nix diff --git a/tests/lang/imported2.nix b/tests/functional/lang/imported2.nix similarity index 100% rename from tests/lang/imported2.nix rename to tests/functional/lang/imported2.nix diff --git a/tests/lang/lib.nix b/tests/functional/lang/lib.nix similarity index 100% rename from tests/lang/lib.nix rename to tests/functional/lang/lib.nix diff --git a/tests/lang/parse-fail-dup-attrs-1.err.exp b/tests/functional/lang/parse-fail-dup-attrs-1.err.exp similarity index 100% rename from tests/lang/parse-fail-dup-attrs-1.err.exp rename to tests/functional/lang/parse-fail-dup-attrs-1.err.exp diff --git a/tests/lang/parse-fail-dup-attrs-1.nix b/tests/functional/lang/parse-fail-dup-attrs-1.nix similarity index 100% rename from tests/lang/parse-fail-dup-attrs-1.nix rename to tests/functional/lang/parse-fail-dup-attrs-1.nix diff --git a/tests/lang/parse-fail-dup-attrs-2.err.exp b/tests/functional/lang/parse-fail-dup-attrs-2.err.exp similarity index 100% 
rename from tests/lang/parse-fail-dup-attrs-2.err.exp rename to tests/functional/lang/parse-fail-dup-attrs-2.err.exp diff --git a/tests/lang/parse-fail-dup-attrs-2.nix b/tests/functional/lang/parse-fail-dup-attrs-2.nix similarity index 100% rename from tests/lang/parse-fail-dup-attrs-2.nix rename to tests/functional/lang/parse-fail-dup-attrs-2.nix diff --git a/tests/lang/parse-fail-dup-attrs-3.err.exp b/tests/functional/lang/parse-fail-dup-attrs-3.err.exp similarity index 100% rename from tests/lang/parse-fail-dup-attrs-3.err.exp rename to tests/functional/lang/parse-fail-dup-attrs-3.err.exp diff --git a/tests/lang/parse-fail-dup-attrs-3.nix b/tests/functional/lang/parse-fail-dup-attrs-3.nix similarity index 100% rename from tests/lang/parse-fail-dup-attrs-3.nix rename to tests/functional/lang/parse-fail-dup-attrs-3.nix diff --git a/tests/lang/parse-fail-dup-attrs-4.err.exp b/tests/functional/lang/parse-fail-dup-attrs-4.err.exp similarity index 100% rename from tests/lang/parse-fail-dup-attrs-4.err.exp rename to tests/functional/lang/parse-fail-dup-attrs-4.err.exp diff --git a/tests/lang/parse-fail-dup-attrs-4.nix b/tests/functional/lang/parse-fail-dup-attrs-4.nix similarity index 100% rename from tests/lang/parse-fail-dup-attrs-4.nix rename to tests/functional/lang/parse-fail-dup-attrs-4.nix diff --git a/tests/lang/parse-fail-dup-attrs-7.err.exp b/tests/functional/lang/parse-fail-dup-attrs-7.err.exp similarity index 100% rename from tests/lang/parse-fail-dup-attrs-7.err.exp rename to tests/functional/lang/parse-fail-dup-attrs-7.err.exp diff --git a/tests/lang/parse-fail-dup-attrs-7.nix b/tests/functional/lang/parse-fail-dup-attrs-7.nix similarity index 100% rename from tests/lang/parse-fail-dup-attrs-7.nix rename to tests/functional/lang/parse-fail-dup-attrs-7.nix diff --git a/tests/lang/parse-fail-dup-formals.err.exp b/tests/functional/lang/parse-fail-dup-formals.err.exp similarity index 100% rename from tests/lang/parse-fail-dup-formals.err.exp rename to tests/functional/lang/parse-fail-dup-formals.err.exp diff --git a/tests/lang/parse-fail-dup-formals.nix b/tests/functional/lang/parse-fail-dup-formals.nix similarity index 100% rename from tests/lang/parse-fail-dup-formals.nix rename to tests/functional/lang/parse-fail-dup-formals.nix diff --git a/tests/lang/parse-fail-eof-in-string.err.exp b/tests/functional/lang/parse-fail-eof-in-string.err.exp similarity index 100% rename from tests/lang/parse-fail-eof-in-string.err.exp rename to tests/functional/lang/parse-fail-eof-in-string.err.exp diff --git a/tests/lang/parse-fail-eof-in-string.nix b/tests/functional/lang/parse-fail-eof-in-string.nix similarity index 100% rename from tests/lang/parse-fail-eof-in-string.nix rename to tests/functional/lang/parse-fail-eof-in-string.nix diff --git a/tests/lang/parse-fail-mixed-nested-attrs1.err.exp b/tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp similarity index 100% rename from tests/lang/parse-fail-mixed-nested-attrs1.err.exp rename to tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp diff --git a/tests/lang/parse-fail-mixed-nested-attrs1.nix b/tests/functional/lang/parse-fail-mixed-nested-attrs1.nix similarity index 100% rename from tests/lang/parse-fail-mixed-nested-attrs1.nix rename to tests/functional/lang/parse-fail-mixed-nested-attrs1.nix diff --git a/tests/lang/parse-fail-mixed-nested-attrs2.err.exp b/tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp similarity index 100% rename from tests/lang/parse-fail-mixed-nested-attrs2.err.exp rename to 
tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp diff --git a/tests/lang/parse-fail-mixed-nested-attrs2.nix b/tests/functional/lang/parse-fail-mixed-nested-attrs2.nix similarity index 100% rename from tests/lang/parse-fail-mixed-nested-attrs2.nix rename to tests/functional/lang/parse-fail-mixed-nested-attrs2.nix diff --git a/tests/lang/parse-fail-patterns-1.err.exp b/tests/functional/lang/parse-fail-patterns-1.err.exp similarity index 100% rename from tests/lang/parse-fail-patterns-1.err.exp rename to tests/functional/lang/parse-fail-patterns-1.err.exp diff --git a/tests/lang/parse-fail-patterns-1.nix b/tests/functional/lang/parse-fail-patterns-1.nix similarity index 100% rename from tests/lang/parse-fail-patterns-1.nix rename to tests/functional/lang/parse-fail-patterns-1.nix diff --git a/tests/lang/parse-fail-regression-20060610.err.exp b/tests/functional/lang/parse-fail-regression-20060610.err.exp similarity index 100% rename from tests/lang/parse-fail-regression-20060610.err.exp rename to tests/functional/lang/parse-fail-regression-20060610.err.exp diff --git a/tests/lang/parse-fail-regression-20060610.nix b/tests/functional/lang/parse-fail-regression-20060610.nix similarity index 100% rename from tests/lang/parse-fail-regression-20060610.nix rename to tests/functional/lang/parse-fail-regression-20060610.nix diff --git a/tests/lang/parse-fail-undef-var-2.err.exp b/tests/functional/lang/parse-fail-undef-var-2.err.exp similarity index 100% rename from tests/lang/parse-fail-undef-var-2.err.exp rename to tests/functional/lang/parse-fail-undef-var-2.err.exp diff --git a/tests/lang/parse-fail-undef-var-2.nix b/tests/functional/lang/parse-fail-undef-var-2.nix similarity index 100% rename from tests/lang/parse-fail-undef-var-2.nix rename to tests/functional/lang/parse-fail-undef-var-2.nix diff --git a/tests/lang/parse-fail-undef-var.err.exp b/tests/functional/lang/parse-fail-undef-var.err.exp similarity index 100% rename from tests/lang/parse-fail-undef-var.err.exp rename to tests/functional/lang/parse-fail-undef-var.err.exp diff --git a/tests/lang/parse-fail-undef-var.nix b/tests/functional/lang/parse-fail-undef-var.nix similarity index 100% rename from tests/lang/parse-fail-undef-var.nix rename to tests/functional/lang/parse-fail-undef-var.nix diff --git a/tests/lang/parse-fail-utf8.err.exp b/tests/functional/lang/parse-fail-utf8.err.exp similarity index 100% rename from tests/lang/parse-fail-utf8.err.exp rename to tests/functional/lang/parse-fail-utf8.err.exp diff --git a/tests/lang/parse-fail-utf8.nix b/tests/functional/lang/parse-fail-utf8.nix similarity index 100% rename from tests/lang/parse-fail-utf8.nix rename to tests/functional/lang/parse-fail-utf8.nix diff --git a/tests/lang/parse-okay-1.exp b/tests/functional/lang/parse-okay-1.exp similarity index 100% rename from tests/lang/parse-okay-1.exp rename to tests/functional/lang/parse-okay-1.exp diff --git a/tests/lang/parse-okay-1.nix b/tests/functional/lang/parse-okay-1.nix similarity index 100% rename from tests/lang/parse-okay-1.nix rename to tests/functional/lang/parse-okay-1.nix diff --git a/tests/lang/parse-okay-crlf.exp b/tests/functional/lang/parse-okay-crlf.exp similarity index 100% rename from tests/lang/parse-okay-crlf.exp rename to tests/functional/lang/parse-okay-crlf.exp diff --git a/tests/lang/parse-okay-crlf.nix b/tests/functional/lang/parse-okay-crlf.nix similarity index 100% rename from tests/lang/parse-okay-crlf.nix rename to tests/functional/lang/parse-okay-crlf.nix diff --git 
a/tests/lang/parse-okay-dup-attrs-5.exp b/tests/functional/lang/parse-okay-dup-attrs-5.exp similarity index 100% rename from tests/lang/parse-okay-dup-attrs-5.exp rename to tests/functional/lang/parse-okay-dup-attrs-5.exp diff --git a/tests/lang/parse-okay-dup-attrs-5.nix b/tests/functional/lang/parse-okay-dup-attrs-5.nix similarity index 100% rename from tests/lang/parse-okay-dup-attrs-5.nix rename to tests/functional/lang/parse-okay-dup-attrs-5.nix diff --git a/tests/lang/parse-okay-dup-attrs-6.exp b/tests/functional/lang/parse-okay-dup-attrs-6.exp similarity index 100% rename from tests/lang/parse-okay-dup-attrs-6.exp rename to tests/functional/lang/parse-okay-dup-attrs-6.exp diff --git a/tests/lang/parse-okay-dup-attrs-6.nix b/tests/functional/lang/parse-okay-dup-attrs-6.nix similarity index 100% rename from tests/lang/parse-okay-dup-attrs-6.nix rename to tests/functional/lang/parse-okay-dup-attrs-6.nix diff --git a/tests/lang/parse-okay-mixed-nested-attrs-1.exp b/tests/functional/lang/parse-okay-mixed-nested-attrs-1.exp similarity index 100% rename from tests/lang/parse-okay-mixed-nested-attrs-1.exp rename to tests/functional/lang/parse-okay-mixed-nested-attrs-1.exp diff --git a/tests/lang/parse-okay-mixed-nested-attrs-1.nix b/tests/functional/lang/parse-okay-mixed-nested-attrs-1.nix similarity index 100% rename from tests/lang/parse-okay-mixed-nested-attrs-1.nix rename to tests/functional/lang/parse-okay-mixed-nested-attrs-1.nix diff --git a/tests/lang/parse-okay-mixed-nested-attrs-2.exp b/tests/functional/lang/parse-okay-mixed-nested-attrs-2.exp similarity index 100% rename from tests/lang/parse-okay-mixed-nested-attrs-2.exp rename to tests/functional/lang/parse-okay-mixed-nested-attrs-2.exp diff --git a/tests/lang/parse-okay-mixed-nested-attrs-2.nix b/tests/functional/lang/parse-okay-mixed-nested-attrs-2.nix similarity index 100% rename from tests/lang/parse-okay-mixed-nested-attrs-2.nix rename to tests/functional/lang/parse-okay-mixed-nested-attrs-2.nix diff --git a/tests/lang/parse-okay-mixed-nested-attrs-3.exp b/tests/functional/lang/parse-okay-mixed-nested-attrs-3.exp similarity index 100% rename from tests/lang/parse-okay-mixed-nested-attrs-3.exp rename to tests/functional/lang/parse-okay-mixed-nested-attrs-3.exp diff --git a/tests/lang/parse-okay-mixed-nested-attrs-3.nix b/tests/functional/lang/parse-okay-mixed-nested-attrs-3.nix similarity index 100% rename from tests/lang/parse-okay-mixed-nested-attrs-3.nix rename to tests/functional/lang/parse-okay-mixed-nested-attrs-3.nix diff --git a/tests/lang/parse-okay-regression-20041027.exp b/tests/functional/lang/parse-okay-regression-20041027.exp similarity index 100% rename from tests/lang/parse-okay-regression-20041027.exp rename to tests/functional/lang/parse-okay-regression-20041027.exp diff --git a/tests/lang/parse-okay-regression-20041027.nix b/tests/functional/lang/parse-okay-regression-20041027.nix similarity index 100% rename from tests/lang/parse-okay-regression-20041027.nix rename to tests/functional/lang/parse-okay-regression-20041027.nix diff --git a/tests/lang/parse-okay-regression-751.exp b/tests/functional/lang/parse-okay-regression-751.exp similarity index 100% rename from tests/lang/parse-okay-regression-751.exp rename to tests/functional/lang/parse-okay-regression-751.exp diff --git a/tests/lang/parse-okay-regression-751.nix b/tests/functional/lang/parse-okay-regression-751.nix similarity index 100% rename from tests/lang/parse-okay-regression-751.nix rename to tests/functional/lang/parse-okay-regression-751.nix 
diff --git a/tests/lang/parse-okay-subversion.exp b/tests/functional/lang/parse-okay-subversion.exp similarity index 100% rename from tests/lang/parse-okay-subversion.exp rename to tests/functional/lang/parse-okay-subversion.exp diff --git a/tests/lang/parse-okay-subversion.nix b/tests/functional/lang/parse-okay-subversion.nix similarity index 100% rename from tests/lang/parse-okay-subversion.nix rename to tests/functional/lang/parse-okay-subversion.nix diff --git a/tests/lang/parse-okay-url.exp b/tests/functional/lang/parse-okay-url.exp similarity index 100% rename from tests/lang/parse-okay-url.exp rename to tests/functional/lang/parse-okay-url.exp diff --git a/tests/lang/parse-okay-url.nix b/tests/functional/lang/parse-okay-url.nix similarity index 100% rename from tests/lang/parse-okay-url.nix rename to tests/functional/lang/parse-okay-url.nix diff --git a/tests/lang/readDir/bar b/tests/functional/lang/readDir/bar similarity index 100% rename from tests/lang/readDir/bar rename to tests/functional/lang/readDir/bar diff --git a/tests/lang/readDir/foo/git-hates-directories b/tests/functional/lang/readDir/foo/git-hates-directories similarity index 100% rename from tests/lang/readDir/foo/git-hates-directories rename to tests/functional/lang/readDir/foo/git-hates-directories diff --git a/tests/lang/readDir/ldir b/tests/functional/lang/readDir/ldir similarity index 100% rename from tests/lang/readDir/ldir rename to tests/functional/lang/readDir/ldir diff --git a/tests/lang/readDir/linked b/tests/functional/lang/readDir/linked similarity index 100% rename from tests/lang/readDir/linked rename to tests/functional/lang/readDir/linked diff --git a/tests/functional/legacy-ssh-store.sh b/tests/functional/legacy-ssh-store.sh new file mode 100644 index 000000000..894efccd4 --- /dev/null +++ b/tests/functional/legacy-ssh-store.sh @@ -0,0 +1,4 @@ +source common.sh + +# Check that store info trusted doesn't yet work with ssh:// +nix --store ssh://localhost?remote-store=$TEST_ROOT/other-store store info --json | jq -e 'has("trusted") | not' diff --git a/tests/linux-sandbox-cert-test.nix b/tests/functional/linux-sandbox-cert-test.nix similarity index 100% rename from tests/linux-sandbox-cert-test.nix rename to tests/functional/linux-sandbox-cert-test.nix diff --git a/tests/linux-sandbox.sh b/tests/functional/linux-sandbox.sh similarity index 100% rename from tests/linux-sandbox.sh rename to tests/functional/linux-sandbox.sh diff --git a/tests/local-overlay-store/add-lower-inner.sh b/tests/functional/local-overlay-store/add-lower-inner.sh similarity index 100% rename from tests/local-overlay-store/add-lower-inner.sh rename to tests/functional/local-overlay-store/add-lower-inner.sh diff --git a/tests/local-overlay-store/add-lower.sh b/tests/functional/local-overlay-store/add-lower.sh similarity index 100% rename from tests/local-overlay-store/add-lower.sh rename to tests/functional/local-overlay-store/add-lower.sh diff --git a/tests/local-overlay-store/bad-uris.sh b/tests/functional/local-overlay-store/bad-uris.sh similarity index 100% rename from tests/local-overlay-store/bad-uris.sh rename to tests/functional/local-overlay-store/bad-uris.sh diff --git a/tests/local-overlay-store/build-inner.sh b/tests/functional/local-overlay-store/build-inner.sh similarity index 100% rename from tests/local-overlay-store/build-inner.sh rename to tests/functional/local-overlay-store/build-inner.sh diff --git a/tests/local-overlay-store/build.sh b/tests/functional/local-overlay-store/build.sh similarity index 100% rename 
from tests/local-overlay-store/build.sh rename to tests/functional/local-overlay-store/build.sh diff --git a/tests/local-overlay-store/check-post-init-inner.sh b/tests/functional/local-overlay-store/check-post-init-inner.sh similarity index 100% rename from tests/local-overlay-store/check-post-init-inner.sh rename to tests/functional/local-overlay-store/check-post-init-inner.sh diff --git a/tests/local-overlay-store/check-post-init.sh b/tests/functional/local-overlay-store/check-post-init.sh similarity index 100% rename from tests/local-overlay-store/check-post-init.sh rename to tests/functional/local-overlay-store/check-post-init.sh diff --git a/tests/local-overlay-store/common.sh b/tests/functional/local-overlay-store/common.sh similarity index 100% rename from tests/local-overlay-store/common.sh rename to tests/functional/local-overlay-store/common.sh diff --git a/tests/local-overlay-store/delete-duplicate-inner.sh b/tests/functional/local-overlay-store/delete-duplicate-inner.sh similarity index 100% rename from tests/local-overlay-store/delete-duplicate-inner.sh rename to tests/functional/local-overlay-store/delete-duplicate-inner.sh diff --git a/tests/local-overlay-store/delete-duplicate.sh b/tests/functional/local-overlay-store/delete-duplicate.sh similarity index 100% rename from tests/local-overlay-store/delete-duplicate.sh rename to tests/functional/local-overlay-store/delete-duplicate.sh diff --git a/tests/local-overlay-store/delete-refs-inner.sh b/tests/functional/local-overlay-store/delete-refs-inner.sh similarity index 100% rename from tests/local-overlay-store/delete-refs-inner.sh rename to tests/functional/local-overlay-store/delete-refs-inner.sh diff --git a/tests/local-overlay-store/delete-refs.sh b/tests/functional/local-overlay-store/delete-refs.sh similarity index 100% rename from tests/local-overlay-store/delete-refs.sh rename to tests/functional/local-overlay-store/delete-refs.sh diff --git a/tests/local-overlay-store/gc-inner.sh b/tests/functional/local-overlay-store/gc-inner.sh similarity index 100% rename from tests/local-overlay-store/gc-inner.sh rename to tests/functional/local-overlay-store/gc-inner.sh diff --git a/tests/local-overlay-store/gc.sh b/tests/functional/local-overlay-store/gc.sh similarity index 100% rename from tests/local-overlay-store/gc.sh rename to tests/functional/local-overlay-store/gc.sh diff --git a/tests/local-overlay-store/local.mk b/tests/functional/local-overlay-store/local.mk similarity index 100% rename from tests/local-overlay-store/local.mk rename to tests/functional/local-overlay-store/local.mk diff --git a/tests/local-overlay-store/optimise-inner.sh b/tests/functional/local-overlay-store/optimise-inner.sh similarity index 100% rename from tests/local-overlay-store/optimise-inner.sh rename to tests/functional/local-overlay-store/optimise-inner.sh diff --git a/tests/local-overlay-store/optimise.sh b/tests/functional/local-overlay-store/optimise.sh similarity index 100% rename from tests/local-overlay-store/optimise.sh rename to tests/functional/local-overlay-store/optimise.sh diff --git a/tests/local-overlay-store/redundant-add-inner.sh b/tests/functional/local-overlay-store/redundant-add-inner.sh similarity index 100% rename from tests/local-overlay-store/redundant-add-inner.sh rename to tests/functional/local-overlay-store/redundant-add-inner.sh diff --git a/tests/local-overlay-store/redundant-add.sh b/tests/functional/local-overlay-store/redundant-add.sh similarity index 100% rename from tests/local-overlay-store/redundant-add.sh 
rename to tests/functional/local-overlay-store/redundant-add.sh diff --git a/tests/local-overlay-store/remount.sh b/tests/functional/local-overlay-store/remount.sh similarity index 100% rename from tests/local-overlay-store/remount.sh rename to tests/functional/local-overlay-store/remount.sh diff --git a/tests/local-overlay-store/stale-file-handle-inner.sh b/tests/functional/local-overlay-store/stale-file-handle-inner.sh similarity index 100% rename from tests/local-overlay-store/stale-file-handle-inner.sh rename to tests/functional/local-overlay-store/stale-file-handle-inner.sh diff --git a/tests/local-overlay-store/stale-file-handle.sh b/tests/functional/local-overlay-store/stale-file-handle.sh similarity index 100% rename from tests/local-overlay-store/stale-file-handle.sh rename to tests/functional/local-overlay-store/stale-file-handle.sh diff --git a/tests/local-overlay-store/verify-inner.sh b/tests/functional/local-overlay-store/verify-inner.sh similarity index 100% rename from tests/local-overlay-store/verify-inner.sh rename to tests/functional/local-overlay-store/verify-inner.sh diff --git a/tests/local-overlay-store/verify.sh b/tests/functional/local-overlay-store/verify.sh similarity index 100% rename from tests/local-overlay-store/verify.sh rename to tests/functional/local-overlay-store/verify.sh diff --git a/tests/local-store.sh b/tests/functional/local-store.sh similarity index 79% rename from tests/local-store.sh rename to tests/functional/local-store.sh index 89502f864..f7c8eb3f1 100644 --- a/tests/local-store.sh +++ b/tests/functional/local-store.sh @@ -18,5 +18,5 @@ PATH2=$(nix path-info --store "$PWD/x" $CORRECT_PATH) PATH3=$(nix path-info --store "local?root=$PWD/x" $CORRECT_PATH) [ $CORRECT_PATH == $PATH3 ] -# Ensure store ping trusted works with local store -nix --store ./x store ping --json | jq -e '.trusted' +# Ensure store info trusted works with local store +nix --store ./x store info --json | jq -e '.trusted' diff --git a/tests/local.mk b/tests/functional/local.mk similarity index 82% rename from tests/local.mk rename to tests/functional/local.mk index 2afe91220..3679349f8 100644 --- a/tests/local.mk +++ b/tests/functional/local.mk @@ -12,6 +12,7 @@ nix_tests = \ flakes/check.sh \ flakes/unlocked-override.sh \ flakes/absolute-paths.sh \ + flakes/absolute-attr-paths.sh \ flakes/build-paths.sh \ flakes/flake-in-submodule.sh \ gc.sh \ @@ -90,6 +91,7 @@ nix_tests = \ zstd.sh \ compression-levels.sh \ nix-copy-ssh.sh \ + nix-copy-ssh-ng.sh \ post-hook.sh \ function-trace.sh \ flakes/config.sh \ @@ -102,7 +104,6 @@ nix_tests = \ case-hack.sh \ placeholders.sh \ ssh-relay.sh \ - plugins.sh \ build.sh \ build-delete.sh \ output-normalization.sh \ @@ -112,32 +113,39 @@ nix_tests = \ pass-as-file.sh \ nix-profile.sh \ suggestions.sh \ - store-ping.sh \ + store-info.sh \ fetchClosure.sh \ completions.sh \ flakes/show.sh \ impure-derivations.sh \ path-from-hash-part.sh \ - test-libstoreconsumer.sh \ toString-path.sh \ read-only-store.sh \ - nested-sandboxing.sh + nested-sandboxing.sh \ + impure-env.sh ifeq ($(HAVE_LIBCPUID), 1) nix_tests += compute-levels.sh endif +ifeq ($(ENABLE_BUILD), yes) + nix_tests += test-libstoreconsumer.sh + + ifeq ($(BUILD_SHARED_LIBS), 1) + nix_tests += plugins.sh + endif +endif + +$(d)/test-libstoreconsumer.sh.test $(d)/test-libstoreconsumer.sh.test-debug: \ + $(d)/test-libstoreconsumer/test-libstoreconsumer +$(d)/plugins.sh.test $(d)/plugins.sh.test-debug: \ + $(d)/plugins/libplugintest.$(SO_EXT) + install-tests += $(foreach x, $(nix_tests), 
$(d)/$(x)) -clean-files += \ +test-clean-files := \ $(d)/common/vars-and-functions.sh \ $(d)/config.nix -test-deps += \ - tests/common/vars-and-functions.sh \ - tests/config.nix \ - tests/test-libstoreconsumer/test-libstoreconsumer - -ifeq ($(BUILD_SHARED_LIBS), 1) - test-deps += tests/plugins/libplugintest.$(SO_EXT) -endif +clean-files += $(test-clean-files) +test-deps += $(test-clean-files) diff --git a/tests/logging.sh b/tests/functional/logging.sh similarity index 100% rename from tests/logging.sh rename to tests/functional/logging.sh diff --git a/tests/misc.sh b/tests/functional/misc.sh similarity index 100% rename from tests/misc.sh rename to tests/functional/misc.sh diff --git a/tests/multiple-outputs.nix b/tests/functional/multiple-outputs.nix similarity index 100% rename from tests/multiple-outputs.nix rename to tests/functional/multiple-outputs.nix diff --git a/tests/multiple-outputs.sh b/tests/functional/multiple-outputs.sh similarity index 100% rename from tests/multiple-outputs.sh rename to tests/functional/multiple-outputs.sh diff --git a/tests/nar-access.nix b/tests/functional/nar-access.nix similarity index 100% rename from tests/nar-access.nix rename to tests/functional/nar-access.nix diff --git a/tests/nar-access.sh b/tests/functional/nar-access.sh similarity index 100% rename from tests/nar-access.sh rename to tests/functional/nar-access.sh diff --git a/tests/nested-sandboxing.sh b/tests/functional/nested-sandboxing.sh similarity index 75% rename from tests/nested-sandboxing.sh rename to tests/functional/nested-sandboxing.sh index d9fa788aa..61fe043c6 100644 --- a/tests/nested-sandboxing.sh +++ b/tests/functional/nested-sandboxing.sh @@ -1,5 +1,5 @@ source common.sh -# This test is run by `tests/nested-sandboxing/runner.nix` in an extra layer of sandboxing. +# This test is run by `tests/functional/nested-sandboxing/runner.nix` in an extra layer of sandboxing. 
[[ -d /nix/store ]] || skipTest "running this test without Nix's deps being drawn from /nix/store is not yet supported" requireSandboxSupport diff --git a/tests/nested-sandboxing/command.sh b/tests/functional/nested-sandboxing/command.sh similarity index 100% rename from tests/nested-sandboxing/command.sh rename to tests/functional/nested-sandboxing/command.sh diff --git a/tests/nested-sandboxing/runner.nix b/tests/functional/nested-sandboxing/runner.nix similarity index 100% rename from tests/nested-sandboxing/runner.nix rename to tests/functional/nested-sandboxing/runner.nix diff --git a/tests/nix-build-examples.nix b/tests/functional/nix-build-examples.nix similarity index 100% rename from tests/nix-build-examples.nix rename to tests/functional/nix-build-examples.nix diff --git a/tests/nix-build.sh b/tests/functional/nix-build.sh similarity index 100% rename from tests/nix-build.sh rename to tests/functional/nix-build.sh diff --git a/tests/nix-channel.sh b/tests/functional/nix-channel.sh similarity index 100% rename from tests/nix-channel.sh rename to tests/functional/nix-channel.sh diff --git a/tests/nix-collect-garbage-d.sh b/tests/functional/nix-collect-garbage-d.sh similarity index 100% rename from tests/nix-collect-garbage-d.sh rename to tests/functional/nix-collect-garbage-d.sh diff --git a/tests/functional/nix-copy-ssh-ng.sh b/tests/functional/nix-copy-ssh-ng.sh new file mode 100644 index 000000000..463b5e0c4 --- /dev/null +++ b/tests/functional/nix-copy-ssh-ng.sh @@ -0,0 +1,18 @@ +source common.sh + +clearStore +clearCache + +remoteRoot=$TEST_ROOT/store2 +chmod -R u+w "$remoteRoot" || true +rm -rf "$remoteRoot" + +outPath=$(nix-build --no-out-link dependencies.nix) + +nix store info --store "ssh-ng://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" + +# Regression test for https://github.com/NixOS/nix/issues/6253 +nix copy --to "ssh-ng://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" $outPath --no-check-sigs & +nix copy --to "ssh-ng://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" $outPath --no-check-sigs + +[ -f $remoteRoot$outPath/foobar ] diff --git a/tests/nix-copy-ssh.sh b/tests/functional/nix-copy-ssh.sh similarity index 100% rename from tests/nix-copy-ssh.sh rename to tests/functional/nix-copy-ssh.sh diff --git a/tests/nix-daemon-untrusting.sh b/tests/functional/nix-daemon-untrusting.sh similarity index 100% rename from tests/nix-daemon-untrusting.sh rename to tests/functional/nix-daemon-untrusting.sh diff --git a/tests/nix-profile.sh b/tests/functional/nix-profile.sh similarity index 100% rename from tests/nix-profile.sh rename to tests/functional/nix-profile.sh diff --git a/tests/nix-shell.sh b/tests/functional/nix-shell.sh similarity index 96% rename from tests/nix-shell.sh rename to tests/functional/nix-shell.sh index edaa1249b..13403fadb 100644 --- a/tests/nix-shell.sh +++ b/tests/functional/nix-shell.sh @@ -84,6 +84,11 @@ chmod a+rx $TEST_ROOT/spaced\ \\\'\"shell.shebang.rb output=$($TEST_ROOT/spaced\ \\\'\"shell.shebang.rb abc ruby) [ "$output" = '-e load(ARGV.shift) -- '"$TEST_ROOT"'/spaced \'\''"shell.shebang.rb abc ruby' ] +# Test nix-shell shebang quoting +sed -e "s|@ENV_PROG@|$(type -P env)|" shell.shebang.nix > $TEST_ROOT/shell.shebang.nix +chmod a+rx $TEST_ROOT/shell.shebang.nix +$TEST_ROOT/shell.shebang.nix + # Test 'nix develop'. 
nix develop -f "$shellDotNix" shellDrv -c bash -c '[[ -n $stdenv ]]' diff --git a/tests/nix_path.sh b/tests/functional/nix_path.sh similarity index 100% rename from tests/nix_path.sh rename to tests/functional/nix_path.sh diff --git a/tests/optimise-store.sh b/tests/functional/optimise-store.sh similarity index 100% rename from tests/optimise-store.sh rename to tests/functional/optimise-store.sh diff --git a/tests/output-normalization.sh b/tests/functional/output-normalization.sh similarity index 100% rename from tests/output-normalization.sh rename to tests/functional/output-normalization.sh diff --git a/tests/parallel.builder.sh b/tests/functional/parallel.builder.sh similarity index 100% rename from tests/parallel.builder.sh rename to tests/functional/parallel.builder.sh diff --git a/tests/parallel.nix b/tests/functional/parallel.nix similarity index 100% rename from tests/parallel.nix rename to tests/functional/parallel.nix diff --git a/tests/parallel.sh b/tests/functional/parallel.sh similarity index 100% rename from tests/parallel.sh rename to tests/functional/parallel.sh diff --git a/tests/pass-as-file.sh b/tests/functional/pass-as-file.sh similarity index 100% rename from tests/pass-as-file.sh rename to tests/functional/pass-as-file.sh diff --git a/tests/path-from-hash-part.sh b/tests/functional/path-from-hash-part.sh similarity index 100% rename from tests/path-from-hash-part.sh rename to tests/functional/path-from-hash-part.sh diff --git a/tests/path.nix b/tests/functional/path.nix similarity index 100% rename from tests/path.nix rename to tests/functional/path.nix diff --git a/tests/placeholders.sh b/tests/functional/placeholders.sh similarity index 100% rename from tests/placeholders.sh rename to tests/functional/placeholders.sh diff --git a/tests/plugins.sh b/tests/functional/plugins.sh similarity index 100% rename from tests/plugins.sh rename to tests/functional/plugins.sh diff --git a/tests/plugins/local.mk b/tests/functional/plugins/local.mk similarity index 100% rename from tests/plugins/local.mk rename to tests/functional/plugins/local.mk diff --git a/tests/plugins/plugintest.cc b/tests/functional/plugins/plugintest.cc similarity index 100% rename from tests/plugins/plugintest.cc rename to tests/functional/plugins/plugintest.cc diff --git a/tests/post-hook.sh b/tests/functional/post-hook.sh similarity index 100% rename from tests/post-hook.sh rename to tests/functional/post-hook.sh diff --git a/tests/pure-eval.nix b/tests/functional/pure-eval.nix similarity index 100% rename from tests/pure-eval.nix rename to tests/functional/pure-eval.nix diff --git a/tests/pure-eval.sh b/tests/functional/pure-eval.sh similarity index 100% rename from tests/pure-eval.sh rename to tests/functional/pure-eval.sh diff --git a/tests/push-to-store-old.sh b/tests/functional/push-to-store-old.sh similarity index 100% rename from tests/push-to-store-old.sh rename to tests/functional/push-to-store-old.sh diff --git a/tests/push-to-store.sh b/tests/functional/push-to-store.sh similarity index 100% rename from tests/push-to-store.sh rename to tests/functional/push-to-store.sh diff --git a/tests/read-only-store.sh b/tests/functional/read-only-store.sh similarity index 100% rename from tests/read-only-store.sh rename to tests/functional/read-only-store.sh diff --git a/tests/readfile-context.nix b/tests/functional/readfile-context.nix similarity index 100% rename from tests/readfile-context.nix rename to tests/functional/readfile-context.nix diff --git a/tests/readfile-context.sh 
b/tests/functional/readfile-context.sh similarity index 100% rename from tests/readfile-context.sh rename to tests/functional/readfile-context.sh diff --git a/tests/recursive.nix b/tests/functional/recursive.nix similarity index 100% rename from tests/recursive.nix rename to tests/functional/recursive.nix diff --git a/tests/recursive.sh b/tests/functional/recursive.sh similarity index 100% rename from tests/recursive.sh rename to tests/functional/recursive.sh diff --git a/tests/referrers.sh b/tests/functional/referrers.sh similarity index 100% rename from tests/referrers.sh rename to tests/functional/referrers.sh diff --git a/tests/remote-store.sh b/tests/functional/remote-store.sh similarity index 78% rename from tests/remote-store.sh rename to tests/functional/remote-store.sh index ea32a20d3..5c7bfde46 100644 --- a/tests/remote-store.sh +++ b/tests/functional/remote-store.sh @@ -5,17 +5,17 @@ clearStore # Ensure "fake ssh" remote store works just as legacy fake ssh would. nix --store ssh-ng://localhost?remote-store=$TEST_ROOT/other-store doctor -# Ensure that store ping trusted works with ssh-ng:// -nix --store ssh-ng://localhost?remote-store=$TEST_ROOT/other-store store ping --json | jq -e '.trusted' +# Ensure that store info trusted works with ssh-ng:// +nix --store ssh-ng://localhost?remote-store=$TEST_ROOT/other-store store info --json | jq -e '.trusted' startDaemon if isDaemonNewer "2.15pre0"; then # Ensure that ping works trusted with new daemon - nix store ping --json | jq -e '.trusted' + nix store info --json | jq -e '.trusted' else # And the the field is absent with the old daemon - nix store ping --json | jq -e 'has("trusted") | not' + nix store info --json | jq -e 'has("trusted") | not' fi # Test import-from-derivation through the daemon. @@ -24,7 +24,7 @@ fi import ( mkDerivation { name = "foo"; - bla = import ./dependencies.nix; + bla = import ./dependencies.nix {}; buildCommand = " echo \\\"hi\\\" > $out "; diff --git a/tests/repair.sh b/tests/functional/repair.sh similarity index 100% rename from tests/repair.sh rename to tests/functional/repair.sh diff --git a/tests/repl.sh b/tests/functional/repl.sh similarity index 62% rename from tests/repl.sh rename to tests/functional/repl.sh index 2b3789521..1b779c1f5 100644 --- a/tests/repl.sh +++ b/tests/functional/repl.sh @@ -54,11 +54,17 @@ testRepl # Same thing (kind-of), but with a remote store. testRepl --store "$TEST_ROOT/store?real=$NIX_STORE_DIR" -testReplResponse () { +# Remove ANSI escape sequences. They can prevent grep from finding a match. 
+stripColors () { + sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g' +} + +testReplResponseGeneral () { + local grepMode="$1"; shift local commands="$1"; shift local expectedResponse="$1"; shift - local response="$(nix repl "$@" <<< "$commands")" - echo "$response" | grepQuiet -s "$expectedResponse" \ + local response="$(nix repl "$@" <<< "$commands" | stripColors)" + echo "$response" | grepQuiet "$grepMode" -s "$expectedResponse" \ || fail "repl command set: $commands @@ -69,7 +75,16 @@ $expectedResponse but with: -$response" +$response +" +} + +testReplResponse () { + testReplResponseGeneral --basic-regexp "$@" +} + +testReplResponseNoRegex () { + testReplResponseGeneral --fixed-strings "$@" } # :a uses the newest version of a symbol @@ -83,25 +98,20 @@ testReplResponse ' # note the escaped \, # \\ # because the second argument is a regex -testReplResponse ' +testReplResponseNoRegex ' "$" + "{hi}" -' '"\\${hi}"' +' '"\${hi}"' testReplResponse ' drvPath ' '".*-simple.drv"' \ -$testDir/simple.nix +--file $testDir/simple.nix testReplResponse ' drvPath ' '".*-simple.drv"' \ --file $testDir/simple.nix --experimental-features 'ca-derivations' -testReplResponse ' -drvPath -' '".*-simple.drv"' \ ---file $testDir/simple.nix --extra-experimental-features 'repl-flake ca-derivations' - mkdir -p flake && cat < flake/flake.nix { outputs = { self }: { @@ -115,7 +125,7 @@ EOF testReplResponse ' foo + baz ' "3" \ - ./flake ./flake\#bar --experimental-features 'flakes repl-flake' + ./flake ./flake\#bar --experimental-features 'flakes' # Test the `:reload` mechansim with flakes: # - Eval `./flake#changingThing` @@ -128,6 +138,37 @@ sleep 1 # Leave the repl the time to eval 'foo' sed -i 's/beforeChange/afterChange/' flake/flake.nix echo ":reload" echo "changingThing" -) | nix repl ./flake --experimental-features 'flakes repl-flake') +) | nix repl ./flake --experimental-features 'flakes') echo "$replResult" | grepQuiet -s beforeChange echo "$replResult" | grepQuiet -s afterChange + +# Test recursive printing and formatting +# Normal output should print attributes in lexicographical order non-recursively +testReplResponseNoRegex ' +{ a = { b = 2; }; l = [ 1 2 3 ]; s = "string"; n = 1234; x = rec { y = { z = { inherit y; }; }; }; } +' '{ a = { ... }; l = [ ... ]; n = 1234; s = "string"; x = { ... }; }' + +# Same for lists, but order is preserved +testReplResponseNoRegex ' +[ 42 1 "thingy" ({ a = 1; }) ([ 1 2 3 ]) ] +' '[ 42 1 "thingy" { ... } [ ... ] ]' + +# Same for let expressions +testReplResponseNoRegex ' +let x = { y = { a = 1; }; inherit x; }; in x +' '{ x = { ... }; y = { ... 
}; }' + +# The :p command should recursively print sets, but prevent infinite recursion +testReplResponseNoRegex ' +:p { a = { b = 2; }; s = "string"; n = 1234; x = rec { y = { z = { inherit y; }; }; }; } +' '{ a = { b = 2; }; n = 1234; s = "string"; x = { y = { z = { y = «repeated»; }; }; }; }' + +# Same for lists +testReplResponseNoRegex ' +:p [ 42 1 "thingy" (rec { a = 1; b = { inherit a; inherit b; }; }) ([ 1 2 3 ]) ] +' '[ 42 1 "thingy" { a = 1; b = { a = 1; b = «repeated»; }; } [ 1 2 3 ] ]' + +# Same for let expressions +testReplResponseNoRegex ' +:p let x = { y = { a = 1; }; inherit x; }; in x +' '{ x = { x = «repeated»; y = { a = 1; }; }; y = «repeated»; }' diff --git a/tests/restricted.nix b/tests/functional/restricted.nix similarity index 100% rename from tests/restricted.nix rename to tests/functional/restricted.nix diff --git a/tests/restricted.sh b/tests/functional/restricted.sh similarity index 95% rename from tests/restricted.sh rename to tests/functional/restricted.sh index 17f310a4b..197ae7a10 100644 --- a/tests/restricted.sh +++ b/tests/functional/restricted.sh @@ -9,10 +9,10 @@ nix-instantiate --restrict-eval ./simple.nix -I src=. nix-instantiate --restrict-eval ./simple.nix -I src1=simple.nix -I src2=config.nix -I src3=./simple.builder.sh (! nix-instantiate --restrict-eval --eval -E 'builtins.readFile ./simple.nix') -nix-instantiate --restrict-eval --eval -E 'builtins.readFile ./simple.nix' -I src=.. +nix-instantiate --restrict-eval --eval -E 'builtins.readFile ./simple.nix' -I src=../.. -(! nix-instantiate --restrict-eval --eval -E 'builtins.readDir ../src/nix-channel') -nix-instantiate --restrict-eval --eval -E 'builtins.readDir ../src/nix-channel' -I src=../src +(! nix-instantiate --restrict-eval --eval -E 'builtins.readDir ../../src/nix-channel') +nix-instantiate --restrict-eval --eval -E 'builtins.readDir ../../src/nix-channel' -I src=../../src (! nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in ') nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in ' -I src=. 
diff --git a/tests/search.nix b/tests/functional/search.nix similarity index 100% rename from tests/search.nix rename to tests/functional/search.nix diff --git a/tests/search.sh b/tests/functional/search.sh similarity index 100% rename from tests/search.sh rename to tests/functional/search.sh diff --git a/tests/secure-drv-outputs.nix b/tests/functional/secure-drv-outputs.nix similarity index 100% rename from tests/secure-drv-outputs.nix rename to tests/functional/secure-drv-outputs.nix diff --git a/tests/secure-drv-outputs.sh b/tests/functional/secure-drv-outputs.sh similarity index 100% rename from tests/secure-drv-outputs.sh rename to tests/functional/secure-drv-outputs.sh diff --git a/tests/selfref-gc.sh b/tests/functional/selfref-gc.sh similarity index 100% rename from tests/selfref-gc.sh rename to tests/functional/selfref-gc.sh diff --git a/tests/shell-hello.nix b/tests/functional/shell-hello.nix similarity index 100% rename from tests/shell-hello.nix rename to tests/functional/shell-hello.nix diff --git a/tests/shell.nix b/tests/functional/shell.nix similarity index 100% rename from tests/shell.nix rename to tests/functional/shell.nix diff --git a/tests/shell.sh b/tests/functional/shell.sh similarity index 100% rename from tests/shell.sh rename to tests/functional/shell.sh diff --git a/tests/functional/shell.shebang.nix b/tests/functional/shell.shebang.nix new file mode 100755 index 000000000..08e43d53c --- /dev/null +++ b/tests/functional/shell.shebang.nix @@ -0,0 +1,10 @@ +#! @ENV_PROG@ nix-shell +#! nix-shell -I nixpkgs=shell.nix --no-substitute +#! nix-shell --argstr s1 'foo "bar" \baz'"'"'qux' --argstr s2 "foo 'bar' \"\baz" --argstr s3 \foo\ bar\'baz --argstr s4 '' +#! nix-shell shell.shebang.nix --command true +{ s1, s2, s3, s4 }: +assert s1 == ''foo "bar" \baz'qux''; +assert s2 == "foo 'bar' \"baz"; +assert s3 == "foo bar'baz"; +assert s4 == ""; +(import {}).runCommand "nix-shell" {} "" diff --git a/tests/shell.shebang.rb b/tests/functional/shell.shebang.rb similarity index 100% rename from tests/shell.shebang.rb rename to tests/functional/shell.shebang.rb diff --git a/tests/shell.shebang.sh b/tests/functional/shell.shebang.sh similarity index 100% rename from tests/shell.shebang.sh rename to tests/functional/shell.shebang.sh diff --git a/tests/signing.sh b/tests/functional/signing.sh similarity index 100% rename from tests/signing.sh rename to tests/functional/signing.sh diff --git a/tests/simple-failing.nix b/tests/functional/simple-failing.nix similarity index 100% rename from tests/simple-failing.nix rename to tests/functional/simple-failing.nix diff --git a/tests/simple.builder.sh b/tests/functional/simple.builder.sh similarity index 100% rename from tests/simple.builder.sh rename to tests/functional/simple.builder.sh diff --git a/tests/simple.nix b/tests/functional/simple.nix similarity index 100% rename from tests/simple.nix rename to tests/functional/simple.nix diff --git a/tests/simple.sh b/tests/functional/simple.sh similarity index 100% rename from tests/simple.sh rename to tests/functional/simple.sh diff --git a/tests/ssh-relay.sh b/tests/functional/ssh-relay.sh similarity index 100% rename from tests/ssh-relay.sh rename to tests/functional/ssh-relay.sh diff --git a/tests/store-ping.sh b/tests/functional/store-info.sh similarity index 69% rename from tests/store-ping.sh rename to tests/functional/store-info.sh index 9846c7d3d..c002e50be 100644 --- a/tests/store-ping.sh +++ b/tests/functional/store-info.sh @@ -1,7 +1,7 @@ source common.sh -STORE_INFO=$(nix store 
ping 2>&1) -STORE_INFO_JSON=$(nix store ping --json) +STORE_INFO=$(nix store info 2>&1) +STORE_INFO_JSON=$(nix store info --json) echo "$STORE_INFO" | grep "Store URL: ${NIX_REMOTE}" @@ -11,7 +11,7 @@ if [[ -v NIX_DAEMON_PACKAGE ]] && isDaemonNewer "2.7.0pre20220126"; then [[ "$(echo "$STORE_INFO_JSON" | jq -r ".version")" == "$DAEMON_VERSION" ]] fi -expect 127 NIX_REMOTE=unix:$PWD/store nix store ping || \ - fail "nix store ping on a non-existent store should fail" +expect 127 NIX_REMOTE=unix:$PWD/store nix store info || \ + fail "nix store info on a non-existent store should fail" [[ "$(echo "$STORE_INFO_JSON" | jq -r ".url")" == "${NIX_REMOTE:-local}" ]] diff --git a/tests/structured-attrs-shell.nix b/tests/functional/structured-attrs-shell.nix similarity index 100% rename from tests/structured-attrs-shell.nix rename to tests/functional/structured-attrs-shell.nix diff --git a/tests/structured-attrs.nix b/tests/functional/structured-attrs.nix similarity index 100% rename from tests/structured-attrs.nix rename to tests/functional/structured-attrs.nix diff --git a/tests/structured-attrs.sh b/tests/functional/structured-attrs.sh similarity index 50% rename from tests/structured-attrs.sh rename to tests/functional/structured-attrs.sh index 378dbc735..f11992dcd 100644 --- a/tests/structured-attrs.sh +++ b/tests/functional/structured-attrs.sh @@ -15,9 +15,21 @@ nix-build structured-attrs.nix -A all -o $TEST_ROOT/result export NIX_BUILD_SHELL=$SHELL env NIX_PATH=nixpkgs=shell.nix nix-shell structured-attrs-shell.nix \ - --run 'test -e .attrs.json; test "3" = "$(jq ".my.list|length" < $NIX_ATTRS_JSON_FILE)"' + --run 'test "3" = "$(jq ".my.list|length" < $NIX_ATTRS_JSON_FILE)"' + +nix develop -f structured-attrs-shell.nix -c bash -c 'test "3" = "$(jq ".my.list|length" < $NIX_ATTRS_JSON_FILE)"' # `nix develop` is a slightly special way of dealing with environment vars, it parses # these from a shell-file exported from a derivation. This is to test especially `outputs` # (which is an associative array in thsi case) being fine. 
nix develop -f structured-attrs-shell.nix -c bash -c 'test -n "$out"' + +nix print-dev-env -f structured-attrs-shell.nix | grepQuiet 'NIX_ATTRS_JSON_FILE=' +nix print-dev-env -f structured-attrs-shell.nix | grepQuiet 'NIX_ATTRS_SH_FILE=' +nix print-dev-env -f shell.nix shellDrv | grepQuietInverse 'NIX_ATTRS_SH_FILE' + +jsonOut="$(nix print-dev-env -f structured-attrs-shell.nix --json)" + +test "$(<<<"$jsonOut" jq '.structuredAttrs|keys|.[]' -r)" = "$(printf ".attrs.json\n.attrs.sh")" + +test "$(<<<"$jsonOut" jq '.variables.out.value' -r)" = "$(<<<"$jsonOut" jq '.structuredAttrs.".attrs.json"' -r | jq -r '.outputs.out')" diff --git a/tests/substitute-with-invalid-ca.sh b/tests/functional/substitute-with-invalid-ca.sh similarity index 100% rename from tests/substitute-with-invalid-ca.sh rename to tests/functional/substitute-with-invalid-ca.sh diff --git a/tests/suggestions.sh b/tests/functional/suggestions.sh similarity index 100% rename from tests/suggestions.sh rename to tests/functional/suggestions.sh diff --git a/tests/supplementary-groups.sh b/tests/functional/supplementary-groups.sh similarity index 100% rename from tests/supplementary-groups.sh rename to tests/functional/supplementary-groups.sh diff --git a/tests/tarball.sh b/tests/functional/tarball.sh similarity index 95% rename from tests/tarball.sh rename to tests/functional/tarball.sh index 5f39658c9..6e621a28c 100644 --- a/tests/tarball.sh +++ b/tests/functional/tarball.sh @@ -9,6 +9,7 @@ rm -rf $tarroot mkdir -p $tarroot cp dependencies.nix $tarroot/default.nix cp config.nix dependencies.builder*.sh $tarroot/ +touch -d '@1000000000' $tarroot $tarroot/* hash=$(nix hash path $tarroot) @@ -36,6 +37,8 @@ test_tarball() { nix-build -o $TEST_ROOT/result -E "import (fetchTree { type = \"tarball\"; url = file:///does-not-exist/must-remain-unused/$tarball; narHash = \"$hash\"; })" expectStderr 102 nix-build -o $TEST_ROOT/result -E "import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"sha256-xdKv2pq/IiwLSnBBJXW8hNowI4MrdZfW+SYqDQs7Tzc=\"; })" | grep 'NAR hash mismatch in input' + [[ $(nix eval --impure --expr "(fetchTree file://$tarball).lastModified") = 1000000000 ]] + nix-instantiate --strict --eval -E "!((import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"$hash\"; })) ? submodules)" >&2 nix-instantiate --strict --eval -E "!((import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"$hash\"; })) ? 
submodules)" 2>&1 | grep 'true' diff --git a/tests/test-infra.sh b/tests/functional/test-infra.sh similarity index 100% rename from tests/test-infra.sh rename to tests/functional/test-infra.sh diff --git a/tests/test-libstoreconsumer.sh b/tests/functional/test-libstoreconsumer.sh similarity index 100% rename from tests/test-libstoreconsumer.sh rename to tests/functional/test-libstoreconsumer.sh diff --git a/tests/test-libstoreconsumer/README.md b/tests/functional/test-libstoreconsumer/README.md similarity index 100% rename from tests/test-libstoreconsumer/README.md rename to tests/functional/test-libstoreconsumer/README.md diff --git a/tests/test-libstoreconsumer/local.mk b/tests/functional/test-libstoreconsumer/local.mk similarity index 100% rename from tests/test-libstoreconsumer/local.mk rename to tests/functional/test-libstoreconsumer/local.mk diff --git a/tests/test-libstoreconsumer/main.cc b/tests/functional/test-libstoreconsumer/main.cc similarity index 92% rename from tests/test-libstoreconsumer/main.cc rename to tests/functional/test-libstoreconsumer/main.cc index 31b6d8ef1..c61489af6 100644 --- a/tests/test-libstoreconsumer/main.cc +++ b/tests/functional/test-libstoreconsumer/main.cc @@ -23,7 +23,7 @@ int main (int argc, char **argv) std::vector paths { DerivedPath::Built { - .drvPath = store->parseStorePath(drvPath), + .drvPath = makeConstantStorePathRef(store->parseStorePath(drvPath)), .outputs = OutputsSpec::Names{"out"} } }; diff --git a/tests/timeout.nix b/tests/functional/timeout.nix similarity index 100% rename from tests/timeout.nix rename to tests/functional/timeout.nix diff --git a/tests/timeout.sh b/tests/functional/timeout.sh similarity index 100% rename from tests/timeout.sh rename to tests/functional/timeout.sh diff --git a/tests/toString-path.sh b/tests/functional/toString-path.sh similarity index 100% rename from tests/toString-path.sh rename to tests/functional/toString-path.sh diff --git a/tests/undefined-variable.nix b/tests/functional/undefined-variable.nix similarity index 100% rename from tests/undefined-variable.nix rename to tests/functional/undefined-variable.nix diff --git a/tests/user-envs-migration.sh b/tests/functional/user-envs-migration.sh similarity index 100% rename from tests/user-envs-migration.sh rename to tests/functional/user-envs-migration.sh diff --git a/tests/user-envs.builder.sh b/tests/functional/user-envs.builder.sh similarity index 100% rename from tests/user-envs.builder.sh rename to tests/functional/user-envs.builder.sh diff --git a/tests/user-envs.nix b/tests/functional/user-envs.nix similarity index 100% rename from tests/user-envs.nix rename to tests/functional/user-envs.nix diff --git a/tests/user-envs.sh b/tests/functional/user-envs.sh similarity index 100% rename from tests/user-envs.sh rename to tests/functional/user-envs.sh diff --git a/tests/why-depends.sh b/tests/functional/why-depends.sh similarity index 100% rename from tests/why-depends.sh rename to tests/functional/why-depends.sh diff --git a/tests/zstd.sh b/tests/functional/zstd.sh similarity index 100% rename from tests/zstd.sh rename to tests/functional/zstd.sh diff --git a/tests/installer/default.nix b/tests/installer/default.nix index 49cfd2bcc..238c6ac8e 100644 --- a/tests/installer/default.nix +++ b/tests/installer/default.nix @@ -213,7 +213,7 @@ let source /etc/bashrc || true nix-env --version - nix --extra-experimental-features nix-command store ping + nix --extra-experimental-features nix-command store info out=\$(nix-build --no-substitute -E 'derivation { 
name = "foo"; system = "x86_64-linux"; builder = "/bin/sh"; args = ["-c" "echo foobar > \$out"]; }') [[ \$(cat \$out) = foobar ]] diff --git a/tests/lang/eval-fail-antiquoted-path.err.exp b/tests/lang/eval-fail-antiquoted-path.err.exp deleted file mode 100644 index 425deba42..000000000 --- a/tests/lang/eval-fail-antiquoted-path.err.exp +++ /dev/null @@ -1 +0,0 @@ -error: getting attributes of path ‘PWD/lang/fnord’: No such file or directory diff --git a/tests/lang/eval-fail-bad-antiquote-1.err.exp b/tests/lang/eval-fail-bad-antiquote-1.err.exp deleted file mode 100644 index cf94f53bc..000000000 --- a/tests/lang/eval-fail-bad-antiquote-1.err.exp +++ /dev/null @@ -1,10 +0,0 @@ -error: - … while evaluating a path segment - - at /pwd/lang/eval-fail-bad-antiquote-1.nix:1:2: - - 1| "${x: x}" - | ^ - 2| - - error: cannot coerce a function to a string diff --git a/tests/lang/eval-fail-bad-antiquote-2.err.exp b/tests/lang/eval-fail-bad-antiquote-2.err.exp deleted file mode 100644 index c8fe39d12..000000000 --- a/tests/lang/eval-fail-bad-antiquote-2.err.exp +++ /dev/null @@ -1 +0,0 @@ -error: operation 'addToStoreFromDump' is not supported by store 'dummy' diff --git a/tests/lang/eval-fail-bad-antiquote-3.err.exp b/tests/lang/eval-fail-bad-antiquote-3.err.exp deleted file mode 100644 index fbefbc826..000000000 --- a/tests/lang/eval-fail-bad-antiquote-3.err.exp +++ /dev/null @@ -1,10 +0,0 @@ -error: - … while evaluating a path segment - - at /pwd/lang/eval-fail-bad-antiquote-3.nix:1:3: - - 1| ''${x: x}'' - | ^ - 2| - - error: cannot coerce a function to a string diff --git a/tests/lang/eval-fail-bad-string-interpolation-2.err.exp b/tests/lang/eval-fail-bad-string-interpolation-2.err.exp deleted file mode 100644 index c8fe39d12..000000000 --- a/tests/lang/eval-fail-bad-string-interpolation-2.err.exp +++ /dev/null @@ -1 +0,0 @@ -error: operation 'addToStoreFromDump' is not supported by store 'dummy' diff --git a/tests/lang/eval-fail-dup-dynamic-attrs.err.exp b/tests/lang/eval-fail-dup-dynamic-attrs.err.exp deleted file mode 100644 index e01f8e6d0..000000000 --- a/tests/lang/eval-fail-dup-dynamic-attrs.err.exp +++ /dev/null @@ -1,8 +0,0 @@ -error: dynamic attribute 'b' already defined at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:2:11 - - at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:3:11: - - 2| set = { "${"" + "b"}" = 1; }; - 3| set = { "${"b" + ""}" = 2; }; - | ^ - 4| } diff --git a/tests/lang/eval-fail-nonexist-path.err.exp b/tests/lang/eval-fail-nonexist-path.err.exp deleted file mode 100644 index c8fe39d12..000000000 --- a/tests/lang/eval-fail-nonexist-path.err.exp +++ /dev/null @@ -1 +0,0 @@ -error: operation 'addToStoreFromDump' is not supported by store 'dummy' diff --git a/tests/lang/eval-okay-path.exp b/tests/lang/eval-okay-path.exp deleted file mode 100644 index 3ce7f8283..000000000 --- a/tests/lang/eval-okay-path.exp +++ /dev/null @@ -1 +0,0 @@ -"/nix/store/ya937r4ydw0l6kayq8jkyqaips9c75jm-output" diff --git a/tests/lang/eval-okay-path.nix b/tests/lang/eval-okay-path.nix deleted file mode 100644 index e67168cf3..000000000 --- a/tests/lang/eval-okay-path.nix +++ /dev/null @@ -1,7 +0,0 @@ -builtins.path - { path = ./.; - filter = path: _: baseNameOf path == "data"; - recursive = true; - sha256 = "1yhm3gwvg5a41yylymgblsclk95fs6jy72w0wv925mmidlhcq4sw"; - name = "output"; - } diff --git a/tests/lang/eval-okay-pathexists.nix b/tests/lang/eval-okay-pathexists.nix deleted file mode 100644 index 50c28ee0c..000000000 --- a/tests/lang/eval-okay-pathexists.nix +++ /dev/null @@ -1,5 +0,0 @@ 
-builtins.pathExists (builtins.toPath ./lib.nix) -&& builtins.pathExists (builtins.toPath (builtins.toString ./lib.nix)) -&& !builtins.pathExists (builtins.toPath (builtins.toString ./bla.nix)) -&& builtins.pathExists ./lib.nix -&& !builtins.pathExists ./bla.nix diff --git a/tests/lang/parse-fail-dup-attrs-6.err.exp b/tests/lang/parse-fail-dup-attrs-6.err.exp deleted file mode 100644 index 74823fc25..000000000 --- a/tests/lang/parse-fail-dup-attrs-6.err.exp +++ /dev/null @@ -1 +0,0 @@ -error: attribute ‘services.ssh’ at (string):3:3 already defined at (string):2:3 diff --git a/tests/legacy-ssh-store.sh b/tests/legacy-ssh-store.sh deleted file mode 100644 index 71b716b84..000000000 --- a/tests/legacy-ssh-store.sh +++ /dev/null @@ -1,4 +0,0 @@ -source common.sh - -# Check that store ping trusted doesn't yet work with ssh:// -nix --store ssh://localhost?remote-store=$TEST_ROOT/other-store store ping --json | jq -e 'has("trusted") | not' diff --git a/tests/nixos/containers/systemd-nspawn.nix b/tests/nixos/containers/systemd-nspawn.nix index f54f32f2a..1dad4ebd7 100644 --- a/tests/nixos/containers/systemd-nspawn.nix +++ b/tests/nixos/containers/systemd-nspawn.nix @@ -73,6 +73,8 @@ runCommand "test" --resolv-conf=off \ --bind-ro=/nix/store \ --bind=$out \ + --bind=/proc:/run/host/proc \ + --bind=/sys:/run/host/sys \ --private-network \ $toplevel/init '' diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix new file mode 100644 index 000000000..b391d7ef2 --- /dev/null +++ b/tests/nixos/default.nix @@ -0,0 +1,41 @@ +{ lib, nixpkgs, nixpkgsFor }: + +let + + nixos-lib = import (nixpkgs + "/nixos/lib") { }; + + # https://nixos.org/manual/nixos/unstable/index.html#sec-calling-nixos-tests + runNixOSTestFor = system: test: nixos-lib.runTest { + imports = [ test ]; + hostPkgs = nixpkgsFor.${system}.native; + defaults = { + nixpkgs.pkgs = nixpkgsFor.${system}.native; + }; + _module.args.nixpkgs = nixpkgs; + }; + +in + +{ + authorization = runNixOSTestFor "x86_64-linux" ./authorization.nix; + + remoteBuilds = runNixOSTestFor "x86_64-linux" ./remote-builds.nix; + + nix-copy-closure = runNixOSTestFor "x86_64-linux" ./nix-copy-closure.nix; + + nix-copy = runNixOSTestFor "x86_64-linux" ./nix-copy.nix; + + nssPreload = runNixOSTestFor "x86_64-linux" ./nss-preload.nix; + + githubFlakes = runNixOSTestFor "x86_64-linux" ./github-flakes.nix; + + sourcehutFlakes = runNixOSTestFor "x86_64-linux" ./sourcehut-flakes.nix; + + tarballFlakes = runNixOSTestFor "x86_64-linux" ./tarball-flakes.nix; + + containers = runNixOSTestFor "x86_64-linux" ./containers/containers.nix; + + setuid = lib.genAttrs + ["i686-linux" "x86_64-linux"] + (system: runNixOSTestFor system ./setuid.nix); +} diff --git a/tests/nixos/github-flakes.nix b/tests/nixos/github-flakes.nix index e4d347691..62ae8871b 100644 --- a/tests/nixos/github-flakes.nix +++ b/tests/nixos/github-flakes.nix @@ -82,7 +82,7 @@ let dir=NixOS-nixpkgs-${nixpkgs.shortRev} cp -prd ${nixpkgs} $dir # Set the correct timestamp in the tarball. - find $dir -print0 | xargs -0 touch -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${builtins.substring 12 2 nixpkgs.lastModifiedDate} -- + find $dir -print0 | xargs -0 touch -h -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${builtins.substring 12 2 nixpkgs.lastModifiedDate} -- tar cfz $out/archive/${nixpkgs.rev}.tar.gz $dir --hard-dereference ''; in @@ -186,6 +186,10 @@ in client.succeed("nix registry pin nixpkgs") client.succeed("nix flake metadata nixpkgs --tarball-ttl 0 >&2") + # Test fetchTree on a github URL. 
+ hash = client.succeed(f"nix eval --raw --expr '(fetchTree {info['url']}).narHash'") + assert hash == info['locked']['narHash'] + # Shut down the web server. The flake should be cached on the client. github.succeed("systemctl stop httpd.service") diff --git a/tests/nixos/nix-copy.nix b/tests/nixos/nix-copy.nix index ef053de03..2981cc2b8 100644 --- a/tests/nixos/nix-copy.nix +++ b/tests/nixos/nix-copy.nix @@ -1,4 +1,6 @@ # Test that ‘nix copy’ works over ssh. +# Run interactively with: +# rm key key.pub; nix run .#hydraJobs.tests.nix-copy.driverInteractive { lib, config, nixpkgs, hostPkgs, ... }: @@ -55,7 +57,9 @@ in { server.wait_for_unit("sshd") client.wait_for_unit("network.target") client.wait_for_unit("getty@tty1.service") - client.wait_for_text("]#") + # Either the prompt: ]# + # or an OCR misreading of it: 1# + client.wait_for_text("[]1]#") # Copy the closure of package A from the client to the server using password authentication, # and check that all prompts are visible diff --git a/tests/nixos/sourcehut-flakes.nix b/tests/nixos/sourcehut-flakes.nix index a76fed020..6e8d884a0 100644 --- a/tests/nixos/sourcehut-flakes.nix +++ b/tests/nixos/sourcehut-flakes.nix @@ -47,7 +47,7 @@ let cp -prd ${nixpkgs} $dir # Set the correct timestamp in the tarball. - find $dir -print0 | xargs -0 touch -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${builtins.substring 12 2 nixpkgs.lastModifiedDate} -- + find $dir -print0 | xargs -0 touch -h -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${builtins.substring 12 2 nixpkgs.lastModifiedDate} -- mkdir -p $out/archive tar cfz $out/archive/${nixpkgs.rev}.tar.gz $dir --hard-dereference diff --git a/tests/nixos/tarball-flakes.nix b/tests/nixos/tarball-flakes.nix index 1d43a5d04..e30d15739 100644 --- a/tests/nixos/tarball-flakes.nix +++ b/tests/nixos/tarball-flakes.nix @@ -11,7 +11,7 @@ let dir=nixpkgs-${nixpkgs.shortRev} cp -prd ${nixpkgs} $dir # Set the correct timestamp in the tarball. 
diff --git a/tests/nixos/tarball-flakes.nix b/tests/nixos/tarball-flakes.nix
index 1d43a5d04..e30d15739 100644
--- a/tests/nixos/tarball-flakes.nix
+++ b/tests/nixos/tarball-flakes.nix
@@ -11,7 +11,7 @@ let
       dir=nixpkgs-${nixpkgs.shortRev}
       cp -prd ${nixpkgs} $dir
       # Set the correct timestamp in the tarball.
-      find $dir -print0 | xargs -0 touch -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${builtins.substring 12 2 nixpkgs.lastModifiedDate} --
+      find $dir -print0 | xargs -0 touch -h -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${builtins.substring 12 2 nixpkgs.lastModifiedDate} --
       tar cfz $out/stable/${nixpkgs.rev}.tar.gz $dir --hard-dereference

       echo 'Redirect "/latest.tar.gz" "/stable/${nixpkgs.rev}.tar.gz"' > $out/.htaccess
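The touch invocations changed above (in github-flakes.nix, sourcehut-flakes.nix and tarball-flakes.nix) all build their timestamp the same way, and the new -h flag makes touch stamp symbolic links themselves rather than their targets. As a small worked example of the interpolation, assuming lastModifiedDate has its usual 14-digit YYYYMMDDHHMMSS shape (the value below is made up):

  # Sketch: how the argument passed to `touch -t` is assembled from lastModifiedDate.
  let
    lastModifiedDate = "20230904142512";  # assumed example value
  in
    "${builtins.substring 0 12 lastModifiedDate}.${builtins.substring 12 2 lastModifiedDate}"
    # evaluates to "202309041425.12", i.e. CCYYMMDDhhmm.ss as touch -t expects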
diff --git a/unit-test-data/libstore/common-protocol/content-address.bin b/unit-test-data/libstore/common-protocol/content-address.bin
new file mode 100644
index 000000000..8f14bcdb3
Binary files /dev/null and b/unit-test-data/libstore/common-protocol/content-address.bin differ
diff --git a/unit-test-data/libstore/common-protocol/drv-output.bin b/unit-test-data/libstore/common-protocol/drv-output.bin
new file mode 100644
index 000000000..800a45fd8
Binary files /dev/null and b/unit-test-data/libstore/common-protocol/drv-output.bin differ
diff --git a/unit-test-data/libstore/common-protocol/optional-content-address.bin b/unit-test-data/libstore/common-protocol/optional-content-address.bin
new file mode 100644
index 000000000..f8cfe65ba
Binary files /dev/null and b/unit-test-data/libstore/common-protocol/optional-content-address.bin differ
diff --git a/unit-test-data/libstore/common-protocol/optional-store-path.bin b/unit-test-data/libstore/common-protocol/optional-store-path.bin
new file mode 100644
index 000000000..4fbca5576
Binary files /dev/null and b/unit-test-data/libstore/common-protocol/optional-store-path.bin differ
diff --git a/unit-test-data/libstore/common-protocol/realisation.bin b/unit-test-data/libstore/common-protocol/realisation.bin
new file mode 100644
index 000000000..2176c6c4a
Binary files /dev/null and b/unit-test-data/libstore/common-protocol/realisation.bin differ
diff --git a/unit-test-data/libstore/common-protocol/set.bin b/unit-test-data/libstore/common-protocol/set.bin
new file mode 100644
index 000000000..ce11ede7f
Binary files /dev/null and b/unit-test-data/libstore/common-protocol/set.bin differ
diff --git a/unit-test-data/libstore/common-protocol/store-path.bin b/unit-test-data/libstore/common-protocol/store-path.bin
new file mode 100644
index 000000000..3fc05f298
Binary files /dev/null and b/unit-test-data/libstore/common-protocol/store-path.bin differ
diff --git a/unit-test-data/libstore/common-protocol/string.bin b/unit-test-data/libstore/common-protocol/string.bin
new file mode 100644
index 000000000..aa7b5a604
Binary files /dev/null and b/unit-test-data/libstore/common-protocol/string.bin differ
diff --git a/unit-test-data/libstore/common-protocol/vector.bin b/unit-test-data/libstore/common-protocol/vector.bin
new file mode 100644
index 000000000..7a37c8cd1
Binary files /dev/null and b/unit-test-data/libstore/common-protocol/vector.bin differ
diff --git a/unit-test-data/libstore/serve-protocol/build-result-2.2.bin b/unit-test-data/libstore/serve-protocol/build-result-2.2.bin
new file mode 100644
index 000000000..ae684778b
Binary files /dev/null and b/unit-test-data/libstore/serve-protocol/build-result-2.2.bin differ
diff --git a/unit-test-data/libstore/serve-protocol/build-result-2.3.bin b/unit-test-data/libstore/serve-protocol/build-result-2.3.bin
new file mode 100644
index 000000000..d51e08dfc
Binary files /dev/null and b/unit-test-data/libstore/serve-protocol/build-result-2.3.bin differ
diff --git a/unit-test-data/libstore/serve-protocol/build-result-2.6.bin b/unit-test-data/libstore/serve-protocol/build-result-2.6.bin
new file mode 100644
index 000000000..b02c706ea
Binary files /dev/null and b/unit-test-data/libstore/serve-protocol/build-result-2.6.bin differ
diff --git a/unit-test-data/libstore/serve-protocol/content-address.bin b/unit-test-data/libstore/serve-protocol/content-address.bin
new file mode 100644
index 000000000..8f14bcdb3
Binary files /dev/null and b/unit-test-data/libstore/serve-protocol/content-address.bin differ
diff --git a/unit-test-data/libstore/serve-protocol/drv-output.bin b/unit-test-data/libstore/serve-protocol/drv-output.bin
new file mode 100644
index 000000000..800a45fd8
Binary files /dev/null and b/unit-test-data/libstore/serve-protocol/drv-output.bin differ
diff --git a/unit-test-data/libstore/serve-protocol/optional-content-address.bin b/unit-test-data/libstore/serve-protocol/optional-content-address.bin
new file mode 100644
index 000000000..f8cfe65ba
Binary files /dev/null and b/unit-test-data/libstore/serve-protocol/optional-content-address.bin differ
diff --git a/unit-test-data/libstore/serve-protocol/optional-store-path.bin b/unit-test-data/libstore/serve-protocol/optional-store-path.bin
new file mode 100644
index 000000000..4fbca5576
Binary files /dev/null and b/unit-test-data/libstore/serve-protocol/optional-store-path.bin differ
diff --git a/unit-test-data/libstore/serve-protocol/realisation.bin b/unit-test-data/libstore/serve-protocol/realisation.bin
new file mode 100644
index 000000000..2176c6c4a
Binary files /dev/null and b/unit-test-data/libstore/serve-protocol/realisation.bin differ
diff --git a/unit-test-data/libstore/serve-protocol/set.bin b/unit-test-data/libstore/serve-protocol/set.bin
new file mode 100644
index 000000000..ce11ede7f
Binary files /dev/null and b/unit-test-data/libstore/serve-protocol/set.bin differ
diff --git a/unit-test-data/libstore/serve-protocol/store-path.bin b/unit-test-data/libstore/serve-protocol/store-path.bin
new file mode 100644
index 000000000..3fc05f298
Binary files /dev/null and b/unit-test-data/libstore/serve-protocol/store-path.bin differ
diff --git a/unit-test-data/libstore/serve-protocol/string.bin b/unit-test-data/libstore/serve-protocol/string.bin
new file mode 100644
index 000000000..aa7b5a604
Binary files /dev/null and b/unit-test-data/libstore/serve-protocol/string.bin differ
diff --git a/unit-test-data/libstore/serve-protocol/vector.bin b/unit-test-data/libstore/serve-protocol/vector.bin
new file mode 100644
index 000000000..7a37c8cd1
Binary files /dev/null and b/unit-test-data/libstore/serve-protocol/vector.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/build-result-1.27.bin b/unit-test-data/libstore/worker-protocol/build-result-1.27.bin
new file mode 100644
index 000000000..ae684778b
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/build-result-1.27.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/build-result-1.28.bin b/unit-test-data/libstore/worker-protocol/build-result-1.28.bin
new file mode 100644
index 000000000..74bcd5cf9
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/build-result-1.28.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/build-result-1.29.bin b/unit-test-data/libstore/worker-protocol/build-result-1.29.bin
new file mode 100644
index 000000000..b02c706ea
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/build-result-1.29.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/content-address.bin b/unit-test-data/libstore/worker-protocol/content-address.bin
new file mode 100644
index 000000000..8f14bcdb3
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/content-address.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/derived-path-1.29.bin b/unit-test-data/libstore/worker-protocol/derived-path-1.29.bin
new file mode 100644
index 000000000..05ea7678a
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/derived-path-1.29.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/derived-path-1.30.bin b/unit-test-data/libstore/worker-protocol/derived-path-1.30.bin
new file mode 100644
index 000000000..0729b2690
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/derived-path-1.30.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/drv-output.bin b/unit-test-data/libstore/worker-protocol/drv-output.bin
new file mode 100644
index 000000000..800a45fd8
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/drv-output.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/keyed-build-result-1.29.bin b/unit-test-data/libstore/worker-protocol/keyed-build-result-1.29.bin
new file mode 100644
index 000000000..c5b3c7f36
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/keyed-build-result-1.29.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/optional-content-address.bin b/unit-test-data/libstore/worker-protocol/optional-content-address.bin
new file mode 100644
index 000000000..f8cfe65ba
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/optional-content-address.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/optional-store-path.bin b/unit-test-data/libstore/worker-protocol/optional-store-path.bin
new file mode 100644
index 000000000..4fbca5576
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/optional-store-path.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/optional-trusted-flag.bin b/unit-test-data/libstore/worker-protocol/optional-trusted-flag.bin
new file mode 100644
index 000000000..51b239409
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/optional-trusted-flag.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/realisation.bin b/unit-test-data/libstore/worker-protocol/realisation.bin
new file mode 100644
index 000000000..2176c6c4a
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/realisation.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/set.bin b/unit-test-data/libstore/worker-protocol/set.bin
new file mode 100644
index 000000000..ce11ede7f
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/set.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/store-path.bin b/unit-test-data/libstore/worker-protocol/store-path.bin
new file mode 100644
index 000000000..3fc05f298
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/store-path.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/string.bin b/unit-test-data/libstore/worker-protocol/string.bin
new file mode 100644
index 000000000..aa7b5a604
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/string.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/unkeyed-valid-path-info-1.15.bin b/unit-test-data/libstore/worker-protocol/unkeyed-valid-path-info-1.15.bin
new file mode 100644
index 000000000..e69ccbe83
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/unkeyed-valid-path-info-1.15.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/valid-path-info-1.15.bin b/unit-test-data/libstore/worker-protocol/valid-path-info-1.15.bin
new file mode 100644
index 000000000..7adc8dd44
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/valid-path-info-1.15.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/valid-path-info-1.16.bin b/unit-test-data/libstore/worker-protocol/valid-path-info-1.16.bin
new file mode 100644
index 000000000..a72de6bd6
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/valid-path-info-1.16.bin differ
diff --git a/unit-test-data/libstore/worker-protocol/vector.bin b/unit-test-data/libstore/worker-protocol/vector.bin
new file mode 100644
index 000000000..7a37c8cd1
Binary files /dev/null and b/unit-test-data/libstore/worker-protocol/vector.bin differ