From 15833516a4bad0a4ae7786293b22df4bf650aa80 Mon Sep 17 00:00:00 2001 From: Matthew Bauer Date: Tue, 5 Feb 2019 16:42:45 -0500 Subject: [PATCH 001/510] Add armv6l-linux & armv7l-linux as cross jobs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is a cheap way to get 32-bit ARM working. We don’t support it officially but lots of people have raspberry pis and similar hardware they want to install the Nix package manager on. --- release.nix | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/release.nix b/release.nix index 271645067..0ec742906 100644 --- a/release.nix +++ b/release.nix @@ -2,6 +2,7 @@ , nixpkgs ? builtins.fetchGit { url = https://github.com/NixOS/nixpkgs-channels.git; ref = "nixos-18.09"; } , officialRelease ? false , systems ? [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ] +, crossSystems ? [ "armv6l-linux" "armv7l-linux" ] }: let @@ -53,11 +54,12 @@ let }; - build = pkgs.lib.genAttrs systems (system: + build = pkgs.lib.genAttrs (systems ++ crossSystems) (system: - let pkgs = import nixpkgs { inherit system; }; in - - with pkgs; + let pkgs = if builtins.elem system systems + then import nixpkgs { inherit system; } + else import nixpkgs { crossSystem = { inherit system; }; }; + in with pkgs; with import ./release-common.nix { inherit pkgs; }; @@ -89,9 +91,12 @@ let }); - perlBindings = pkgs.lib.genAttrs systems (system: + perlBindings = pkgs.lib.genAttrs (systems ++ crossSystems) (system: - let pkgs = import nixpkgs { inherit system; }; in with pkgs; + let pkgs = if builtins.elem system systems + then import nixpkgs { inherit system; } + else import nixpkgs { crossSystem = { inherit system; }; }; + in with pkgs; releaseTools.nixBuild { name = "nix-perl"; @@ -112,9 +117,12 @@ let }); - binaryTarball = pkgs.lib.genAttrs systems (system: + binaryTarball = pkgs.lib.genAttrs (systems ++ crossSystems) (system: - with import nixpkgs { inherit system; }; + let pkgs = if builtins.elem system systems + then import nixpkgs { inherit system; } + else import nixpkgs { crossSystem = { inherit system; }; }; + in with pkgs; let toplevel = builtins.getAttr system jobs.build; From 1996af425ac8ddea1e8a591650e7d0caba2aa201 Mon Sep 17 00:00:00 2001 From: Matthew Bauer Date: Wed, 6 Feb 2019 21:43:47 -0500 Subject: [PATCH 002/510] Use buildPackages for native dependencies Unfortunately, releaseTools.nixBuild does not separate native and non-native build inputs. 
As an alternative, we can just use buildPackages to get the native version of some packages like: - pkgconfig - git - curl - utillinux --- release-common.nix | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/release-common.nix b/release-common.nix index 4c5565985..f2aa57c2c 100644 --- a/release-common.nix +++ b/release-common.nix @@ -50,14 +50,16 @@ rec { buildDeps = [ curl bzip2 xz brotli editline - openssl pkgconfig sqlite boehmgc + openssl sqlite boehmgc boost + buildPackages.pkgconfig + # Tests - git - mercurial + buildPackages.git + buildPackages.mercurial ] - ++ lib.optionals stdenv.isLinux [libseccomp utillinuxMinimal] + ++ lib.optionals stdenv.isLinux [libseccomp buildPackages.utillinuxMinimal] ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) ((aws-sdk-cpp.override { From e9072ded9749ab00cc397980e8a26f83d341efc0 Mon Sep 17 00:00:00 2001 From: Matthew Bauer Date: Wed, 6 Feb 2019 22:43:28 -0500 Subject: [PATCH 003/510] Use nativeBuildInputs --- release-common.nix | 16 +++++++++------- release.nix | 3 +++ 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/release-common.nix b/release-common.nix index f2aa57c2c..707d36f95 100644 --- a/release-common.nix +++ b/release-common.nix @@ -47,19 +47,21 @@ rec { autoreconfHook ]; + nativeBuildDeps = + [ buildPackages.pkgconfig + + # Tests + buildPackages.git + buildPackages.mercurial + ] ++ lib.optional stdenv.isLinux buildPackages.utillinuxMinimal; + buildDeps = [ curl bzip2 xz brotli editline openssl sqlite boehmgc boost - - buildPackages.pkgconfig - - # Tests - buildPackages.git - buildPackages.mercurial ] - ++ lib.optionals stdenv.isLinux [libseccomp buildPackages.utillinuxMinimal] + ++ lib.optional stdenv.isLinux libseccomp ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) ((aws-sdk-cpp.override { diff --git a/release.nix b/release.nix index 0ec742906..9843efa29 100644 --- a/release.nix +++ b/release.nix @@ -24,6 +24,7 @@ let src = nix; inherit officialRelease; + nativeBuildInputs = nativeBuildDeps; buildInputs = tarballDeps ++ buildDeps; configureFlags = "--enable-gc"; @@ -67,6 +68,7 @@ let name = "nix"; src = tarball; + nativeBuildInputs = nativeBuildDeps; buildInputs = buildDeps; preConfigure = @@ -199,6 +201,7 @@ let name = "nix-build"; src = tarball; + nativeBuildInputs = nativeBuildDeps; buildInputs = buildDeps; dontInstall = false; From f6ea56dfac1f4df45a5fa9e2801bc632dee9eff7 Mon Sep 17 00:00:00 2001 From: Matthew Bauer Date: Wed, 6 Feb 2019 23:04:40 -0500 Subject: [PATCH 004/510] Get shellcheck from buildPackages --- release.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/release.nix b/release.nix index 9843efa29..06db7bdc0 100644 --- a/release.nix +++ b/release.nix @@ -133,7 +133,7 @@ let in runCommand "nix-binary-tarball-${version}" - { nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck; + { nativeBuildInputs = lib.optional (system != "aarch64-linux") buildPackages.shellcheck; meta.description = "Distribution-independent Nix bootstrap binaries for ${system}"; } '' From 4fefe26717fa70828e3f524e43c76e3f7b7a09b0 Mon Sep 17 00:00:00 2001 From: Matthew Bauer Date: Fri, 5 Feb 2021 18:22:34 -0600 Subject: [PATCH 005/510] Re-enable armv6l support This fixes the libatomic detection. 
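
A rough sketch of what the re-enabled cross job evaluates (illustrative
only; the exact flake attribute names are not spelled out here), following
the same pattern release.nix already uses for cross builds:

    # hypothetical usage, mirroring the crossSystem pattern from release.nix
    import nixpkgs { crossSystem = { system = "armv6l-linux"; }; }

On armv6l the configure check below detects that the GCC atomic builtins
need libatomic, and the flag is now passed via LDFLAGS rather than LIBS.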
--- configure.ac | 2 +- flake.nix | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/configure.ac b/configure.ac index 2047ed8d2..685c471c5 100644 --- a/configure.ac +++ b/configure.ac @@ -152,7 +152,7 @@ int main() { }]])], GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC=no, GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC=yes) AC_MSG_RESULT($GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC) if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then - LIBS="-latomic $LIBS" + LDFLAGS="$LDFLAGS -latomic" fi PKG_PROG_PKG_CONFIG diff --git a/flake.nix b/flake.nix index 869b92cb7..7e02fd70d 100644 --- a/flake.nix +++ b/flake.nix @@ -20,7 +20,7 @@ linuxSystems = linux64BitSystems ++ [ "i686-linux" ]; systems = linuxSystems ++ [ "x86_64-darwin" ]; - crossSystems = [ "armv7l-linux" ]; + crossSystems = [ "armv6l-linux" "armv7l-linux" ]; forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system); From 1130b2882415b003f5ba2fc0b5466b573fe1b05a Mon Sep 17 00:00:00 2001 From: Graham Christensen Date: Wed, 24 Feb 2021 20:52:22 -0500 Subject: [PATCH 006/510] distributed builds: load remote builder host key from the machines file This is already used by Hydra, and is very useful when materializing a remote builder list from service discovery. This allows the service discovery tool to only sync one file instead of two. --- .../src/advanced-topics/distributed-builds.md | 10 +++++++--- src/libstore/legacy-ssh-store.cc | 2 ++ src/libstore/machines.cc | 6 ++++++ src/libstore/ssh-store.cc | 2 ++ src/libstore/ssh.cc | 16 ++++++++++++++-- src/libstore/ssh.hh | 3 ++- 6 files changed, 33 insertions(+), 6 deletions(-) diff --git a/doc/manual/src/advanced-topics/distributed-builds.md b/doc/manual/src/advanced-topics/distributed-builds.md index c6966a50b..580b36736 100644 --- a/doc/manual/src/advanced-topics/distributed-builds.md +++ b/doc/manual/src/advanced-topics/distributed-builds.md @@ -37,7 +37,7 @@ then you need to ensure that the `PATH` of non-interactive login shells contains Nix. > **Warning** -> +> > If you are building via the Nix daemon, it is the Nix daemon user > account (that is, `root`) that should have SSH access to the remote > machine. If you can’t or don’t want to configure `root` to be able to @@ -52,7 +52,7 @@ example, the following command allows you to build a derivation for ```console $ uname Linux - + $ nix build \ '(with import { system = "x86_64-darwin"; }; runCommand "foo" {} "uname > $out")' \ --builders 'ssh://mac x86_64-darwin' @@ -103,7 +103,7 @@ default, set it to `-`. ```nix requiredSystemFeatures = [ "kvm" ]; ``` - + will cause the build to be performed on a machine that has the `kvm` feature. @@ -112,6 +112,10 @@ default, set it to `-`. features appear in the derivation’s `requiredSystemFeatures` attribute.. +8. The (base64-encoded) public host key of the remote machine. If omitted, SSH + will use its regular known-hosts file. Specifically, the field is calculated + via `base64 -w0 /etc/ssh/ssh_host_ed25519_key.pub`. 
+ For example, the machine specification nix@scratchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 1 kvm diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 253c0033e..99b0bb5a8 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -15,6 +15,7 @@ struct LegacySSHStoreConfig : virtual StoreConfig using StoreConfig::StoreConfig; const Setting maxConnections{(StoreConfig*) this, 1, "max-connections", "maximum number of concurrent SSH connections"}; const Setting sshKey{(StoreConfig*) this, "", "ssh-key", "path to an SSH private key"}; + const Setting sshPublicHostKey{(StoreConfig*) this, "", "base64-ssh-public-host-key", "The public half of the host's SSH key"}; const Setting compress{(StoreConfig*) this, false, "compress", "whether to compress the connection"}; const Setting remoteProgram{(StoreConfig*) this, "nix-store", "remote-program", "path to the nix-store executable on the remote system"}; const Setting remoteStore{(StoreConfig*) this, "", "remote-store", "URI of the store on the remote system"}; @@ -59,6 +60,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor , master( host, sshKey, + sshPublicHostKey, // Use SSH master only if using more than 1 connection. connections->capacity() > 1, compress, diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 7db2556f4..b42e5e434 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -54,9 +54,15 @@ ref Machine::openStore() const { if (hasPrefix(storeUri, "ssh://")) { storeParams["max-connections"] = "1"; storeParams["log-fd"] = "4"; + } + + if (hasPrefix(storeUri, "ssh://") || hasPrefix(storeUri, "ssh-ng://")) { if (sshKey != "") storeParams["ssh-key"] = sshKey; + if (sshPublicHostKey != "") + storeParams["base64-ssh-public-host-key"] = sshPublicHostKey; } + { auto & fs = storeParams["system-features"]; auto append = [&](auto feats) { diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index 17c258201..f2caf2aeb 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -13,6 +13,7 @@ struct SSHStoreConfig : virtual RemoteStoreConfig using RemoteStoreConfig::RemoteStoreConfig; const Setting sshKey{(StoreConfig*) this, "", "ssh-key", "path to an SSH private key"}; + const Setting sshPublicHostKey{(StoreConfig*) this, "", "base64-ssh-public-host-key", "The public half of the host's SSH key"}; const Setting compress{(StoreConfig*) this, false, "compress", "whether to compress the connection"}; const Setting remoteProgram{(StoreConfig*) this, "nix-daemon", "remote-program", "path to the nix-daemon executable on the remote system"}; const Setting remoteStore{(StoreConfig*) this, "", "remote-store", "URI of the store on the remote system"}; @@ -34,6 +35,7 @@ public: , master( host, sshKey, + sshPublicHostKey, // Use SSH master only if using more than 1 connection. 
connections->capacity() > 1, compress) diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 84548a6e4..235eed37a 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -2,24 +2,37 @@ namespace nix { -SSHMaster::SSHMaster(const std::string & host, const std::string & keyFile, bool useMaster, bool compress, int logFD) +SSHMaster::SSHMaster(const std::string & host, const std::string & keyFile, const std::string & sshPublicHostKey, bool useMaster, bool compress, int logFD) : host(host) , fakeSSH(host == "localhost") , keyFile(keyFile) + , sshPublicHostKey(sshPublicHostKey) , useMaster(useMaster && !fakeSSH) , compress(compress) , logFD(logFD) { if (host == "" || hasPrefix(host, "-")) throw Error("invalid SSH host name '%s'", host); + + auto state(state_.lock()); + state->tmpDir = std::make_unique(createTempDir("", "nix", true, true, 0700)); } void SSHMaster::addCommonSSHOpts(Strings & args) { + auto state(state_.lock()); + for (auto & i : tokenizeString(getEnv("NIX_SSHOPTS").value_or(""))) args.push_back(i); if (!keyFile.empty()) args.insert(args.end(), {"-i", keyFile}); + if (!sshPublicHostKey.empty()) { + Path fileName = (Path) *state->tmpDir + "/host-key"; + auto p = host.rfind("@"); + string thost = p != string::npos ? string(host, p + 1) : host; + writeFile(fileName, thost + " " + base64Decode(sshPublicHostKey) + "\n"); + args.insert(args.end(), {"-oUserKnownHostsFile=" + fileName}); + } if (compress) args.push_back("-C"); } @@ -87,7 +100,6 @@ Path SSHMaster::startMaster() if (state->sshMaster != -1) return state->socketPath; - state->tmpDir = std::make_unique(createTempDir("", "nix", true, true, 0700)); state->socketPath = (Path) *state->tmpDir + "/ssh.sock"; diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh index 4f0f0bd29..dabbcedda 100644 --- a/src/libstore/ssh.hh +++ b/src/libstore/ssh.hh @@ -12,6 +12,7 @@ private: const std::string host; bool fakeSSH; const std::string keyFile; + const std::string sshPublicHostKey; const bool useMaster; const bool compress; const int logFD; @@ -29,7 +30,7 @@ private: public: - SSHMaster(const std::string & host, const std::string & keyFile, bool useMaster, bool compress, int logFD = -1); + SSHMaster(const std::string & host, const std::string & keyFile, const std::string & sshPublicHostKey, bool useMaster, bool compress, int logFD = -1); struct Connection { From 12ec962dd8a6d8058ba11e517d74f6a07b3dc903 Mon Sep 17 00:00:00 2001 From: "Travis A. Everett" Date: Thu, 25 Feb 2021 16:12:51 -0600 Subject: [PATCH 007/510] simplify changing cachix cache for install tests - convert cachix cache name from an env into a secret so it (along with the token/key) can be set once per fork - use CACHIX_AUTH_TOKEN in addition to CACHIX_SIGNING_KEY; it looks like cachix will try signing key first, then auth token. 
--- .github/workflows/test.yml | 36 +++++++++++++++++++++++------------- 1 file changed, 23 insertions(+), 13 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index bde6106e0..2531a7d35 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,52 +8,62 @@ jobs: matrix: os: [ubuntu-latest, macos-latest] runs-on: ${{ matrix.os }} - env: - CACHIX_NAME: nix-ci + steps: - uses: actions/checkout@v2.3.4 with: fetch-depth: 0 - uses: cachix/install-nix-action@v12 + - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - uses: cachix/cachix-action@v8 with: name: '${{ env.CACHIX_NAME }}' signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}' + authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' #- run: nix flake check - run: nix-build -A checks.$(if [[ `uname` = Linux ]]; then echo x86_64-linux; else echo x86_64-darwin; fi) - installer: - if: github.event_name == 'push' - needs: tests + check_cachix: + name: Cachix secret present for installer tests + runs-on: ubuntu-latest + outputs: + secret: ${{ steps.secret.outputs.secret }} + steps: + - name: Check for Cachix secret + id: secret + env: + _CACHIX_SECRETS: ${{ secrets.CACHIX_SIGNING_KEY }}${{ secrets.CACHIX_AUTH_TOKEN }} + run: echo "::set-output name=secret::${{ env._CACHIX_SECRETS != '' }}" + installer: + needs: [tests, check_cachix] + if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true' runs-on: ubuntu-latest - env: - CACHIX_NAME: nix-ci outputs: installerURL: ${{ steps.prepare-installer.outputs.installerURL }} steps: - uses: actions/checkout@v2.3.4 with: fetch-depth: 0 + - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - uses: cachix/install-nix-action@v12 - uses: cachix/cachix-action@v8 with: name: '${{ env.CACHIX_NAME }}' signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}' + authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - id: prepare-installer run: scripts/prepare-installer-for-github-actions installer_test: - if: github.event_name == 'push' - needs: installer + needs: [installer, check_cachix] + if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true' strategy: matrix: os: [ubuntu-latest, macos-latest] runs-on: ${{ matrix.os }} - env: - CACHIX_NAME: nix-ci steps: - uses: actions/checkout@v2.3.4 + - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - uses: cachix/install-nix-action@master with: install_url: '${{needs.installer.outputs.installerURL}}' - install_options: '--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve' + install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve" - run: nix-instantiate -E 'builtins.currentTime' --eval - \ No newline at end of file From bd0b0f9ab7655553f64f158d5d9a9445f5604abd Mon Sep 17 00:00:00 2001 From: Puck Meerburg Date: Fri, 26 Feb 2021 21:48:41 +0000 Subject: [PATCH 008/510] mk: add support for CPPFLAGS --- mk/lib.mk | 1 + mk/patterns.mk | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/mk/lib.mk b/mk/lib.mk index a09ebaa97..6b92136cd 100644 --- a/mk/lib.mk +++ b/mk/lib.mk @@ -153,4 +153,5 @@ endif @echo " CFLAGS: Flags for the C compiler" @echo " CXX ($(CXX)): C++ compiler to be used" @echo " CXXFLAGS: Flags for the C++ compiler" + @echo " CPPFLAGS: C preprocessor flags, used for both CC and CXX" @$(print-var-help) diff --git a/mk/patterns.mk b/mk/patterns.mk index 7319f4cdd..86a724806 
100644 --- a/mk/patterns.mk +++ b/mk/patterns.mk @@ -1,11 +1,11 @@ $(buildprefix)%.o: %.cc @mkdir -p "$(dir $@)" - $(trace-cxx) $(CXX) -o $@ -c $< $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP + $(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP $(buildprefix)%.o: %.cpp @mkdir -p "$(dir $@)" - $(trace-cxx) $(CXX) -o $@ -c $< $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP + $(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP $(buildprefix)%.o: %.c @mkdir -p "$(dir $@)" - $(trace-cc) $(CC) -o $@ -c $< $(GLOBAL_CFLAGS) $(CFLAGS) $($@_CFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP + $(trace-cc) $(CC) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CFLAGS) $(CFLAGS) $($@_CFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP From 7241fdc3d2386d256ca8870ca955b498d0ac2ff7 Mon Sep 17 00:00:00 2001 From: Puck Meerburg Date: Fri, 26 Feb 2021 22:06:06 +0000 Subject: [PATCH 009/510] Properly propagate libseccomp linker flags --- Makefile.config.in | 1 + src/libstore/local.mk | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile.config.in b/Makefile.config.in index 9d0500e48..3c1f01d1e 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -17,6 +17,7 @@ LIBBROTLI_LIBS = @LIBBROTLI_LIBS@ LIBCURL_LIBS = @LIBCURL_LIBS@ LIBLZMA_LIBS = @LIBLZMA_LIBS@ OPENSSL_LIBS = @OPENSSL_LIBS@ +LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_VERSION = @PACKAGE_VERSION@ SHELL = @bash@ diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 03c4351ac..cf0933705 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -28,7 +28,7 @@ ifeq ($(OS), SunOS) endif ifeq ($(HAVE_SECCOMP), 1) - libstore_LDFLAGS += -lseccomp + libstore_LDFLAGS += $(LIBSECCOMP_LIBS) endif libstore_CXXFLAGS += \ From 2d7917f035c7396e87546b130317a2e5234afa36 Mon Sep 17 00:00:00 2001 From: Puck Meerburg Date: Fri, 26 Feb 2021 21:42:51 +0000 Subject: [PATCH 010/510] Revert "Add support for building JARs from Java sources" This reverts commit 259086de841d155f7951c2cc50f799a4631aa512. --- mk/jars.mk | 36 ------------------------------------ mk/lib.mk | 12 +----------- mk/tracing.mk | 2 -- 3 files changed, 1 insertion(+), 49 deletions(-) delete mode 100644 mk/jars.mk diff --git a/mk/jars.mk b/mk/jars.mk deleted file mode 100644 index c8513e664..000000000 --- a/mk/jars.mk +++ /dev/null @@ -1,36 +0,0 @@ -define build-jar - - $(1)_NAME ?= $(1) - - _d := $$(strip $$($(1)_DIR)) - - $(1)_PATH := $$(_d)/$$($(1)_NAME).jar - - $(1)_TMPDIR := $$(_d)/.$$($(1)_NAME).jar.tmp - - _jars := $$(foreach jar, $$($(1)_JARS), $$($$(jar)_PATH)) - - $$($(1)_PATH): $$($(1)_SOURCES) $$(_jars) $$($(1)_EXTRA_DEPS)| $$($(1)_ORDER_AFTER) - @rm -rf $$($(1)_TMPDIR) - @mkdir -p $$($(1)_TMPDIR) - $$(trace-javac) javac $(GLOBAL_JAVACFLAGS) $$($(1)_JAVACFLAGS) -d $$($(1)_TMPDIR) \ - $$(foreach fn, $$($(1)_SOURCES), '$$(fn)') \ - -cp "$$(subst $$(space),,$$(foreach jar,$$($(1)_JARS),$$($$(jar)_PATH):))$$$$CLASSPATH" - @echo -e '$$(subst $$(newline),\n,$$($(1)_MANIFEST))' > $$($(1)_PATH).manifest - $$(trace-jar) jar cfm $$($(1)_PATH) $$($(1)_PATH).manifest -C $$($(1)_TMPDIR) . 
- @rm $$($(1)_PATH).manifest - @rm -rf $$($(1)_TMPDIR) - - $(1)_INSTALL_DIR ?= $$(jardir) - - $(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME).jar - - $$(eval $$(call install-file-as, $$($(1)_PATH), $$($(1)_INSTALL_PATH), 0644)) - - install: $$($(1)_INSTALL_PATH) - - jars-list += $$($(1)_PATH) - - clean-files += $$($(1)_PATH) - -endef diff --git a/mk/lib.mk b/mk/lib.mk index a09ebaa97..6a1c465b6 100644 --- a/mk/lib.mk +++ b/mk/lib.mk @@ -31,7 +31,6 @@ libdir ?= $(prefix)/lib bindir ?= $(prefix)/bin libexecdir ?= $(prefix)/libexec datadir ?= $(prefix)/share -jardir ?= $(datadir)/java localstatedir ?= $(prefix)/var sysconfdir ?= $(prefix)/etc mandir ?= $(prefix)/share/man @@ -74,7 +73,6 @@ BUILD_DEBUG ?= 1 ifeq ($(BUILD_DEBUG), 1) GLOBAL_CFLAGS += -g GLOBAL_CXXFLAGS += -g - GLOBAL_JAVACFLAGS += -g endif @@ -84,7 +82,6 @@ include mk/clean.mk include mk/install.mk include mk/libraries.mk include mk/programs.mk -include mk/jars.mk include mk/patterns.mk include mk/templates.mk include mk/tests.mk @@ -102,7 +99,6 @@ $(foreach mf, $(makefiles), $(eval $(call include-sub-makefile, $(mf)))) # Instantiate stuff. $(foreach lib, $(libraries), $(eval $(call build-library,$(lib)))) $(foreach prog, $(programs), $(eval $(call build-program,$(prog)))) -$(foreach jar, $(jars), $(eval $(call build-jar,$(jar)))) $(foreach script, $(bin-scripts), $(eval $(call install-program-in,$(script),$(bindir)))) $(foreach script, $(bin-scripts), $(eval programs-list += $(script))) $(foreach script, $(noinst-scripts), $(eval programs-list += $(script))) @@ -113,7 +109,7 @@ $(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/ .PHONY: default all man help -all: $(programs-list) $(libs-list) $(jars-list) $(man-pages) +all: $(programs-list) $(libs-list) $(man-pages) man: $(man-pages) @@ -137,12 +133,6 @@ ifdef libs-list @echo "The following libraries can be built:" @echo "" @for i in $(libs-list); do echo " $$i"; done -endif -ifdef jars-list - @echo "" - @echo "The following JARs can be built:" - @echo "" - @for i in $(jars-list); do echo " $$i"; done endif @echo "" @echo "The following variables control the build:" diff --git a/mk/tracing.mk b/mk/tracing.mk index 54c77ab60..1fc5573d7 100644 --- a/mk/tracing.mk +++ b/mk/tracing.mk @@ -8,8 +8,6 @@ ifeq ($(V), 0) trace-ld = @echo " LD " $@; trace-ar = @echo " AR " $@; trace-install = @echo " INST " $@; - trace-javac = @echo " JAVAC " $@; - trace-jar = @echo " JAR " $@; trace-mkdir = @echo " MKDIR " $@; trace-test = @echo " TEST " $@; From 4bbd80c5366711b8f1b5ad108ba22206d3bee783 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 12 Feb 2021 21:50:50 +0000 Subject: [PATCH 011/510] Throw error for derivation goal with bogus wanted output --- src/libstore/build/derivation-goal.cc | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index c29237f5c..530f8829a 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -1243,9 +1243,12 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap() void DerivationGoal::checkPathValidity() { bool checkHash = buildMode == bmRepair; + auto wantedOutputsLeft = wantedOutputs; for (auto & i : queryPartialDerivationOutputMap()) { InitialOutput & info = initialOutputs.at(i.first); info.wanted = wantOutput(i.first, wantedOutputs); + if (info.wanted) + wantedOutputsLeft.erase(i.first); if (i.second) { auto outputPath = *i.second; info.known = { @@ -1267,6 +1270,11 @@ void 
DerivationGoal::checkPathValidity() } } } + // If we requested all the outputs via the empty set, we are always fine. + // If we requested specific elements, the loop above removes all the valid + // ones, so any that are left must be invalid. + if (!wantedOutputsLeft.empty()) + throw UsageError("some wanted outputs are not provided by the derivation: %s", concatStringsSep(", ", wantedOutputsLeft)); } From 259d6778efd865ccd3b5fbf4f3a29002a7d58d93 Mon Sep 17 00:00:00 2001 From: regnat Date: Mon, 9 Nov 2020 16:04:18 +0100 Subject: [PATCH 012/510] Move the CA tests to a sub-directory Requires a slight update to the test infra to work properly, but having the possibility to group tests that way makes the whole thing quite cleaner imho --- mk/run_test.sh | 2 +- mk/tests.mk | 2 +- tests/{content-addressed.sh => ca/build.sh} | 2 ++ tests/ca/common.sh | 1 + tests/{ => ca}/content-addressed.nix | 2 +- tests/{nix-copy-content-addressed.sh => ca/nix-copy.sh} | 0 tests/common.sh.in | 2 +- tests/local.mk | 6 +++--- 8 files changed, 10 insertions(+), 7 deletions(-) rename tests/{content-addressed.sh => ca/build.sh} (98%) create mode 100644 tests/ca/common.sh rename tests/{ => ca}/content-addressed.nix (98%) rename tests/{nix-copy-content-addressed.sh => ca/nix-copy.sh} (100%) diff --git a/mk/run_test.sh b/mk/run_test.sh index 6af5b070a..3783d3bf7 100755 --- a/mk/run_test.sh +++ b/mk/run_test.sh @@ -14,7 +14,7 @@ if [ -t 1 ]; then yellow="" normal="" fi -(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} init.sh 2>/dev/null > /dev/null) +(cd tests && env ${TESTS_ENVIRONMENT} init.sh 2>/dev/null > /dev/null) log="$(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} $(basename $1) 2>&1)" status=$? if [ $status -eq 0 ]; then diff --git a/mk/tests.mk b/mk/tests.mk index c1e140bac..21bdc5748 100644 --- a/mk/tests.mk +++ b/mk/tests.mk @@ -8,7 +8,7 @@ define run-install-test .PHONY: $1.test $1.test: $1 $(test-deps) - @env TEST_NAME=$(notdir $(basename $1)) TESTS_ENVIRONMENT="$(tests-environment)" mk/run_test.sh $1 < /dev/null + @env TEST_NAME=$(basename $1) TESTS_ENVIRONMENT="$(tests-environment)" mk/run_test.sh $1 < /dev/null endef diff --git a/tests/content-addressed.sh b/tests/ca/build.sh similarity index 98% rename from tests/content-addressed.sh rename to tests/ca/build.sh index 7e32e1f28..35bf1dcf7 100644 --- a/tests/content-addressed.sh +++ b/tests/ca/build.sh @@ -61,7 +61,9 @@ testNixCommand () { # Disabled until we have it properly working # testRemoteCache +clearStore testDeterministicCA +clearStore testCutoff testGC testNixCommand diff --git a/tests/ca/common.sh b/tests/ca/common.sh new file mode 100644 index 000000000..e083d873c --- /dev/null +++ b/tests/ca/common.sh @@ -0,0 +1 @@ +source ../common.sh diff --git a/tests/content-addressed.nix b/tests/ca/content-addressed.nix similarity index 98% rename from tests/content-addressed.nix rename to tests/ca/content-addressed.nix index 61079176f..e5b1c4de3 100644 --- a/tests/content-addressed.nix +++ b/tests/ca/content-addressed.nix @@ -1,4 +1,4 @@ -with import ./config.nix; +with import ../config.nix; { seed ? 0 }: # A simple content-addressed derivation. 
diff --git a/tests/nix-copy-content-addressed.sh b/tests/ca/nix-copy.sh similarity index 100% rename from tests/nix-copy-content-addressed.sh rename to tests/ca/nix-copy.sh diff --git a/tests/common.sh.in b/tests/common.sh.in index e3bcab507..de44a4da4 100644 --- a/tests/common.sh.in +++ b/tests/common.sh.in @@ -11,7 +11,7 @@ export NIX_LOCALSTATE_DIR=$TEST_ROOT/var export NIX_LOG_DIR=$TEST_ROOT/var/log/nix export NIX_STATE_DIR=$TEST_ROOT/var/nix export NIX_CONF_DIR=$TEST_ROOT/etc -export NIX_DAEMON_SOCKET_PATH=$TEST_ROOT/daemon-socket +export NIX_DAEMON_SOCKET_PATH=$TEST_ROOT/dSocket unset NIX_USER_CONF_FILES export _NIX_TEST_SHARED=$TEST_ROOT/shared if [[ -n $NIX_STORE ]]; then diff --git a/tests/local.mk b/tests/local.mk index 7deea9ac1..07cfd7a50 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -38,10 +38,10 @@ nix_tests = \ recursive.sh \ describe-stores.sh \ flakes.sh \ - content-addressed.sh \ - nix-copy-content-addressed.sh \ build.sh \ - compute-levels.sh + compute-levels.sh \ + ca/build.sh \ + ca/nix-copy.sh # parallel.sh install-tests += $(foreach x, $(nix_tests), tests/$(x)) From 5d1c05b07561c841c68eb3ff9698ce9d2355fe41 Mon Sep 17 00:00:00 2001 From: regnat Date: Mon, 9 Nov 2020 13:47:06 +0100 Subject: [PATCH 013/510] SubstitutionGoal -> PathSubstitutionGoal To prepare for the upcoming DrvOutputSubstitutionGoal --- src/libstore/build/derivation-goal.cc | 8 +++---- src/libstore/build/entry-points.cc | 8 +++---- src/libstore/build/substitution-goal.cc | 32 ++++++++++++------------- src/libstore/build/substitution-goal.hh | 9 +++---- src/libstore/build/worker.cc | 12 +++++----- src/libstore/build/worker.hh | 12 +++++----- src/libstore/local-store.hh | 2 ++ 7 files changed, 43 insertions(+), 40 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index c29237f5c..7b97e575a 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -170,7 +170,7 @@ void DerivationGoal::getDerivation() return; } - addWaitee(upcast_goal(worker.makeSubstitutionGoal(drvPath))); + addWaitee(upcast_goal(worker.makePathSubstitutionGoal(drvPath))); state = &DerivationGoal::loadDerivation; } @@ -253,7 +253,7 @@ void DerivationGoal::haveDerivation() /* Nothing to wait for; tail call */ return DerivationGoal::gaveUpOnSubstitution(); } - addWaitee(upcast_goal(worker.makeSubstitutionGoal( + addWaitee(upcast_goal(worker.makePathSubstitutionGoal( status.known->path, buildMode == bmRepair ? 
Repair : NoRepair, getDerivationCA(*drv)))); @@ -337,7 +337,7 @@ void DerivationGoal::gaveUpOnSubstitution() if (!settings.useSubstitutes) throw Error("dependency '%s' of '%s' does not exist, and substitution is disabled", worker.store.printStorePath(i), worker.store.printStorePath(drvPath)); - addWaitee(upcast_goal(worker.makeSubstitutionGoal(i))); + addWaitee(upcast_goal(worker.makePathSubstitutionGoal(i))); } if (waitees.empty()) /* to prevent hang (no wake-up event) */ @@ -388,7 +388,7 @@ void DerivationGoal::repairClosure() worker.store.printStorePath(i), worker.store.printStorePath(drvPath)); auto drvPath2 = outputsToDrv.find(i); if (drvPath2 == outputsToDrv.end()) - addWaitee(upcast_goal(worker.makeSubstitutionGoal(i, Repair))); + addWaitee(upcast_goal(worker.makePathSubstitutionGoal(i, Repair))); else addWaitee(worker.makeDerivationGoal(drvPath2->second, StringSet(), bmRepair)); } diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index 01a564aba..686364440 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -15,7 +15,7 @@ void Store::buildPaths(const std::vector & drvPaths, Build if (path.path.isDerivation()) goals.insert(worker.makeDerivationGoal(path.path, path.outputs, buildMode)); else - goals.insert(worker.makeSubstitutionGoal(path.path, buildMode == bmRepair ? Repair : NoRepair)); + goals.insert(worker.makePathSubstitutionGoal(path.path, buildMode == bmRepair ? Repair : NoRepair)); } worker.run(goals); @@ -31,7 +31,7 @@ void Store::buildPaths(const std::vector & drvPaths, Build } if (i->exitCode != Goal::ecSuccess) { if (auto i2 = dynamic_cast(i.get())) failed.insert(i2->drvPath); - else if (auto i2 = dynamic_cast(i.get())) failed.insert(i2->storePath); + else if (auto i2 = dynamic_cast(i.get())) failed.insert(i2->storePath); } } @@ -90,7 +90,7 @@ void Store::ensurePath(const StorePath & path) if (isValidPath(path)) return; Worker worker(*this); - GoalPtr goal = worker.makeSubstitutionGoal(path); + GoalPtr goal = worker.makePathSubstitutionGoal(path); Goals goals = {goal}; worker.run(goals); @@ -108,7 +108,7 @@ void Store::ensurePath(const StorePath & path) void LocalStore::repairPath(const StorePath & path) { Worker worker(*this); - GoalPtr goal = worker.makeSubstitutionGoal(path, Repair); + GoalPtr goal = worker.makePathSubstitutionGoal(path, Repair); Goals goals = {goal}; worker.run(goals); diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index c4b0de78d..5d88b8758 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -5,20 +5,20 @@ namespace nix { -SubstitutionGoal::SubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional ca) +PathSubstitutionGoal::PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional ca) : Goal(worker) , storePath(storePath) , repair(repair) , ca(ca) { - state = &SubstitutionGoal::init; + state = &PathSubstitutionGoal::init; name = fmt("substitution of '%s'", worker.store.printStorePath(this->storePath)); trace("created"); maintainExpectedSubstitutions = std::make_unique>(worker.expectedSubstitutions); } -SubstitutionGoal::~SubstitutionGoal() +PathSubstitutionGoal::~PathSubstitutionGoal() { try { if (thr.joinable()) { @@ -32,13 +32,13 @@ SubstitutionGoal::~SubstitutionGoal() } -void SubstitutionGoal::work() +void PathSubstitutionGoal::work() { (this->*state)(); } -void SubstitutionGoal::init() +void 
PathSubstitutionGoal::init() { trace("init"); @@ -59,7 +59,7 @@ void SubstitutionGoal::init() } -void SubstitutionGoal::tryNext() +void PathSubstitutionGoal::tryNext() { trace("trying next substituter"); @@ -154,16 +154,16 @@ void SubstitutionGoal::tryNext() paths referenced by this one. */ for (auto & i : info->references) if (i != storePath) /* ignore self-references */ - addWaitee(worker.makeSubstitutionGoal(i)); + addWaitee(worker.makePathSubstitutionGoal(i)); if (waitees.empty()) /* to prevent hang (no wake-up event) */ referencesValid(); else - state = &SubstitutionGoal::referencesValid; + state = &PathSubstitutionGoal::referencesValid; } -void SubstitutionGoal::referencesValid() +void PathSubstitutionGoal::referencesValid() { trace("all references realised"); @@ -177,12 +177,12 @@ void SubstitutionGoal::referencesValid() if (i != storePath) /* ignore self-references */ assert(worker.store.isValidPath(i)); - state = &SubstitutionGoal::tryToRun; + state = &PathSubstitutionGoal::tryToRun; worker.wakeUp(shared_from_this()); } -void SubstitutionGoal::tryToRun() +void PathSubstitutionGoal::tryToRun() { trace("trying to run"); @@ -221,11 +221,11 @@ void SubstitutionGoal::tryToRun() worker.childStarted(shared_from_this(), {outPipe.readSide.get()}, true, false); - state = &SubstitutionGoal::finished; + state = &PathSubstitutionGoal::finished; } -void SubstitutionGoal::finished() +void PathSubstitutionGoal::finished() { trace("substitute finished"); @@ -249,7 +249,7 @@ void SubstitutionGoal::finished() } /* Try the next substitute. */ - state = &SubstitutionGoal::tryNext; + state = &PathSubstitutionGoal::tryNext; worker.wakeUp(shared_from_this()); return; } @@ -278,12 +278,12 @@ void SubstitutionGoal::finished() } -void SubstitutionGoal::handleChildOutput(int fd, const string & data) +void PathSubstitutionGoal::handleChildOutput(int fd, const string & data) { } -void SubstitutionGoal::handleEOF(int fd) +void PathSubstitutionGoal::handleEOF(int fd) { if (fd == outPipe.readSide.get()) worker.wakeUp(shared_from_this()); } diff --git a/src/libstore/build/substitution-goal.hh b/src/libstore/build/substitution-goal.hh index dee2cecbf..3b3cb7e32 100644 --- a/src/libstore/build/substitution-goal.hh +++ b/src/libstore/build/substitution-goal.hh @@ -8,7 +8,7 @@ namespace nix { class Worker; -struct SubstitutionGoal : public Goal +struct PathSubstitutionGoal : public Goal { /* The store path that should be realised through a substitute. 
*/ StorePath storePath; @@ -47,14 +47,15 @@ struct SubstitutionGoal : public Goal std::unique_ptr> maintainExpectedSubstitutions, maintainRunningSubstitutions, maintainExpectedNar, maintainExpectedDownload; - typedef void (SubstitutionGoal::*GoalState)(); + typedef void (PathSubstitutionGoal::*GoalState)(); GoalState state; /* Content address for recomputing store path */ std::optional ca; - SubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); - ~SubstitutionGoal(); +public: + PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); + ~PathSubstitutionGoal(); void timedOut(Error && ex) override { abort(); }; diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index b2223c3b6..619b1d69c 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -78,12 +78,12 @@ std::shared_ptr Worker::makeBasicDerivationGoal(const StorePath } -std::shared_ptr Worker::makeSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional ca) +std::shared_ptr Worker::makePathSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional ca) { - std::weak_ptr & goal_weak = substitutionGoals[path]; + std::weak_ptr & goal_weak = substitutionGoals[path]; auto goal = goal_weak.lock(); // FIXME if (!goal) { - goal = std::make_shared(path, *this, repair, ca); + goal = std::make_shared(path, *this, repair, ca); goal_weak = goal; wakeUp(goal); } @@ -109,7 +109,7 @@ void Worker::removeGoal(GoalPtr goal) { if (auto drvGoal = std::dynamic_pointer_cast(goal)) nix::removeGoal(drvGoal, derivationGoals); - else if (auto subGoal = std::dynamic_pointer_cast(goal)) + else if (auto subGoal = std::dynamic_pointer_cast(goal)) nix::removeGoal(subGoal, substitutionGoals); else assert(false); @@ -217,7 +217,7 @@ void Worker::run(const Goals & _topGoals) topGoals.insert(i); if (auto goal = dynamic_cast(i.get())) { topPaths.push_back({goal->drvPath, goal->wantedOutputs}); - } else if (auto goal = dynamic_cast(i.get())) { + } else if (auto goal = dynamic_cast(i.get())) { topPaths.push_back({goal->storePath}); } } @@ -471,7 +471,7 @@ void Worker::markContentsGood(const StorePath & path) } -GoalPtr upcast_goal(std::shared_ptr subGoal) { +GoalPtr upcast_goal(std::shared_ptr subGoal) { return subGoal; } diff --git a/src/libstore/build/worker.hh b/src/libstore/build/worker.hh index 82e711191..42acf8542 100644 --- a/src/libstore/build/worker.hh +++ b/src/libstore/build/worker.hh @@ -12,18 +12,18 @@ namespace nix { /* Forward definition. */ struct DerivationGoal; -struct SubstitutionGoal; +struct PathSubstitutionGoal; /* Workaround for not being able to declare a something like - class SubstitutionGoal : public Goal; + class PathSubstitutionGoal : public Goal; even when Goal is a complete type. This is still a static cast. The purpose of exporting it is to define it in - a place where `SubstitutionGoal` is concrete, and use it in a place where it + a place where `PathSubstitutionGoal` is concrete, and use it in a place where it is opaque. */ -GoalPtr upcast_goal(std::shared_ptr subGoal); +GoalPtr upcast_goal(std::shared_ptr subGoal); typedef std::chrono::time_point steady_time_point; @@ -72,7 +72,7 @@ private: /* Maps used to prevent multiple instantiations of a goal for the same derivation / path. */ std::map> derivationGoals; - std::map> substitutionGoals; + std::map> substitutionGoals; /* Goals waiting for busy paths to be unlocked. 
*/ WeakGoals waitingForAnyGoal; @@ -146,7 +146,7 @@ public: const StringSet & wantedOutputs, BuildMode buildMode = bmNormal); /* substitution goal */ - std::shared_ptr makeSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); + std::shared_ptr makePathSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); /* Remove a dead goal. */ void removeGoal(GoalPtr goal); diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index 03bb0218d..fc67f215a 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -281,7 +281,9 @@ private: void createUser(const std::string & userName, uid_t userId) override; friend struct LocalDerivationGoal; + friend struct PathSubstitutionGoal; friend struct SubstitutionGoal; + friend struct DerivationGoal; }; From df9d4f88d5aed0aa4ed67eb012e9f260550b7200 Mon Sep 17 00:00:00 2001 From: regnat Date: Mon, 9 Nov 2020 15:40:10 +0100 Subject: [PATCH 014/510] Allow substituting drv outputs when building --- src/libstore/build/derivation-goal.cc | 25 +++-- .../build/drv-output-substitution-goal.cc | 95 +++++++++++++++++++ .../build/drv-output-substitution-goal.hh | 50 ++++++++++ src/libstore/build/worker.cc | 22 ++++- src/libstore/build/worker.hh | 5 + 5 files changed, 185 insertions(+), 12 deletions(-) create mode 100644 src/libstore/build/drv-output-substitution-goal.cc create mode 100644 src/libstore/build/drv-output-substitution-goal.hh diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 7b97e575a..7dcd2a6eb 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -246,17 +246,22 @@ void DerivationGoal::haveDerivation() through substitutes. If that doesn't work, we'll build them. */ if (settings.useSubstitutes && parsedDrv->substitutesAllowed()) - for (auto & [_, status] : initialOutputs) { + for (auto & [outputName, status] : initialOutputs) { if (!status.wanted) continue; - if (!status.known) { - warn("do not know how to query for unknown floating content-addressed derivation output yet"); - /* Nothing to wait for; tail call */ - return DerivationGoal::gaveUpOnSubstitution(); - } - addWaitee(upcast_goal(worker.makePathSubstitutionGoal( - status.known->path, - buildMode == bmRepair ? Repair : NoRepair, - getDerivationCA(*drv)))); + if (!status.known) + addWaitee( + upcast_goal( + worker.makeDrvOutputSubstitutionGoal( + DrvOutput{status.outputHash, outputName}, + buildMode == bmRepair ? Repair : NoRepair + ) + ) + ); + else + addWaitee(upcast_goal(worker.makePathSubstitutionGoal( + status.known->path, + buildMode == bmRepair ? 
Repair : NoRepair, + getDerivationCA(*drv)))); } if (waitees.empty()) /* to prevent hang (no wake-up event) */ diff --git a/src/libstore/build/drv-output-substitution-goal.cc b/src/libstore/build/drv-output-substitution-goal.cc new file mode 100644 index 000000000..a5ac4c49d --- /dev/null +++ b/src/libstore/build/drv-output-substitution-goal.cc @@ -0,0 +1,95 @@ +#include "drv-output-substitution-goal.hh" +#include "worker.hh" +#include "substitution-goal.hh" + +namespace nix { + +DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(const DrvOutput& id, Worker & worker, RepairFlag repair, std::optional ca) + : Goal(worker) + , id(id) +{ + state = &DrvOutputSubstitutionGoal::init; + name = fmt("substitution of '%s'", id.to_string()); + trace("created"); +} + + +void DrvOutputSubstitutionGoal::init() +{ + trace("init"); + subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list>(); + tryNext(); +} + +void DrvOutputSubstitutionGoal::tryNext() +{ + trace("Trying next substituter"); + + if (subs.size() == 0) { + /* None left. Terminate this goal and let someone else deal + with it. */ + debug("drv output '%s' is required, but there is no substituter that can provide it", id.to_string()); + + /* Hack: don't indicate failure if there were no substituters. + In that case the calling derivation should just do a + build. */ + amDone(substituterFailed ? ecFailed : ecNoSubstituters); + + if (substituterFailed) { + worker.failedSubstitutions++; + worker.updateProgress(); + } + + return; + } + + auto sub = subs.front(); + subs.pop_front(); + + // FIXME: Make async + outputInfo = sub->queryRealisation(id); + if (!outputInfo) { + tryNext(); + return; + } + + addWaitee(worker.makePathSubstitutionGoal(outputInfo->outPath)); + + if (waitees.empty()) outPathValid(); + else state = &DrvOutputSubstitutionGoal::outPathValid; +} + +void DrvOutputSubstitutionGoal::outPathValid() +{ + assert(outputInfo); + trace("Output path substituted"); + + if (nrFailed > 0) { + debug("The output path of the derivation output '%s' could not be substituted", id.to_string()); + amDone(nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed); + return; + } + + worker.store.registerDrvOutput(*outputInfo); + finished(); +} + +void DrvOutputSubstitutionGoal::finished() +{ + trace("finished"); + amDone(ecSuccess); +} + +string DrvOutputSubstitutionGoal::key() +{ + /* "a$" ensures substitution goals happen before derivation + goals. */ + return "a$" + std::string(id.to_string()); +} + +void DrvOutputSubstitutionGoal::work() +{ + (this->*state)(); +} + +} diff --git a/src/libstore/build/drv-output-substitution-goal.hh b/src/libstore/build/drv-output-substitution-goal.hh new file mode 100644 index 000000000..63ab53d89 --- /dev/null +++ b/src/libstore/build/drv-output-substitution-goal.hh @@ -0,0 +1,50 @@ +#pragma once + +#include "store-api.hh" +#include "goal.hh" +#include "realisation.hh" + +namespace nix { + +class Worker; + +// Substitution of a derivation output. +// This is done in three steps: +// 1. Fetch the output info from a substituter +// 2. Substitute the corresponding output path +// 3. Register the output info +class DrvOutputSubstitutionGoal : public Goal { +private: + // The drv output we're trying to substitue + DrvOutput id; + + // The realisation corresponding to the given output id. + // Will be filled once we can get it. + std::optional outputInfo; + + /* The remaining substituters. */ + std::list> subs; + + /* Whether a substituter failed. 
*/ + bool substituterFailed = false; + +public: + DrvOutputSubstitutionGoal(const DrvOutput& id, Worker & worker, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); + + typedef void (DrvOutputSubstitutionGoal::*GoalState)(); + GoalState state; + + void init(); + void tryNext(); + void outPathValid(); + void finished(); + + void timedOut(Error && ex) override { abort(); }; + + string key() override; + + void work() override; + +}; + +} diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index 619b1d69c..616b17e61 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -1,6 +1,7 @@ #include "machines.hh" #include "worker.hh" #include "substitution-goal.hh" +#include "drv-output-substitution-goal.hh" #include "local-derivation-goal.hh" #include "hook-instance.hh" @@ -90,8 +91,20 @@ std::shared_ptr Worker::makePathSubstitutionGoal(const Sto return goal; } -template -static void removeGoal(std::shared_ptr goal, std::map> & goalMap) +std::shared_ptr Worker::makeDrvOutputSubstitutionGoal(const DrvOutput& id, RepairFlag repair, std::optional ca) +{ + std::weak_ptr & goal_weak = drvOutputSubstitutionGoals[id]; + auto goal = goal_weak.lock(); // FIXME + if (!goal) { + goal = std::make_shared(id, *this, repair, ca); + goal_weak = goal; + wakeUp(goal); + } + return goal; +} + +template +static void removeGoal(std::shared_ptr goal, std::map> & goalMap) { /* !!! inefficient */ for (auto i = goalMap.begin(); @@ -111,6 +124,8 @@ void Worker::removeGoal(GoalPtr goal) nix::removeGoal(drvGoal, derivationGoals); else if (auto subGoal = std::dynamic_pointer_cast(goal)) nix::removeGoal(subGoal, substitutionGoals); + else if (auto subGoal = std::dynamic_pointer_cast(goal)) + nix::removeGoal(subGoal, drvOutputSubstitutionGoals); else assert(false); if (topGoals.find(goal) != topGoals.end()) { @@ -474,5 +489,8 @@ void Worker::markContentsGood(const StorePath & path) GoalPtr upcast_goal(std::shared_ptr subGoal) { return subGoal; } +GoalPtr upcast_goal(std::shared_ptr subGoal) { + return subGoal; +} } diff --git a/src/libstore/build/worker.hh b/src/libstore/build/worker.hh index 42acf8542..918de35f6 100644 --- a/src/libstore/build/worker.hh +++ b/src/libstore/build/worker.hh @@ -4,6 +4,7 @@ #include "lock.hh" #include "store-api.hh" #include "goal.hh" +#include "realisation.hh" #include #include @@ -13,6 +14,7 @@ namespace nix { /* Forward definition. */ struct DerivationGoal; struct PathSubstitutionGoal; +class DrvOutputSubstitutionGoal; /* Workaround for not being able to declare a something like @@ -24,6 +26,7 @@ struct PathSubstitutionGoal; a place where `PathSubstitutionGoal` is concrete, and use it in a place where it is opaque. */ GoalPtr upcast_goal(std::shared_ptr subGoal); +GoalPtr upcast_goal(std::shared_ptr subGoal); typedef std::chrono::time_point steady_time_point; @@ -73,6 +76,7 @@ private: same derivation / path. */ std::map> derivationGoals; std::map> substitutionGoals; + std::map> drvOutputSubstitutionGoals; /* Goals waiting for busy paths to be unlocked. */ WeakGoals waitingForAnyGoal; @@ -147,6 +151,7 @@ public: /* substitution goal */ std::shared_ptr makePathSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); + std::shared_ptr makeDrvOutputSubstitutionGoal(const DrvOutput & id, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); /* Remove a dead goal. 
*/ void removeGoal(GoalPtr goal); From 93b5a59b674c0a29846828c7d14b434cc954f8ee Mon Sep 17 00:00:00 2001 From: regnat Date: Mon, 9 Nov 2020 16:04:43 +0100 Subject: [PATCH 015/510] Add a test for the remote caching of CA derivations --- tests/ca/substitute.sh | 21 +++++++++++++++++++++ tests/local.mk | 3 ++- tests/push-to-store.sh | 6 ++++-- 3 files changed, 27 insertions(+), 3 deletions(-) create mode 100644 tests/ca/substitute.sh diff --git a/tests/ca/substitute.sh b/tests/ca/substitute.sh new file mode 100644 index 000000000..79a6ef8b1 --- /dev/null +++ b/tests/ca/substitute.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +# Ensure that binary substitution works properly with ca derivations + +source common.sh + +sed -i 's/experimental-features .*/& ca-derivations ca-references/' "$NIX_CONF_DIR"/nix.conf + +export REMOTE_STORE=file://$TEST_ROOT/binary_cache + +buildDrvs () { + nix build --file ./content-addressed.nix -L --no-link "$@" +} + +# Populate the remote cache +buildDrvs --post-build-hook ../push-to-store.sh + +# Restart the build on an empty store, ensuring that we don't build +clearStore +buildDrvs --substitute --substituters $REMOTE_STORE --no-require-sigs -j0 + diff --git a/tests/local.mk b/tests/local.mk index 07cfd7a50..e17555051 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -41,7 +41,8 @@ nix_tests = \ build.sh \ compute-levels.sh \ ca/build.sh \ - ca/nix-copy.sh + ca/nix-copy.sh \ + ca/substitute.sh # parallel.sh install-tests += $(foreach x, $(nix_tests), tests/$(x)) diff --git a/tests/push-to-store.sh b/tests/push-to-store.sh index 6aadb916b..25352c751 100755 --- a/tests/push-to-store.sh +++ b/tests/push-to-store.sh @@ -1,4 +1,6 @@ #!/bin/sh -echo Pushing "$@" to "$REMOTE_STORE" -printf "%s" "$OUT_PATHS" | xargs -d: nix copy --to "$REMOTE_STORE" --no-require-sigs +set -x + +echo Pushing "$OUT_PATHS" to "$REMOTE_STORE" +printf "%s" "$DRV_PATH" | xargs nix copy --to "$REMOTE_STORE" --no-require-sigs From 9931f18c2dfff2642dea8e1a153eaaa58d7e3c8a Mon Sep 17 00:00:00 2001 From: Kjetil Orbekk Date: Sun, 21 Feb 2021 11:08:28 -0500 Subject: [PATCH 016/510] Add support for bare git repositories with git+file MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Local git repositories are normally used directly instead of cloning. This commit checks if a repo is bare and forces a clone. Co-authored-by: Théophane Hufschmitt --- src/libfetchers/git.cc | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 81c647f89..4f9db1bcd 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -153,12 +153,14 @@ struct GitInputScheme : InputScheme std::pair getActualUrl(const Input & input) const { - // Don't clone file:// URIs (but otherwise treat them the - // same as remote URIs, i.e. don't use the working tree or - // HEAD). + // file:// URIs are normally not cloned (but otherwise treated the + // same as remote URIs, i.e. we don't use the working tree or + // HEAD). Exception: If _NIX_FORCE_HTTP is set, or the repo is a bare git + // repo, treat as a remote URI to force a clone. static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing auto url = parseURL(getStrAttr(input.attrs, "url")); - bool isLocal = url.scheme == "file" && !forceHttp; + bool isBareRepository = url.scheme == "file" && !pathExists(url.path + "/.git"); + bool isLocal = url.scheme == "file" && !forceHttp && !isBareRepository; return {isLocal, isLocal ? 
url.path : url.base}; } From 92a234322f5a46b65825c748220cef40209eeacd Mon Sep 17 00:00:00 2001 From: Kjetil Orbekk Date: Sun, 21 Feb 2021 10:41:46 -0500 Subject: [PATCH 017/510] Add test for git+file with bare repository --- tests/flakes.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/flakes.sh b/tests/flakes.sh index 25ba2ac43..9747aba7a 100644 --- a/tests/flakes.sh +++ b/tests/flakes.sh @@ -25,6 +25,7 @@ templatesDir=$TEST_ROOT/templates nonFlakeDir=$TEST_ROOT/nonFlake flakeA=$TEST_ROOT/flakeA flakeB=$TEST_ROOT/flakeB +flakeGitBare=$TEST_ROOT/flakeGitBare for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $templatesDir $nonFlakeDir $flakeA $flakeB; do rm -rf $repo $repo.tmp @@ -604,6 +605,11 @@ nix flake update $flake3Dir [[ $(jq -c .nodes.flake2.inputs.flake1 $flake3Dir/flake.lock) =~ '["foo"]' ]] [[ $(jq .nodes.foo.locked.url $flake3Dir/flake.lock) =~ flake7 ]] +# Test git+file with bare repo. +rm -rf $flakeGitBare +git clone --bare $flake1Dir $flakeGitBare +nix build -o $TEST_ROOT/result git+file://$flakeGitBare + # Test Mercurial flakes. rm -rf $flake5Dir hg init $flake5Dir From 7ce10924c74e9e037b05558aeb5f0639df5955f6 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 1 Mar 2021 15:07:09 +0000 Subject: [PATCH 018/510] Fix bad wanted output error as requested - UsageError -> Error - include drv path too --- src/libstore/build/derivation-goal.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 530f8829a..4c3bccf25 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -1274,7 +1274,9 @@ void DerivationGoal::checkPathValidity() // If we requested specific elements, the loop above removes all the valid // ones, so any that are left must be invalid. if (!wantedOutputsLeft.empty()) - throw UsageError("some wanted outputs are not provided by the derivation: %s", concatStringsSep(", ", wantedOutputsLeft)); + throw Error("derivation '%s' does not have wanted outputs %s", + worker.store.printStorePath(drvPath), + concatStringsSep(", ", quoteStrings(wantedOutputsLeft))); } From fc6bfb261d50102016ed812ecf9949d41fe539f7 Mon Sep 17 00:00:00 2001 From: dramforever Date: Tue, 2 Mar 2021 21:56:50 +0800 Subject: [PATCH 019/510] libfetchers/tarball: Lock on effectiveUrl Basically, if a tarball URL is used as a flake input, and the URL leads to a redirect, the final redirect destination would be recorded as the locked URL. This allows tarballs under https://nixos.org/channels to be used as flake inputs. If we, as before, lock on to the original URL it would break every time the channel updates. 
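
For example (illustrative only; the channel name below is just an example,
not something this patch pins):

    # flake.nix sketch
    {
      inputs.nixpkgs.url =
        "https://nixos.org/channels/nixos-unstable/nixexprs.tar.xz";
      outputs = { self, nixpkgs }: { };
    }

With this change the lock file records the URL the channel redirects to at
lock time, so subsequent channel updates no longer invalidate the input.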
--- src/libfetchers/fetchers.hh | 8 +++++++- src/libfetchers/github.cc | 6 +++--- src/libfetchers/tarball.cc | 19 ++++++++++++++----- 3 files changed, 24 insertions(+), 9 deletions(-) diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh index a72cfafa4..c6b219c02 100644 --- a/src/libfetchers/fetchers.hh +++ b/src/libfetchers/fetchers.hh @@ -145,7 +145,13 @@ DownloadFileResult downloadFile( bool immutable, const Headers & headers = {}); -std::pair downloadTarball( +struct DownloadTarballMeta +{ + time_t lastModified; + std::string effectiveUrl; +}; + +std::pair downloadTarball( ref store, const std::string & url, const std::string & name, diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 8352ef02d..3e5ad75a8 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -207,16 +207,16 @@ struct GitArchiveInputScheme : InputScheme auto url = getDownloadUrl(input); - auto [tree, lastModified] = downloadTarball(store, url.url, "source", true, url.headers); + auto [tree, meta] = downloadTarball(store, url.url, "source", true, url.headers); - input.attrs.insert_or_assign("lastModified", uint64_t(lastModified)); + input.attrs.insert_or_assign("lastModified", uint64_t(meta.lastModified)); getCache()->add( store, immutableAttrs, { {"rev", rev->gitRev()}, - {"lastModified", uint64_t(lastModified)} + {"lastModified", uint64_t(meta.lastModified)} }, tree.storePath, true); diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index b8d7d2c70..bd05bb2f1 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -109,7 +109,7 @@ DownloadFileResult downloadFile( }; } -std::pair downloadTarball( +std::pair downloadTarball( ref store, const std::string & url, const std::string & name, @@ -127,7 +127,10 @@ std::pair downloadTarball( if (cached && !cached->expired) return { Tree(store->toRealPath(cached->storePath), std::move(cached->storePath)), - getIntAttr(cached->infoAttrs, "lastModified") + { + .lastModified = time_t(getIntAttr(cached->infoAttrs, "lastModified")), + .effectiveUrl = maybeGetStrAttr(cached->infoAttrs, "effectiveUrl").value_or(url), + }, }; auto res = downloadFile(store, url, name, immutable, headers); @@ -152,6 +155,7 @@ std::pair downloadTarball( Attrs infoAttrs({ {"lastModified", uint64_t(lastModified)}, + {"effectiveUrl", res.effectiveUrl}, {"etag", res.etag}, }); @@ -164,7 +168,10 @@ std::pair downloadTarball( return { Tree(store->toRealPath(*unpackedStorePath), std::move(*unpackedStorePath)), - lastModified, + { + .lastModified = lastModified, + .effectiveUrl = res.effectiveUrl, + }, }; } @@ -223,9 +230,11 @@ struct TarballInputScheme : InputScheme return true; } - std::pair fetch(ref store, const Input & input) override + std::pair fetch(ref store, const Input & _input) override { - auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), "source", false).first; + Input input(_input); + auto [tree, meta] = downloadTarball(store, getStrAttr(input.attrs, "url"), "source", false); + input.attrs.insert_or_assign("url", meta.effectiveUrl); return {std::move(tree), input}; } }; From 7331da99abead2b59efcfdaf729cb1034642b630 Mon Sep 17 00:00:00 2001 From: regnat Date: Fri, 5 Feb 2021 13:35:31 +0100 Subject: [PATCH 020/510] Make NIX_SHOW_STATS work with new-style commands --- src/libcmd/command.hh | 2 ++ src/libcmd/installables.cc | 6 ++++++ 2 files changed, 8 insertions(+) diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh index c02193924..e66c697eb 100644 --- a/src/libcmd/command.hh +++ 
b/src/libcmd/command.hh @@ -48,6 +48,8 @@ struct EvalCommand : virtual StoreCommand, MixEvalArgs ref getEvalState(); std::shared_ptr evalState; + + ~EvalCommand(); }; struct MixFlakeOptions : virtual Args, EvalCommand diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 4739dc974..7102f5a1a 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -280,6 +280,12 @@ ref EvalCommand::getEvalState() return ref(evalState); } +EvalCommand::~EvalCommand() +{ + if (evalState) + evalState->printStats(); +} + void completeFlakeRef(ref store, std::string_view prefix) { if (prefix == "") From 665d4ec2dac6734caff9de5b030be123cb7276ef Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 3 Mar 2021 17:52:57 +0100 Subject: [PATCH 021/510] nix repl :doc: Don't return docs for partially applied primops This gives misleading results for Nixpkgs functions like lib.toUpper. Fixes #4596. --- src/libexpr/eval.cc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index e2f2308aa..3afe2e47b 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -592,10 +592,8 @@ Value & EvalState::getBuiltin(const string & name) std::optional EvalState::getDoc(Value & v) { - if (v.isPrimOp() || v.isPrimOpApp()) { + if (v.isPrimOp()) { auto v2 = &v; - while (v2->isPrimOpApp()) - v2 = v2->primOpApp.left; if (v2->primOp->doc) return Doc { .pos = noPos, From e16431b4665c0362f66bace7734fed0a6c0692d5 Mon Sep 17 00:00:00 2001 From: DavHau Date: Thu, 4 Mar 2021 16:14:23 +0700 Subject: [PATCH 022/510] improve man page for nix.conf (builders) --- src/libstore/globals.hh | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index a51d9c2f1..bf0767dfa 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -206,7 +206,17 @@ public: Setting builders{ this, "@" + nixConfDir + "/machines", "builders", - "A semicolon-separated list of build machines, in the format of `nix.machines`."}; + R"( + A semicolon-separated list of build machines, where each machine follows this format: + + {protocol}://{user}@{host} [{comma sep. systems} - {maxJobs} {speedFactor} {comma sep. features}] + + Examples: + + ssh://root@builder1.com + + ssh://root@builder2.com x86_64-linux,aarch64-linux - 40 20 nixos-test,benchmark,big-parallel,kvm + )"}; Setting buildersUseSubstitutes{ this, false, "builders-use-substitutes", From 6212e89bf604d61fc896f21f66908be6fbbfcc5d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 5 Mar 2021 00:49:46 +0000 Subject: [PATCH 023/510] Avoid some StorePath -> Path -> StorePath roundtrips There were done when StorePath was defined in Rust and there were some FFI issues. This is no longer an issue. 
--- src/libstore/misc.cc | 35 +++++++++++++++-------------------- 1 file changed, 15 insertions(+), 20 deletions(-) diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index ad4dccef9..f58816ad8 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -22,55 +22,53 @@ void Store::computeFSClosure(const StorePathSet & startPaths, Sync state_(State{0, paths_, 0}); - std::function enqueue; + std::function enqueue; std::condition_variable done; - enqueue = [&](const Path & path) -> void { + enqueue = [&](const StorePath & path) -> void { { auto state(state_.lock()); if (state->exc) return; - if (!state->paths.insert(parseStorePath(path)).second) return; + if (!state->paths.insert(path).second) return; state->pending++; } - queryPathInfo(parseStorePath(path), {[&, pathS(path)](std::future> fut) { + queryPathInfo(path, {[&](std::future> fut) { // FIXME: calls to isValidPath() should be async try { auto info = fut.get(); - auto path = parseStorePath(pathS); - if (flipDirection) { StorePathSet referrers; queryReferrers(path, referrers); for (auto & ref : referrers) if (ref != path) - enqueue(printStorePath(ref)); + enqueue(ref); if (includeOutputs) for (auto & i : queryValidDerivers(path)) - enqueue(printStorePath(i)); + enqueue(i); if (includeDerivers && path.isDerivation()) for (auto & i : queryDerivationOutputs(path)) if (isValidPath(i) && queryPathInfo(i)->deriver == path) - enqueue(printStorePath(i)); + enqueue(i); } else { for (auto & ref : info->references) if (ref != path) - enqueue(printStorePath(ref)); + enqueue(ref); if (includeOutputs && path.isDerivation()) for (auto & i : queryDerivationOutputs(path)) - if (isValidPath(i)) enqueue(printStorePath(i)); + if (isValidPath(i)) enqueue(i); if (includeDerivers && info->deriver && isValidPath(*info->deriver)) - enqueue(printStorePath(*info->deriver)); + enqueue(*info->deriver); } @@ -90,7 +88,7 @@ void Store::computeFSClosure(const StorePathSet & startPaths, }; for (auto & startPath : startPaths) - enqueue(printStorePath(startPath)); + enqueue(startPath); { auto state(state_.lock()); @@ -160,13 +158,10 @@ void Store::queryMissing(const std::vector & targets, }; auto checkOutput = [&]( - const Path & drvPathS, ref drv, const Path & outPathS, ref> drvState_) + const StorePath & drvPath, ref drv, const StorePath & outPath, ref> drvState_) { if (drvState_->lock()->done) return; - auto drvPath = parseStorePath(drvPathS); - auto outPath = parseStorePath(outPathS); - SubstitutablePathInfos infos; querySubstitutablePathInfos({{outPath, getDerivationCA(*drv)}}, infos); @@ -203,7 +198,7 @@ void Store::queryMissing(const std::vector & targets, return; } - PathSet invalid; + StorePathSet invalid; /* true for regular derivations, and CA derivations for which we have a trust mapping for all wanted outputs. 
*/ auto knownOutputPaths = true; @@ -213,7 +208,7 @@ void Store::queryMissing(const std::vector & targets, break; } if (wantOutput(outputName, path.outputs) && !isValidPath(*pathOpt)) - invalid.insert(printStorePath(*pathOpt)); + invalid.insert(*pathOpt); } if (knownOutputPaths && invalid.empty()) return; @@ -223,7 +218,7 @@ void Store::queryMissing(const std::vector & targets, if (knownOutputPaths && settings.useSubstitutes && parsedDrv.substitutesAllowed()) { auto drvState = make_ref>(DrvState(invalid.size())); for (auto & output : invalid) - pool.enqueue(std::bind(checkOutput, printStorePath(path.path), drv, output, drvState)); + pool.enqueue(std::bind(checkOutput, path.path, drv, output, drvState)); } else mustBuildDrv(path.path, *drv); From 6e849e3b0a6eb46e6dc65cbd091cc829eab09a5f Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Date: Wed, 3 Mar 2021 14:46:15 -0800 Subject: [PATCH 024/510] nix-build: set execfail When starting a nix-shell with `-i` it was previously possible for it to silently fail in the scenario where the specified interpreter didn't exist. This happened due to the `exec` call masking the issue. With this change we enable `execfail`, which causes the script using `nix-shell` as interpreter to correctly exit with code 127. Fixes: #4598 --- src/nix-build/nix-build.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 7b4a53919..65b85b304 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -447,6 +447,7 @@ static void main_nix_build(int argc, char * * argv) "unset NIX_ENFORCE_PURITY; " "shopt -u nullglob; " "unset TZ; %6%" + "shopt -s execfail;" "%7%", shellEscape(tmpDir), (pure ? "" : "p=$PATH; "), From ac8ba2eae4fc649d7a3a19815631b4d76e60d74a Mon Sep 17 00:00:00 2001 From: "Travis A. Everett" Date: Sat, 6 Mar 2021 19:51:29 -0600 Subject: [PATCH 025/510] remove doc for obsolete --no-build-hook flag `--no-build-hook` appears to have been removed in 25f32625e2f2a3a1e1b3a3811da82f21c3a3b880 --- doc/manual/src/command-ref/opt-common.md | 9 --------- 1 file changed, 9 deletions(-) diff --git a/doc/manual/src/command-ref/opt-common.md b/doc/manual/src/command-ref/opt-common.md index 9650f53f8..bc8eb6796 100644 --- a/doc/manual/src/command-ref/opt-common.md +++ b/doc/manual/src/command-ref/opt-common.md @@ -134,15 +134,6 @@ Most Nix commands accept the following command-line options: failure in obtaining the substitutes to lead to a full build from source (with the related consumption of resources). - - `--no-build-hook` - Disables the build hook mechanism. This allows to ignore remote - builders if they are setup on the machine. - - It's useful in cases where the bandwidth between the client and the - remote builder is too low. In that case it can take more time to - upload the sources to the remote builder and fetch back the result - than to do the computation locally. - - `--readonly-mode` When this option is used, no attempt is made to open the Nix database. 
Most Nix operations do need database access, so those From 89013bdd7ed4007871cc421315b51b7cada8edff Mon Sep 17 00:00:00 2001 From: regnat Date: Tue, 9 Mar 2021 10:11:25 +0100 Subject: [PATCH 026/510] Add a `nix realisation` command for working on realisations Currently only has `nix realisation info`, more to come probably --- src/nix/realisation.cc | 78 +++++++++++++++++++++++++++++++++++++ src/nix/realisation/info.md | 15 +++++++ 2 files changed, 93 insertions(+) create mode 100644 src/nix/realisation.cc create mode 100644 src/nix/realisation/info.md diff --git a/src/nix/realisation.cc b/src/nix/realisation.cc new file mode 100644 index 000000000..9ee9ccb91 --- /dev/null +++ b/src/nix/realisation.cc @@ -0,0 +1,78 @@ +#include "command.hh" +#include "common-args.hh" + +#include + +using namespace nix; + +struct CmdRealisation : virtual NixMultiCommand +{ + CmdRealisation() : MultiCommand(RegisterCommand::getCommandsFor({"realisation"})) + { } + + std::string description() override + { + return "manipulate a Nix realisation"; + } + + Category category() override { return catUtility; } + + void run() override + { + if (!command) + throw UsageError("'nix realisation' requires a sub-command."); + command->second->prepare(); + command->second->run(); + } +}; + +static auto rCmdRealisation = registerCommand("realisation"); + +struct CmdRealisationInfo : RealisedPathsCommand, MixJSON +{ + std::string description() override + { + return "query information about one or several realisations"; + } + + std::string doc() override + { + return + #include "realisation/info.md" + ; + } + + Category category() override { return catSecondary; } + + void run(ref store, std::vector paths) override + { + settings.requireExperimentalFeature("ca-derivations"); + if (json) { + nlohmann::json res = nlohmann::json::array(); + for (auto & path : paths) { + nlohmann::json currentPath; + if (auto realisation = std::get_if(&path.raw)) + currentPath = realisation->toJSON(); + else + currentPath["opaquePath"] = store->printStorePath(path.path()); + + res.push_back(currentPath); + } + std::cout << res.dump(); + } + else { + for (auto & path : paths) { + if (auto realisation = std::get_if(&path.raw)) { + std::cout << + realisation->id.to_string() << " " << + store->printStorePath(realisation->outPath); + } else + std::cout << store->printStorePath(path.path()); + + std::cout << std::endl; + } + } + } +}; + +static auto rCmdRealisationInfo = registerCommand2({"realisation", "info"}); diff --git a/src/nix/realisation/info.md b/src/nix/realisation/info.md new file mode 100644 index 000000000..852240f44 --- /dev/null +++ b/src/nix/realisation/info.md @@ -0,0 +1,15 @@ +R"MdBoundary( +# Description + +Display some informations about the given realisation + +# Examples + +Show some information about the realisation of the `hello` package: + +```console +$ nix realisation info nixpkgs#hello --json +[{"id":"sha256:3d382378a00588e064ee30be96dd0fa7e7df7cf3fbcace85a0e7b7dada1eef25!out","outPath":"fd3m7xawvrqcg98kgz5hc2vk3x9q0lh7-hello"}] +``` + +)MdBoundary" From 8a0c00b85600991cdb9aa05902defec6ac44b777 Mon Sep 17 00:00:00 2001 From: Yorick van Pelt Date: Tue, 10 Dec 2019 15:47:38 +0700 Subject: [PATCH 027/510] Use libarchive for all compression --- src/libstore/filetransfer.cc | 2 +- src/libutil/compression.cc | 420 +++++++++-------------------------- src/libutil/compression.hh | 10 +- src/libutil/serialise.cc | 56 ++++- src/libutil/serialise.hh | 8 + src/libutil/tarfile.cc | 104 ++++----- src/libutil/tarfile.hh | 19 ++ 7 files 
changed, 241 insertions(+), 378 deletions(-) diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 8ea5cdc9d..514ab3bf9 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -148,7 +148,7 @@ struct curlFileTransfer : public FileTransfer } LambdaSink finalSink; - std::shared_ptr decompressionSink; + std::shared_ptr decompressionSink; std::optional errorSink; std::exception_ptr writeException; diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 986ba2976..8ba536000 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -1,10 +1,13 @@ #include "compression.hh" +#include "tarfile.hh" #include "util.hh" #include "finally.hh" #include "logging.hh" #include #include +#include +#include #include #include @@ -35,6 +38,80 @@ struct ChunkedCompressionSink : CompressionSink virtual void writeInternal(std::string_view data) = 0; }; +struct ArchiveDecompressionSource : Source +{ + std::unique_ptr archive = 0; + Source & src; + ArchiveDecompressionSource(Source & src) : src(src) {} + ~ArchiveDecompressionSource() override {} + size_t read(char * data, size_t len) override { + struct archive_entry* ae; + if (!archive) { + archive = std::make_unique(src, true); + this->archive->check(archive_read_next_header(this->archive->archive, &ae), "Failed to read header (%s)"); + if (archive_filter_count(this->archive->archive) < 2) { + throw CompressionError("Input compression not recognized."); + } + } + ssize_t result = archive_read_data(this->archive->archive, data, len); + if (result > 0) return result; + if (result == 0) { + throw EndOfFile("reached end of compressed file"); + } + this->archive->check(result, "Failed to read compressed data (%s)"); + return result; + } +}; +struct ArchiveCompressionSink : CompressionSink +{ + Sink & nextSink; + struct archive* archive; + ArchiveCompressionSink(Sink & nextSink, std::string format, bool parallel) : nextSink(nextSink) { + archive = archive_write_new(); + if (!archive) throw Error("failed to initialize libarchive"); + check(archive_write_add_filter_by_name(archive, format.c_str()), "Couldn't initialize compression (%s)"); + check(archive_write_set_format_raw(archive)); + if (format == "xz" && parallel) { + check(archive_write_set_filter_option(archive, format.c_str(), "threads", "0")); + } + // disable internal buffering + check(archive_write_set_bytes_per_block(archive, 0)); + // disable output padding + check(archive_write_set_bytes_in_last_block(archive, 1)); + open(); + } + ~ArchiveCompressionSink() override { + if (archive) archive_write_free(archive); + } + void finish() override { + flush(); + check(archive_write_close(archive)); + } + void check(int err, const char *reason="Failed to compress (%s)") { + if (err == ARCHIVE_EOF) + throw EndOfFile("reached end of archive"); + else if (err != ARCHIVE_OK) + throw Error(reason, archive_error_string(this->archive)); + } + void write(std::string_view data) override { + ssize_t result = archive_write_data(archive, data.data(), data.length()); + if (result <= 0) check(result); + } +private: + void open() { + check(archive_write_open(archive, this, NULL, ArchiveCompressionSink::callback_write, NULL)); + struct archive_entry *ae = archive_entry_new(); + archive_entry_set_filetype(ae, AE_IFREG); + check(archive_write_header(archive, ae)); + archive_entry_free(ae); + } + static ssize_t callback_write(struct archive *archive, void *_self, const void *buffer, size_t length) { + ArchiveCompressionSink *self = 
(ArchiveCompressionSink *)_self; + self->nextSink({(const char*)buffer, length}); + return length; + } +}; + struct NoneSink : CompressionSink { Sink & nextSink; @@ -43,171 +120,6 @@ struct NoneSink : CompressionSink void write(std::string_view data) override { nextSink(data); } }; -struct GzipDecompressionSink : CompressionSink -{ - Sink & nextSink; - z_stream strm; - bool finished = false; - uint8_t outbuf[BUFSIZ]; - - GzipDecompressionSink(Sink & nextSink) : nextSink(nextSink) - { - strm.zalloc = Z_NULL; - strm.zfree = Z_NULL; - strm.opaque = Z_NULL; - strm.avail_in = 0; - strm.next_in = Z_NULL; - strm.next_out = outbuf; - strm.avail_out = sizeof(outbuf); - - // Enable gzip and zlib decoding (+32) with 15 windowBits - int ret = inflateInit2(&strm,15+32); - if (ret != Z_OK) - throw CompressionError("unable to initialise gzip encoder"); - } - - ~GzipDecompressionSink() - { - inflateEnd(&strm); - } - - void finish() override - { - CompressionSink::flush(); - write({}); - } - - void write(std::string_view data) override - { - assert(data.size() <= std::numeric_limits::max()); - - strm.next_in = (Bytef *) data.data(); - strm.avail_in = data.size(); - - while (!finished && (!data.data() || strm.avail_in)) { - checkInterrupt(); - - int ret = inflate(&strm,Z_SYNC_FLUSH); - if (ret != Z_OK && ret != Z_STREAM_END) - throw CompressionError("error while decompressing gzip file: %d (%d, %d)", - zError(ret), data.size(), strm.avail_in); - - finished = ret == Z_STREAM_END; - - if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) { - nextSink({(char *) outbuf, sizeof(outbuf) - strm.avail_out}); - strm.next_out = (Bytef *) outbuf; - strm.avail_out = sizeof(outbuf); - } - } - } -}; - -struct XzDecompressionSink : CompressionSink -{ - Sink & nextSink; - uint8_t outbuf[BUFSIZ]; - lzma_stream strm = LZMA_STREAM_INIT; - bool finished = false; - - XzDecompressionSink(Sink & nextSink) : nextSink(nextSink) - { - lzma_ret ret = lzma_stream_decoder( - &strm, UINT64_MAX, LZMA_CONCATENATED); - if (ret != LZMA_OK) - throw CompressionError("unable to initialise lzma decoder"); - - strm.next_out = outbuf; - strm.avail_out = sizeof(outbuf); - } - - ~XzDecompressionSink() - { - lzma_end(&strm); - } - - void finish() override - { - CompressionSink::flush(); - write({}); - } - - void write(std::string_view data) override - { - strm.next_in = (const unsigned char *) data.data(); - strm.avail_in = data.size(); - - while (!finished && (!data.data() || strm.avail_in)) { - checkInterrupt(); - - lzma_ret ret = lzma_code(&strm, data.data() ? 
LZMA_RUN : LZMA_FINISH); - if (ret != LZMA_OK && ret != LZMA_STREAM_END) - throw CompressionError("error %d while decompressing xz file", ret); - - finished = ret == LZMA_STREAM_END; - - if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) { - nextSink({(char *) outbuf, sizeof(outbuf) - strm.avail_out}); - strm.next_out = outbuf; - strm.avail_out = sizeof(outbuf); - } - } - } -}; - -struct BzipDecompressionSink : ChunkedCompressionSink -{ - Sink & nextSink; - bz_stream strm; - bool finished = false; - - BzipDecompressionSink(Sink & nextSink) : nextSink(nextSink) - { - memset(&strm, 0, sizeof(strm)); - int ret = BZ2_bzDecompressInit(&strm, 0, 0); - if (ret != BZ_OK) - throw CompressionError("unable to initialise bzip2 decoder"); - - strm.next_out = (char *) outbuf; - strm.avail_out = sizeof(outbuf); - } - - ~BzipDecompressionSink() - { - BZ2_bzDecompressEnd(&strm); - } - - void finish() override - { - flush(); - write({}); - } - - void writeInternal(std::string_view data) override - { - assert(data.size() <= std::numeric_limits::max()); - - strm.next_in = (char *) data.data(); - strm.avail_in = data.size(); - - while (strm.avail_in) { - checkInterrupt(); - - int ret = BZ2_bzDecompress(&strm); - if (ret != BZ_OK && ret != BZ_STREAM_END) - throw CompressionError("error while decompressing bzip2 file"); - - finished = ret == BZ_STREAM_END; - - if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) { - nextSink({(char *) outbuf, sizeof(outbuf) - strm.avail_out}); - strm.next_out = (char *) outbuf; - strm.avail_out = sizeof(outbuf); - } - } - } -}; - struct BrotliDecompressionSink : ChunkedCompressionSink { Sink & nextSink; @@ -261,161 +173,32 @@ struct BrotliDecompressionSink : ChunkedCompressionSink ref decompress(const std::string & method, const std::string & in) { - StringSink ssink; - auto sink = makeDecompressionSink(method, ssink); - (*sink)(in); - sink->finish(); - return ssink.s; + if (method == "br") { + StringSink ssink; + auto sink = makeDecompressionSink(method, ssink); + (*sink)(in); + sink->finish(); + return ssink.s; + } else { + StringSource ssrc(in); + auto src = makeDecompressionSource(ssrc); + return make_ref(src->drain()); + } } -ref makeDecompressionSink(const std::string & method, Sink & nextSink) +std::unique_ptr makeDecompressionSink(const std::string & method, Sink & nextSink) { if (method == "none" || method == "") - return make_ref(nextSink); - else if (method == "xz") - return make_ref(nextSink); - else if (method == "bzip2") - return make_ref(nextSink); - else if (method == "gzip") - return make_ref(nextSink); + return std::make_unique(nextSink); else if (method == "br") - return make_ref(nextSink); + return std::make_unique(nextSink); else - throw UnknownCompressionMethod("unknown compression method '%s'", method); + return sourceToSink([&](Source & source) { + auto decompressionSource = makeDecompressionSource(source); + decompressionSource->drainInto(nextSink); + }); } -struct XzCompressionSink : CompressionSink -{ - Sink & nextSink; - uint8_t outbuf[BUFSIZ]; - lzma_stream strm = LZMA_STREAM_INIT; - bool finished = false; - - XzCompressionSink(Sink & nextSink, bool parallel) : nextSink(nextSink) - { - lzma_ret ret; - bool done = false; - - if (parallel) { -#ifdef HAVE_LZMA_MT - lzma_mt mt_options = {}; - mt_options.flags = 0; - mt_options.timeout = 300; // Using the same setting as the xz cmd line - mt_options.preset = LZMA_PRESET_DEFAULT; - mt_options.filters = NULL; - mt_options.check = LZMA_CHECK_CRC64; - mt_options.threads = lzma_cputhreads(); - 
mt_options.block_size = 0; - if (mt_options.threads == 0) - mt_options.threads = 1; - // FIXME: maybe use lzma_stream_encoder_mt_memusage() to control the - // number of threads. - ret = lzma_stream_encoder_mt(&strm, &mt_options); - done = true; -#else - printMsg(lvlError, "warning: parallel XZ compression requested but not supported, falling back to single-threaded compression"); -#endif - } - - if (!done) - ret = lzma_easy_encoder(&strm, 6, LZMA_CHECK_CRC64); - - if (ret != LZMA_OK) - throw CompressionError("unable to initialise lzma encoder"); - - // FIXME: apply the x86 BCJ filter? - - strm.next_out = outbuf; - strm.avail_out = sizeof(outbuf); - } - - ~XzCompressionSink() - { - lzma_end(&strm); - } - - void finish() override - { - CompressionSink::flush(); - write({}); - } - - void write(std::string_view data) override - { - strm.next_in = (const unsigned char *) data.data(); - strm.avail_in = data.size(); - - while (!finished && (!data.data() || strm.avail_in)) { - checkInterrupt(); - - lzma_ret ret = lzma_code(&strm, data.data() ? LZMA_RUN : LZMA_FINISH); - if (ret != LZMA_OK && ret != LZMA_STREAM_END) - throw CompressionError("error %d while compressing xz file", ret); - - finished = ret == LZMA_STREAM_END; - - if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) { - nextSink({(const char *) outbuf, sizeof(outbuf) - strm.avail_out}); - strm.next_out = outbuf; - strm.avail_out = sizeof(outbuf); - } - } - } -}; - -struct BzipCompressionSink : ChunkedCompressionSink -{ - Sink & nextSink; - bz_stream strm; - bool finished = false; - - BzipCompressionSink(Sink & nextSink) : nextSink(nextSink) - { - memset(&strm, 0, sizeof(strm)); - int ret = BZ2_bzCompressInit(&strm, 9, 0, 30); - if (ret != BZ_OK) - throw CompressionError("unable to initialise bzip2 encoder"); - - strm.next_out = (char *) outbuf; - strm.avail_out = sizeof(outbuf); - } - - ~BzipCompressionSink() - { - BZ2_bzCompressEnd(&strm); - } - - void finish() override - { - flush(); - writeInternal({}); - } - - void writeInternal(std::string_view data) override - { - assert(data.size() <= std::numeric_limits::max()); - - strm.next_in = (char *) data.data(); - strm.avail_in = data.size(); - - while (!finished && (!data.data() || strm.avail_in)) { - checkInterrupt(); - - int ret = BZ2_bzCompress(&strm, data.data() ? 
BZ_RUN : BZ_FINISH); - if (ret != BZ_RUN_OK && ret != BZ_FINISH_OK && ret != BZ_STREAM_END) - throw CompressionError("error %d while compressing bzip2 file", ret); - - finished = ret == BZ_STREAM_END; - - if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) { - nextSink({(const char *) outbuf, sizeof(outbuf) - strm.avail_out}); - strm.next_out = (char *) outbuf; - strm.avail_out = sizeof(outbuf); - } - } - } -}; - struct BrotliCompressionSink : ChunkedCompressionSink { Sink & nextSink; @@ -468,15 +251,20 @@ struct BrotliCompressionSink : ChunkedCompressionSink } } }; +std::unique_ptr makeDecompressionSource(Source & prev) { + return std::unique_ptr(new ArchiveDecompressionSource(prev)); +} ref makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel) { + std::vector la_supports = { + "bzip2", "compress", "grzip", "gzip", "lrzip", "lz4", "lzip", "lzma", "lzop", "xz", "zstd" + }; + if (std::find(la_supports.begin(), la_supports.end(), method) != la_supports.end()) { + return make_ref(nextSink, method, parallel); + } if (method == "none") return make_ref(nextSink); - else if (method == "xz") - return make_ref(nextSink, parallel); - else if (method == "bzip2") - return make_ref(nextSink); else if (method == "br") return make_ref(nextSink); else diff --git a/src/libutil/compression.hh b/src/libutil/compression.hh index dd666a4e1..192cb3e91 100644 --- a/src/libutil/compression.hh +++ b/src/libutil/compression.hh @@ -8,14 +8,18 @@ namespace nix { -struct CompressionSink : BufferedSink +struct CompressionSink : BufferedSink, FinishSink { - virtual void finish() = 0; + using BufferedSink::operator (); + using BufferedSink::write; + using FinishSink::finish; }; +std::unique_ptr makeDecompressionSource(Source & prev); + ref decompress(const std::string & method, const std::string & in); -ref makeDecompressionSink(const std::string & method, Sink & nextSink); +std::unique_ptr makeDecompressionSink(const std::string & method, Sink & nextSink); ref compress(const std::string & method, const std::string & in, const bool parallel = false); diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index d1a16b6ba..374b48d79 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -201,6 +201,61 @@ static DefaultStackAllocator defaultAllocatorSingleton; StackAllocator *StackAllocator::defaultAllocator = &defaultAllocatorSingleton; +std::unique_ptr sourceToSink(std::function fun) +{ + struct SourceToSink : FinishSink + { + typedef boost::coroutines2::coroutine coro_t; + + std::function fun; + std::optional coro; + + SourceToSink(std::function fun) : fun(fun) + { + } + + std::string_view cur; + + void operator () (std::string_view in) override + { + if (in.empty()) return; + cur = in; + + if (!coro) + coro = coro_t::push_type(VirtualStackAllocator{}, [&](coro_t::pull_type & yield) { + LambdaSource source([&](char *out, size_t out_len) { + if (cur.empty()) { + yield(); + if (yield.get()) { + return (size_t)0; + } + } + + size_t n = std::min(cur.size(), out_len); + memcpy(out, cur.data(), n); + cur.remove_prefix(n); + return n; + }); + fun(source); + }); + + if (!*coro) { abort(); } + + if (!cur.empty()) (*coro)(false); + } + + void finish() { + if (!coro) return; + if (!*coro) abort(); + (*coro)(true); + if (*coro) abort(); + } + }; + + return std::make_unique(fun); +} + + std::unique_ptr sinkToSource( std::function fun, std::function eof) @@ -212,7 +267,6 @@ std::unique_ptr sinkToSource( std::function fun; std::function eof; std::optional coro; - bool 
started = false; SinkToSource(std::function fun, std::function eof) : fun(fun), eof(eof) diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 5bbbc7ce3..0fe6e8332 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -25,6 +25,13 @@ struct NullSink : Sink { } }; + +struct FinishSink : virtual Sink +{ + virtual void finish() = 0; +}; + + /* A buffered abstract sink. Warning: a BufferedSink should not be used from multiple threads concurrently. */ struct BufferedSink : virtual Sink @@ -281,6 +288,7 @@ struct ChainSource : Source size_t read(char * data, size_t len) override; }; +std::unique_ptr sourceToSink(std::function fun); /* Convert a function that feeds data into a Sink into a Source. The Source executes the function as a coroutine. */ diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc index 2da169ba7..b5e1cb4c0 100644 --- a/src/libutil/tarfile.cc +++ b/src/libutil/tarfile.cc @@ -2,83 +2,73 @@ #include #include "serialise.hh" +#include "tarfile.hh" namespace nix { +static int callback_open(struct archive *, void *self) { + return ARCHIVE_OK; +} -struct TarArchive { - struct archive * archive; - Source * source; - std::vector buffer; +static ssize_t callback_read(struct archive * archive, void * _self, const void * * buffer) { + TarArchive *self = (TarArchive *)_self; + *buffer = self->buffer.data(); - void check(int err, const char * reason = "failed to extract archive: %s") - { + try { + return self->source->read((char *) self->buffer.data(), 4096); + } catch (EndOfFile &) { + return 0; + } catch (std::exception &err) { + archive_set_error(archive, EIO, "Source threw exception: %s", err.what()); + + return -1; + } +} + +static int callback_close(struct archive *, void *self) { + return ARCHIVE_OK; +} + +void TarArchive::check(int err, const char *reason) +{ if (err == ARCHIVE_EOF) throw EndOfFile("reached end of archive"); else if (err != ARCHIVE_OK) throw Error(reason, archive_error_string(this->archive)); } - TarArchive(Source & source) : buffer(4096) - { - this->archive = archive_read_new(); - this->source = &source; +TarArchive::TarArchive(Source& source, bool raw) : buffer(4096) +{ + this->archive = archive_read_new(); + this->source = &source; + if (!raw) { archive_read_support_filter_all(archive); archive_read_support_format_all(archive); - check(archive_read_open(archive, - (void *)this, - TarArchive::callback_open, - TarArchive::callback_read, - TarArchive::callback_close), - "failed to open archive: %s"); - } - - TarArchive(const Path & path) - { - this->archive = archive_read_new(); - + } else { archive_read_support_filter_all(archive); - archive_read_support_format_all(archive); - check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s"); + archive_read_support_format_raw(archive); + archive_read_support_format_empty(archive); } + check(archive_read_open(archive, (void *)this, callback_open, callback_read, callback_close), "Failed to open archive (%s)"); +} - TarArchive(const TarArchive &) = delete; - void close() - { - check(archive_read_close(archive), "failed to close archive: %s"); - } +TarArchive::TarArchive(const Path &path) +{ + this->archive = archive_read_new(); - ~TarArchive() - { - if (this->archive) archive_read_free(this->archive); - } + archive_read_support_filter_all(archive); + archive_read_support_format_all(archive); + check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s"); +} -private: +void TarArchive::close() { + 
check(archive_read_close(this->archive), "Failed to close archive (%s)"); +} - static int callback_open(struct archive *, void * self) { - return ARCHIVE_OK; - } - - static ssize_t callback_read(struct archive * archive, void * _self, const void * * buffer) - { - auto self = (TarArchive *)_self; - *buffer = self->buffer.data(); - - try { - return self->source->read((char *) self->buffer.data(), 4096); - } catch (EndOfFile &) { - return 0; - } catch (std::exception & err) { - archive_set_error(archive, EIO, "source threw exception: %s", err.what()); - return -1; - } - } - - static int callback_close(struct archive *, void * self) { - return ARCHIVE_OK; - } -}; +TarArchive::~TarArchive() { + if (this->archive) archive_read_free(this->archive); +} static void extract_archive(TarArchive & archive, const Path & destDir) { diff --git a/src/libutil/tarfile.hh b/src/libutil/tarfile.hh index 89a024f1d..18adf3490 100644 --- a/src/libutil/tarfile.hh +++ b/src/libutil/tarfile.hh @@ -1,7 +1,26 @@ #include "serialise.hh" +#include namespace nix { +struct TarArchive { + struct archive *archive; + Source *source; + std::vector buffer; + + void check(int err, const char *reason = "Failed to extract archive (%s)"); + + TarArchive(Source& source, bool raw = false); + + TarArchive(const Path &path); + + // disable copy constructor + TarArchive(const TarArchive&) = delete; + + void close(); + + ~TarArchive(); +}; void unpackTarfile(Source & source, const Path & destDir); void unpackTarfile(const Path & tarFile, const Path & destDir); From 0431cf6d0992e7986afbb3d0ffd0a7e1cca8ae8a Mon Sep 17 00:00:00 2001 From: "Travis A. Everett" Date: Sun, 7 Feb 2021 15:34:24 -0600 Subject: [PATCH 028/510] fix nixbld user name/uid for macOS --- scripts/bigsur-nixbld-user-migration.sh | 46 +++++++++++++++++++++++++ scripts/install-darwin-multi-user.sh | 2 ++ scripts/install-multi-user.sh | 6 ++-- 3 files changed, 52 insertions(+), 2 deletions(-) create mode 100755 scripts/bigsur-nixbld-user-migration.sh diff --git a/scripts/bigsur-nixbld-user-migration.sh b/scripts/bigsur-nixbld-user-migration.sh new file mode 100755 index 000000000..f1619fd56 --- /dev/null +++ b/scripts/bigsur-nixbld-user-migration.sh @@ -0,0 +1,46 @@ +#!/usr/bin/env bash + +((NEW_NIX_FIRST_BUILD_UID=301)) + +id_available(){ + dscl . list /Users UniqueID | grep -E '\b'$1'\b' >/dev/null +} + +change_nixbld_names_and_ids(){ + local name uid next_id + ((next_id=NEW_NIX_FIRST_BUILD_UID)) + echo "Attempting to migrate nixbld users." + echo "Each user should change from nixbld# to _nixbld#" + echo "and their IDs relocated to $next_id+" + while read -r name uid; do + echo " Checking $name (uid: $uid)" + # iterate for a clean ID + while id_available "$next_id"; do + ((next_id++)) + if ((next_id >= 400)); then + echo "We've hit UID 400 without placing all of your users :(" + echo "You should use the commands in this script as a starting" + echo "point to review your UID-space and manually move the" + echo "remaining users (or delete them, if you don't need them)." + exit 1 + fi + done + + if [[ $name == _* ]]; then + echo " It looks like $name has already been renamed--skipping." + else + # first 3 are cleanup, it's OK if they aren't here + sudo dscl . delete /Users/$name dsAttrTypeNative:_writers_passwd &>/dev/null || true + sudo dscl . change /Users/$name NFSHomeDirectory "/private/var/empty 1" "/var/empty" &>/dev/null || true + # remove existing user from group + sudo dseditgroup -o edit -t user -d $name nixbld || true + sudo dscl . 
change /Users/$name UniqueID $uid $next_id + sudo dscl . change /Users/$name RecordName $name _$name + # add renamed user to group + sudo dseditgroup -o edit -t user -a _$name nixbld + echo " $name migrated to _$name (uid: $next_id)" + fi + done < <(dscl . list /Users UniqueID | grep nixbld | sort -n -k2) +} + +change_nixbld_names_and_ids diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index a27be2a43..f6575ae2f 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -4,6 +4,8 @@ set -eu set -o pipefail readonly PLIST_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist +NIX_FIRST_BUILD_UID="301" +NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d" dsclattr() { /usr/bin/dscl . -read "$1" \ diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index 5e8b4ac18..30ccf1764 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -25,7 +25,9 @@ readonly RED='\033[31m' readonly NIX_USER_COUNT=${NIX_USER_COUNT:-32} readonly NIX_BUILD_GROUP_ID="30000" readonly NIX_BUILD_GROUP_NAME="nixbld" -readonly NIX_FIRST_BUILD_UID="30001" +# darwin installer needs to override these +NIX_FIRST_BUILD_UID="30001" +NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d" # Please don't change this. We don't support it, because the # default shell profile that comes with Nix doesn't support it. readonly NIX_ROOT="/nix" @@ -104,7 +106,7 @@ EOF } nix_user_for_core() { - printf "nixbld%d" "$1" + printf "$NIX_BUILD_USER_NAME_TEMPLATE" "$1" } nix_uid_for_core() { From 826877cabf9374e0acd5408c6975ee332b1cccc8 Mon Sep 17 00:00:00 2001 From: regnat Date: Mon, 8 Mar 2021 11:56:33 +0100 Subject: [PATCH 029/510] Add some logic for signing realisations Not exposed anywhere, but built realisations are now signed (and this should be forwarded when copy-ing them around) --- src/libstore/build/local-derivation-goal.cc | 12 ++++-- src/libstore/ca-specific-schema.sql | 1 + src/libstore/local-store.cc | 29 ++++++++++--- src/libstore/local-store.hh | 4 +- src/libstore/realisation.cc | 46 ++++++++++++++++++++- src/libstore/realisation.hh | 8 ++++ src/libstore/store-api.hh | 5 +++ 7 files changed, 93 insertions(+), 12 deletions(-) diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 9c2f1dda6..048135ccf 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -2615,10 +2615,14 @@ void LocalDerivationGoal::registerOutputs() but it's fine to do in all cases. 
*/ if (settings.isExperimentalFeatureEnabled("ca-derivations")) { - for (auto& [outputName, newInfo] : infos) - worker.store.registerDrvOutput(Realisation{ - .id = DrvOutput{initialOutputs.at(outputName).outputHash, outputName}, - .outPath = newInfo.path}); + for (auto& [outputName, newInfo] : infos) { + auto thisRealisation = Realisation{ + .id = DrvOutput{initialOutputs.at(outputName).outputHash, + outputName}, + .outPath = newInfo.path}; + getLocalStore().signRealisation(thisRealisation); + worker.store.registerDrvOutput(thisRealisation); + } } } diff --git a/src/libstore/ca-specific-schema.sql b/src/libstore/ca-specific-schema.sql index 93c442826..20ee046a1 100644 --- a/src/libstore/ca-specific-schema.sql +++ b/src/libstore/ca-specific-schema.sql @@ -6,6 +6,7 @@ create table if not exists Realisations ( drvPath text not null, outputName text not null, -- symbolic output id, usually "out" outputPath integer not null, + signatures text, -- space-separated list primary key (drvPath, outputName), foreign key (outputPath) references ValidPaths(id) on delete cascade ); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 90fb4a4bd..6bc963f27 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -310,13 +310,13 @@ LocalStore::LocalStore(const Params & params) if (settings.isExperimentalFeatureEnabled("ca-derivations")) { state->stmts->RegisterRealisedOutput.create(state->db, R"( - insert or replace into Realisations (drvPath, outputName, outputPath) - values (?, ?, (select id from ValidPaths where path = ?)) + insert or replace into Realisations (drvPath, outputName, outputPath, signatures) + values (?, ?, (select id from ValidPaths where path = ?), ?) ; )"); state->stmts->QueryRealisedOutput.create(state->db, R"( - select Output.path from Realisations + select Output.path, Realisations.signatures from Realisations inner join ValidPaths as Output on Output.id = Realisations.outputPath where drvPath = ? and outputName = ? ; @@ -662,6 +662,7 @@ void LocalStore::registerDrvOutput(const Realisation & info) (info.id.strHash()) (info.id.outputName) (printStorePath(info.outPath)) + (concatStringsSep(" ", info.signatures)) .exec(); }); } @@ -1107,6 +1108,11 @@ bool LocalStore::pathInfoIsTrusted(const ValidPathInfo & info) return requireSigs && !info.checkSignatures(*this, getPublicKeys()); } +bool LocalStore::realisationIsUntrusted(const Realisation & realisation) +{ + return requireSigs && !realisation.checkSignatures(getPublicKeys()); +} + void LocalStore::addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) { @@ -1612,6 +1618,18 @@ void LocalStore::addSignatures(const StorePath & storePath, const StringSet & si } +void LocalStore::signRealisation(Realisation & realisation) +{ + // FIXME: keep secret keys in memory. + + auto secretKeyFiles = settings.secretKeyFiles; + + for (auto & secretKeyFile : secretKeyFiles.get()) { + SecretKey secretKey(readFile(secretKeyFile)); + realisation.sign(secretKey); + } +} + void LocalStore::signPathInfo(ValidPathInfo & info) { // FIXME: keep secret keys in memory. 
@@ -1649,8 +1667,9 @@ std::optional LocalStore::queryRealisation( if (!use.next()) return std::nullopt; auto outputPath = parseStorePath(use.getStr(0)); - return Ret{ - Realisation{.id = id, .outPath = outputPath}}; + auto signatures = tokenizeString(use.getStr(1)); + return Ret{Realisation{ + .id = id, .outPath = outputPath, .signatures = signatures}}; }); } } // namespace nix diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index fc67f215a..d54609f01 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -137,6 +137,7 @@ public: SubstitutablePathInfos & infos) override; bool pathInfoIsTrusted(const ValidPathInfo &) override; + bool realisationIsUntrusted(const Realisation & ) override; void addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) override; @@ -272,9 +273,10 @@ private: bool isValidPath_(State & state, const StorePath & path); void queryReferrers(State & state, const StorePath & path, StorePathSet & referrers); - /* Add signatures to a ValidPathInfo using the secret keys + /* Add signatures to a ValidPathInfo or Realisation using the secret keys specified by the ‘secret-key-files’ option. */ void signPathInfo(ValidPathInfo & info); + void signRealisation(Realisation &); Path getRealStoreDir() override { return realStoreDir; } diff --git a/src/libstore/realisation.cc b/src/libstore/realisation.cc index cd74af4ee..638065547 100644 --- a/src/libstore/realisation.cc +++ b/src/libstore/realisation.cc @@ -25,27 +25,69 @@ nlohmann::json Realisation::toJSON() const { return nlohmann::json{ {"id", id.to_string()}, {"outPath", outPath.to_string()}, + {"signatures", signatures}, }; } Realisation Realisation::fromJSON( const nlohmann::json& json, const std::string& whence) { - auto getField = [&](std::string fieldName) -> std::string { + auto getOptionalField = [&](std::string fieldName) -> std::optional { auto fieldIterator = json.find(fieldName); if (fieldIterator == json.end()) + return std::nullopt; + return *fieldIterator; + }; + auto getField = [&](std::string fieldName) -> std::string { + if (auto field = getOptionalField(fieldName)) + return *field; + else throw Error( "Drv output info file '%1%' is corrupt, missing field %2%", whence, fieldName); - return *fieldIterator; }; + StringSet signatures; + if (auto signaturesIterator = json.find("signatures"); signaturesIterator != json.end()) + signatures.insert(signaturesIterator->begin(), signaturesIterator->end()); + return Realisation{ .id = DrvOutput::parse(getField("id")), .outPath = StorePath(getField("outPath")), + .signatures = signatures, }; } +std::string Realisation::fingerprint() const +{ + auto serialized = toJSON(); + serialized.erase("signatures"); + return serialized.dump(); +} + +void Realisation::sign(const SecretKey & secretKey) +{ + signatures.insert(secretKey.signDetached(fingerprint())); +} + +bool Realisation::checkSignature(const PublicKeys & publicKeys, const std::string & sig) const +{ + return verifyDetached(fingerprint(), sig, publicKeys); +} + +size_t Realisation::checkSignatures(const PublicKeys & publicKeys) const +{ + // FIXME: Maybe we should return `maxSigs` if the realisation corresponds to + // an input-addressed one − because in that case the drv is enough to check + // it − but we can't know that here. 
+ + size_t good = 0; + for (auto & sig : signatures) + if (checkSignature(publicKeys, sig)) + good++; + return good; +} + StorePath RealisedPath::path() const { return std::visit([](auto && arg) { return arg.getPath(); }, raw); } diff --git a/src/libstore/realisation.hh b/src/libstore/realisation.hh index fc92d3c17..f5049c9e9 100644 --- a/src/libstore/realisation.hh +++ b/src/libstore/realisation.hh @@ -3,6 +3,7 @@ #include "path.hh" #include #include "comparator.hh" +#include "crypto.hh" namespace nix { @@ -25,9 +26,16 @@ struct Realisation { DrvOutput id; StorePath outPath; + StringSet signatures; + nlohmann::json toJSON() const; static Realisation fromJSON(const nlohmann::json& json, const std::string& whence); + std::string fingerprint() const; + void sign(const SecretKey &); + bool checkSignature(const PublicKeys & publicKeys, const std::string & sig) const; + size_t checkSignatures(const PublicKeys & publicKeys) const; + StorePath getPath() const { return outPath; } GENERATE_CMP(Realisation, me->id, me->outPath); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 71a28eeb8..0cd56d34e 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -389,6 +389,11 @@ public: return true; } + virtual bool realisationIsUntrusted(const Realisation & ) + { + return true; + } + protected: virtual void queryPathInfoUncached(const StorePath & path, From 3e6017f911127555cfbed71fe4a4df8f70d08bbb Mon Sep 17 00:00:00 2001 From: regnat Date: Mon, 8 Mar 2021 15:07:33 +0100 Subject: [PATCH 030/510] pathInfoIsTrusted -> pathInfoIsUntrusted I guess the rationale behind the old name wath that `pathInfoIsTrusted(info)` returns `true` iff we would need to `blindly` trust the path (because it has no valid signature and `requireSigs` is set), but I find it to be a really confusing footgun because it's quite natural to give it the opposite meaning. --- src/libstore/build/substitution-goal.cc | 2 +- src/libstore/local-store.cc | 4 ++-- src/libstore/local-store.hh | 2 +- src/libstore/store-api.hh | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index 5d88b8758..7b1ac126e 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -142,7 +142,7 @@ void PathSubstitutionGoal::tryNext() /* Bail out early if this substituter lacks a valid signature. LocalStore::addToStore() also checks for this, but only after we've downloaded the path. 
*/ - if (!sub->isTrusted && worker.store.pathInfoIsTrusted(*info)) + if (!sub->isTrusted && worker.store.pathInfoIsUntrusted(*info)) { warn("substituter '%s' does not have a valid signature for path '%s'", sub->getUri(), worker.store.printStorePath(storePath)); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 6bc963f27..950a9f74e 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1103,7 +1103,7 @@ const PublicKeys & LocalStore::getPublicKeys() return *state->publicKeys; } -bool LocalStore::pathInfoIsTrusted(const ValidPathInfo & info) +bool LocalStore::pathInfoIsUntrusted(const ValidPathInfo & info) { return requireSigs && !info.checkSignatures(*this, getPublicKeys()); } @@ -1116,7 +1116,7 @@ bool LocalStore::realisationIsUntrusted(const Realisation & realisation) void LocalStore::addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) { - if (checkSigs && pathInfoIsTrusted(info)) + if (checkSigs && pathInfoIsUntrusted(info)) throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path)); addTempRoot(info.path); diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index d54609f01..c311d295a 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -136,7 +136,7 @@ public: void querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos) override; - bool pathInfoIsTrusted(const ValidPathInfo &) override; + bool pathInfoIsUntrusted(const ValidPathInfo &) override; bool realisationIsUntrusted(const Realisation & ) override; void addToStore(const ValidPathInfo & info, Source & source, diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 0cd56d34e..b90aeaa4c 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -384,7 +384,7 @@ public: we don't really want to add the dependencies listed in a nar info we don't trust anyyways. 
*/ - virtual bool pathInfoIsTrusted(const ValidPathInfo &) + virtual bool pathInfoIsUntrusted(const ValidPathInfo &) { return true; } From 54ced9072b94515a756e1e8e76c92a42f0ccf366 Mon Sep 17 00:00:00 2001 From: regnat Date: Mon, 8 Mar 2021 16:43:11 +0100 Subject: [PATCH 031/510] Check the signatures when copying store paths around Broken atm --- src/libstore/local-store.cc | 8 ++++++++ src/libstore/local-store.hh | 1 + src/libstore/store-api.cc | 2 +- src/libstore/store-api.hh | 2 ++ tests/ca/signatures.sh | 39 +++++++++++++++++++++++++++++++++++++ tests/local.mk | 3 ++- 6 files changed, 53 insertions(+), 2 deletions(-) create mode 100644 tests/ca/signatures.sh diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 950a9f74e..83daa7506 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -652,6 +652,14 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat } } +void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs) +{ + settings.requireExperimentalFeature("ca-derivations"); + if (checkSigs == NoCheckSigs || !realisationIsUntrusted(info)) + registerDrvOutput(info); + else + throw Error("cannot register realisation '%s' because it lacks a valid signature", info.outPath.to_string()); +} void LocalStore::registerDrvOutput(const Realisation & info) { diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index c311d295a..26e034a82 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -203,6 +203,7 @@ public: /* Register the store path 'output' as the output named 'outputName' of derivation 'deriver'. */ void registerDrvOutput(const Realisation & info) override; + void registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs) override; void cacheDrvOutputMapping(State & state, const uint64_t deriver, const string & outputName, const StorePath & output); std::optional queryRealisation(const DrvOutput&) override; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 77c310988..5e321cedf 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -798,7 +798,7 @@ std::map copyPaths(ref srcStore, ref dstStor auto pathsMap = copyPaths(srcStore, dstStore, storePaths, repair, checkSigs, substitute); try { for (auto & realisation : realisations) { - dstStore->registerDrvOutput(realisation); + dstStore->registerDrvOutput(realisation, checkSigs); } } catch (MissingExperimentalFeature & e) { // Don't fail if the remote doesn't support CA derivations is it might diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index b90aeaa4c..5d19e8949 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -485,6 +485,8 @@ public: */ virtual void registerDrvOutput(const Realisation & output) { unsupported("registerDrvOutput"); } + virtual void registerDrvOutput(const Realisation & output, CheckSigsFlag checkSigs) + { return registerDrvOutput(output); } /* Write a NAR dump of a store path. 
*/ virtual void narFromPath(const StorePath & path, Sink & sink) = 0; diff --git a/tests/ca/signatures.sh b/tests/ca/signatures.sh new file mode 100644 index 000000000..4b4e468f7 --- /dev/null +++ b/tests/ca/signatures.sh @@ -0,0 +1,39 @@ +source common.sh + +# Globally enable the ca derivations experimental flag +sed -i 's/experimental-features = .*/& ca-derivations ca-references/' "$NIX_CONF_DIR/nix.conf" + +clearStore +clearCache + +nix-store --generate-binary-cache-key cache1.example.org $TEST_ROOT/sk1 $TEST_ROOT/pk1 +pk1=$(cat $TEST_ROOT/pk1) + +export REMOTE_STORE_DIR="$TEST_ROOT/remote_store" +export REMOTE_STORE="file://$REMOTE_STORE_DIR" + +ensureCorrectlyCopied () { + attrPath="$1" + nix build --store "$REMOTE_STORE" --file ./content-addressed.nix "$attrPath" +} + +testOneCopy () { + clearStore + rm -rf "$REMOTE_STORE_DIR" + + attrPath="$1" + nix copy --to $REMOTE_STORE "$attrPath" --file ./content-addressed.nix \ + --secret-key-files "$TEST_ROOT/sk1" + + ensureCorrectlyCopied "$attrPath" + + # Ensure that we can copy back what we put in the store + clearStore + nix copy --from $REMOTE_STORE \ + --file ./content-addressed.nix "$attrPath" \ + --trusted-public-keys $pk1 +} + +for attrPath in rootCA dependentCA transitivelyDependentCA dependentNonCA dependentFixedOutput; do + testOneCopy "$attrPath" +done diff --git a/tests/local.mk b/tests/local.mk index e17555051..9a227bec5 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -41,8 +41,9 @@ nix_tests = \ build.sh \ compute-levels.sh \ ca/build.sh \ - ca/nix-copy.sh \ ca/substitute.sh + ca/signatures.sh \ + ca/nix-copy.sh # parallel.sh install-tests += $(foreach x, $(nix_tests), tests/$(x)) From 703c98c6cb922ff9d8cd8cb2c1104e0d3b15b803 Mon Sep 17 00:00:00 2001 From: regnat Date: Mon, 8 Mar 2021 17:32:20 +0100 Subject: [PATCH 032/510] Properly sign the unresolved drvs Don't let them inherit the signature from the parent one (because it makes no sense to do so), but re-sign them after they have been built --- src/libstore/build/derivation-goal.cc | 2 ++ src/libstore/build/derivation-goal.hh | 3 +++ src/libstore/build/local-derivation-goal.cc | 7 ++++++- src/libstore/build/local-derivation-goal.hh | 2 ++ 4 files changed, 13 insertions(+), 1 deletion(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 7dcd2a6eb..d624e58b9 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -925,6 +925,8 @@ void DerivationGoal::resolvedFinished() { if (realisation) { auto newRealisation = *realisation; newRealisation.id = DrvOutput{initialOutputs.at(wantedOutput).outputHash, wantedOutput}; + newRealisation.signatures.clear(); + signRealisation(newRealisation); worker.store.registerDrvOutput(newRealisation); } else { // If we don't have a realisation, then it must mean that something diff --git a/src/libstore/build/derivation-goal.hh b/src/libstore/build/derivation-goal.hh index c85bcd84f..704b77caf 100644 --- a/src/libstore/build/derivation-goal.hh +++ b/src/libstore/build/derivation-goal.hh @@ -180,6 +180,9 @@ struct DerivationGoal : public Goal /* Open a log file and a pipe to it. */ Path openLogFile(); + /* Sign the newly built realisation if the store allows it */ + virtual void signRealisation(Realisation&) {} + /* Close the log file. 
*/ void closeLogFile(); diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 048135ccf..2966bb565 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -2620,12 +2620,17 @@ void LocalDerivationGoal::registerOutputs() .id = DrvOutput{initialOutputs.at(outputName).outputHash, outputName}, .outPath = newInfo.path}; - getLocalStore().signRealisation(thisRealisation); + signRealisation(thisRealisation); worker.store.registerDrvOutput(thisRealisation); } } } +void LocalDerivationGoal::signRealisation(Realisation & realisation) +{ + getLocalStore().signRealisation(realisation); +} + void LocalDerivationGoal::checkOutputs(const std::map & outputs) { diff --git a/src/libstore/build/local-derivation-goal.hh b/src/libstore/build/local-derivation-goal.hh index 4bbf27a1b..47b818a8b 100644 --- a/src/libstore/build/local-derivation-goal.hh +++ b/src/libstore/build/local-derivation-goal.hh @@ -161,6 +161,8 @@ struct LocalDerivationGoal : public DerivationGoal as valid. */ void registerOutputs() override; + void signRealisation(Realisation &) override; + /* Check that an output meets the requirements specified by the 'outputChecks' attribute (or the legacy '{allowed,disallowed}{References,Requisites}' attributes). */ From 5869b3025d8ed2b99a8dca61f335789ce6dc83e1 Mon Sep 17 00:00:00 2001 From: sternenseemann <0rpkxez4ksa01gb3typccl0i@systemli.org> Date: Tue, 16 Mar 2021 02:42:14 +0100 Subject: [PATCH 033/510] tests/local.mk: fix missing newline escape Fixes syntax error introduced in 54ced9072b94515a756e1e8e76c92a42f0ccf366. --- tests/local.mk | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/local.mk b/tests/local.mk index 1ca363091..de095c117 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -42,7 +42,7 @@ nix_tests = \ build.sh \ compute-levels.sh \ ca/build.sh \ - ca/substitute.sh + ca/substitute.sh \ ca/signatures.sh \ ca/nix-copy.sh # parallel.sh From 5716345adf2e794fd62229ea52352e74e92e8e63 Mon Sep 17 00:00:00 2001 From: regnat Date: Tue, 10 Nov 2020 10:43:33 +0100 Subject: [PATCH 034/510] Add a test ensuring compatibility with an old daemon This requires adding `nix` to its own closure which is a bit unfortunate, but as it is optional (the test will be disabled if `OUTER_NIX` is unset) it shouldn't be too much of an issue. (Ideally this should go in another derivation so that we can build Nix and run the test independently, but as the tests are running in the same derivation as the build it's a bit complicated to do so). --- flake.nix | 9 +++++++++ tests/common.sh.in | 3 +-- tests/local.mk | 2 +- tests/remote-store-old-daemon.sh | 7 +++++++ tests/remote-store.sh | 4 ++-- 5 files changed, 20 insertions(+), 5 deletions(-) create mode 100644 tests/remote-store-old-daemon.sh diff --git a/flake.nix b/flake.nix index e59ec9a35..3e236aaca 100644 --- a/flake.nix +++ b/flake.nix @@ -150,6 +150,11 @@ # 'nix.perl-bindings' packages. overlay = final: prev: { + # An older version of Nix to test against when using the daemon. + # Currently using `nixUnstable` as the stable one doesn't respect + # `NIX_DAEMON_SOCKET_PATH` which is needed for the tests. 
+ mainstream-nix = prev.nixUnstable; + nix = with final; with commonDeps pkgs; stdenv.mkDerivation { name = "nix-${version}"; inherit version; @@ -158,6 +163,8 @@ VERSION_SUFFIX = versionSuffix; + OUTER_NIX = mainstream-nix; + outputs = [ "out" "dev" "doc" ]; nativeBuildInputs = nativeBuildDeps; @@ -486,6 +493,8 @@ stdenv.mkDerivation { name = "nix"; + OUTER_NIX = mainstream-nix; + outputs = [ "out" "dev" "doc" ]; nativeBuildInputs = nativeBuildDeps; diff --git a/tests/common.sh.in b/tests/common.sh.in index de44a4da4..277dd6dfa 100644 --- a/tests/common.sh.in +++ b/tests/common.sh.in @@ -57,7 +57,6 @@ clearStore() { mkdir "$NIX_STORE_DIR" rm -rf "$NIX_STATE_DIR" mkdir "$NIX_STATE_DIR" - nix-store --init clearProfiles } @@ -73,7 +72,7 @@ startDaemon() { # Start the daemon, wait for the socket to appear. !!! # ‘nix-daemon’ should have an option to fork into the background. rm -f $NIX_STATE_DIR/daemon-socket/socket - nix daemon & + ${NIX_DAEMON_COMMAND:-nix daemon} & for ((i = 0; i < 30; i++)); do if [ -e $NIX_DAEMON_SOCKET_PATH ]; then break; fi sleep 1 diff --git a/tests/local.mk b/tests/local.mk index de095c117..dd9a0ad56 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -6,7 +6,7 @@ nix_tests = \ gc-auto.sh \ referrers.sh user-envs.sh logging.sh nix-build.sh misc.sh fixed.sh \ gc-runtime.sh check-refs.sh filter-source.sh \ - local-store.sh remote-store.sh export.sh export-graph.sh \ + local-store.sh remote-store.sh remote-store-old-daemon.sh export.sh export-graph.sh \ timeout.sh secure-drv-outputs.sh nix-channel.sh \ multiple-outputs.sh import-derivation.sh fetchurl.sh optimise-store.sh \ binary-cache.sh \ diff --git a/tests/remote-store-old-daemon.sh b/tests/remote-store-old-daemon.sh new file mode 100644 index 000000000..ede7ce716 --- /dev/null +++ b/tests/remote-store-old-daemon.sh @@ -0,0 +1,7 @@ +# Test that the new Nix can properly talk to an old daemon. +# If `$OUTER_NIX` isn't set (e.g. 
when bootsraping), just skip this test + +if [[ -n "$OUTER_NIX" ]]; then + export NIX_DAEMON_COMMAND=$OUTER_NIX/bin/nix-daemon + source remote-store.sh +fi diff --git a/tests/remote-store.sh b/tests/remote-store.sh index f7ae1a2ed..31210ab47 100644 --- a/tests/remote-store.sh +++ b/tests/remote-store.sh @@ -23,12 +23,12 @@ startDaemon storeCleared=1 NIX_REMOTE_=$NIX_REMOTE $SHELL ./user-envs.sh +nix-store --gc --max-freed 1K + nix-store --dump-db > $TEST_ROOT/d1 NIX_REMOTE= nix-store --dump-db > $TEST_ROOT/d2 cmp $TEST_ROOT/d1 $TEST_ROOT/d2 -nix-store --gc --max-freed 1K - killDaemon user=$(whoami) From eab9cdbd75e739be33f9433cfba9ab354d084440 Mon Sep 17 00:00:00 2001 From: regnat Date: Tue, 17 Nov 2020 14:33:09 +0100 Subject: [PATCH 035/510] Add a test for the migration of the db between versions --- tests/db-migration.sh | 25 +++++++++++++++++++++++++ tests/local.mk | 1 + 2 files changed, 26 insertions(+) create mode 100644 tests/db-migration.sh diff --git a/tests/db-migration.sh b/tests/db-migration.sh new file mode 100644 index 000000000..e6a405770 --- /dev/null +++ b/tests/db-migration.sh @@ -0,0 +1,25 @@ +# Test that we can successfully migrate from an older db schema + +# Only run this if we have an older Nix available +if [[ -z "$OUTER_NIX" ]]; then + exit 0 +fi + +source common.sh + +# Fill the db using the older Nix +PATH_WITH_NEW_NIX="$PATH" +export PATH="$OUTER_NIX/bin:$PATH" +clearStore +nix-build simple.nix --no-out-link +nix-store --generate-binary-cache-key cache1.example.org $TEST_ROOT/sk1 $TEST_ROOT/pk1 +dependenciesOutPath=$(nix-build dependencies.nix --no-out-link --secret-key-files "$TEST_ROOT/sk1") +fixedOutPath=$(IMPURE_VAR1=foo IMPURE_VAR2=bar nix-build fixed.nix -A good.0 --no-out-link) + +# Migrate to the new schema and ensure that everything's there +export PATH="$PATH_WITH_NEW_NIX" +info=$(nix path-info --json $dependenciesOutPath) +[[ $info =~ '"ultimate":true' ]] +[[ $info =~ 'cache1.example.org' ]] +nix verify -r "$fixedOutPath" +nix verify -r "$dependenciesOutPath" --sigs-needed 1 --trusted-public-keys $(cat $TEST_ROOT/pk1) diff --git a/tests/local.mk b/tests/local.mk index dd9a0ad56..01c35551f 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -7,6 +7,7 @@ nix_tests = \ referrers.sh user-envs.sh logging.sh nix-build.sh misc.sh fixed.sh \ gc-runtime.sh check-refs.sh filter-source.sh \ local-store.sh remote-store.sh remote-store-old-daemon.sh export.sh export-graph.sh \ + db-migration.sh \ timeout.sh secure-drv-outputs.sh nix-channel.sh \ multiple-outputs.sh import-derivation.sh fetchurl.sh optimise-store.sh \ binary-cache.sh \ From a0866c8ea4bc66f9aacc7ad19139d57946b3df18 Mon Sep 17 00:00:00 2001 From: regnat Date: Tue, 16 Mar 2021 13:43:08 +0100 Subject: [PATCH 036/510] Make the tests (optionnally) run in another derivation That way we can run them without rebuilding Nix --- flake.nix | 41 ++++++++++++++++++++++++++++++++++++----- tests/common.sh.in | 6 ++++++ 2 files changed, 42 insertions(+), 5 deletions(-) diff --git a/flake.nix b/flake.nix index 3e236aaca..c2e5db53a 100644 --- a/flake.nix +++ b/flake.nix @@ -144,6 +144,32 @@ echo "file installer $out/install" >> $out/nix-support/hydra-build-products ''; + testNixVersions = pkgs: client: daemon: with commonDeps pkgs; pkgs.stdenv.mkDerivation { + NIX_DAEMON_PACKAGE = daemon; + NIX_CLIENT_PACKAGE = client; + name = "nix-tests-${client.version}-against-${daemon.version}"; + inherit version; + + src = self; + + VERSION_SUFFIX = versionSuffix; + + nativeBuildInputs = nativeBuildDeps; + buildInputs = 
buildDeps ++ awsDeps; + propagatedBuildInputs = propagatedDeps; + + enableParallelBuilding = true; + + dontBuild = true; + doInstallCheck = true; + + installPhase = '' + mkdir -p $out + ''; + installCheckPhase = "make installcheck"; + + }; + in { # A Nixpkgs overlay that overrides the 'nix' and @@ -153,7 +179,7 @@ # An older version of Nix to test against when using the daemon. # Currently using `nixUnstable` as the stable one doesn't respect # `NIX_DAEMON_SOCKET_PATH` which is needed for the tests. - mainstream-nix = prev.nixUnstable; + nixStable = prev.nix; nix = with final; with commonDeps pkgs; stdenv.mkDerivation { name = "nix-${version}"; @@ -163,8 +189,6 @@ VERSION_SUFFIX = versionSuffix; - OUTER_NIX = mainstream-nix; - outputs = [ "out" "dev" "doc" ]; nativeBuildInputs = nativeBuildDeps; @@ -441,6 +465,15 @@ checks = forAllSystems (system: { binaryTarball = self.hydraJobs.binaryTarball.${system}; perlBindings = self.hydraJobs.perlBindings.${system}; + installTests = + let pkgs = nixpkgsFor.${system}; in + pkgs.runCommand "install-tests" { + againstSelf = testNixVersions pkgs pkgs.nix pkgs.pkgs.nix; + againstCurrentUnstable = testNixVersions pkgs pkgs.nix pkgs.nixUnstable; + # Disabled because the latest stable version doesn't handle + # `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work + # againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable; + } "touch $out"; }); packages = forAllSystems (system: { @@ -493,8 +526,6 @@ stdenv.mkDerivation { name = "nix"; - OUTER_NIX = mainstream-nix; - outputs = [ "out" "dev" "doc" ]; nativeBuildInputs = nativeBuildDeps; diff --git a/tests/common.sh.in b/tests/common.sh.in index 277dd6dfa..d31d3fbb8 100644 --- a/tests/common.sh.in +++ b/tests/common.sh.in @@ -29,6 +29,12 @@ unset XDG_CACHE_HOME mkdir -p $TEST_HOME export PATH=@bindir@:$PATH +if [[ -n "${NIX_CLIENT_PACKAGE:-}" ]]; then + export PATH="$NIX_CLIENT_PACKAGE/bin":$PATH +fi +if [[ -n "${NIX_DAEMON_PACKAGE:-}" ]]; then + export NIX_DAEMON_COMMAND="$NIX_DAEMON_PACKAGE/bin/nix-daemon" +fi coreutils=@coreutils@ export dot=@dot@ From 81df1b5c687b7606f0159485c33bf5f7e2614eba Mon Sep 17 00:00:00 2001 From: regnat Date: Tue, 16 Mar 2021 14:15:57 +0100 Subject: [PATCH 037/510] Remove the `remote-store-old-daemon` test Doesn't make sense anymore with the new setup --- tests/local.mk | 2 +- tests/remote-store-old-daemon.sh | 7 ------- 2 files changed, 1 insertion(+), 8 deletions(-) delete mode 100644 tests/remote-store-old-daemon.sh diff --git a/tests/local.mk b/tests/local.mk index 01c35551f..e7e85f97e 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -6,7 +6,7 @@ nix_tests = \ gc-auto.sh \ referrers.sh user-envs.sh logging.sh nix-build.sh misc.sh fixed.sh \ gc-runtime.sh check-refs.sh filter-source.sh \ - local-store.sh remote-store.sh remote-store-old-daemon.sh export.sh export-graph.sh \ + local-store.sh remote-store.sh export.sh export-graph.sh \ db-migration.sh \ timeout.sh secure-drv-outputs.sh nix-channel.sh \ multiple-outputs.sh import-derivation.sh fetchurl.sh optimise-store.sh \ diff --git a/tests/remote-store-old-daemon.sh b/tests/remote-store-old-daemon.sh deleted file mode 100644 index ede7ce716..000000000 --- a/tests/remote-store-old-daemon.sh +++ /dev/null @@ -1,7 +0,0 @@ -# Test that the new Nix can properly talk to an old daemon. -# If `$OUTER_NIX` isn't set (e.g. 
when bootsraping), just skip this test - -if [[ -n "$OUTER_NIX" ]]; then - export NIX_DAEMON_COMMAND=$OUTER_NIX/bin/nix-daemon - source remote-store.sh -fi From be60c9ef50bf5fa653138802f63727fa0aadf50a Mon Sep 17 00:00:00 2001 From: regnat Date: Tue, 16 Mar 2021 14:20:10 +0100 Subject: [PATCH 038/510] Fix the db-migration test --- tests/db-migration.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/db-migration.sh b/tests/db-migration.sh index e6a405770..e0ff7d311 100644 --- a/tests/db-migration.sh +++ b/tests/db-migration.sh @@ -1,7 +1,8 @@ # Test that we can successfully migrate from an older db schema # Only run this if we have an older Nix available -if [[ -z "$OUTER_NIX" ]]; then +# XXX: This assumes that the `daemon` package is older than the `client` one +if [[ -z "$NIX_DAEMON_PACKAGE" ]]; then exit 0 fi @@ -9,7 +10,7 @@ source common.sh # Fill the db using the older Nix PATH_WITH_NEW_NIX="$PATH" -export PATH="$OUTER_NIX/bin:$PATH" +export PATH="$NIX_DAEMON_PACKAGE/bin:$PATH" clearStore nix-build simple.nix --no-out-link nix-store --generate-binary-cache-key cache1.example.org $TEST_ROOT/sk1 $TEST_ROOT/pk1 From 5ec873b127139ca90cc31967c25c9a34fb4cc3e4 Mon Sep 17 00:00:00 2001 From: regnat Date: Tue, 16 Mar 2021 16:44:42 +0100 Subject: [PATCH 039/510] Shorten the test drv name To prevent the OSX build to fail because of a too long socket path --- flake.nix | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index c2e5db53a..1cd54e702 100644 --- a/flake.nix +++ b/flake.nix @@ -147,7 +147,10 @@ testNixVersions = pkgs: client: daemon: with commonDeps pkgs; pkgs.stdenv.mkDerivation { NIX_DAEMON_PACKAGE = daemon; NIX_CLIENT_PACKAGE = client; - name = "nix-tests-${client.version}-against-${daemon.version}"; + # Must keep this name short as OSX has a rather strict limit on the + # socket path length, and this name appears in the path of the + # nix-daemon socket used in the tests + name = "nix-tests"; inherit version; src = self; From 77f5d171e17294ebb017a386d4408bf4613dfed7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Mar 2021 16:53:39 +0100 Subject: [PATCH 040/510] --override-input: Imply --no-write-lock-file Fixes #3779. --- src/libcmd/installables.cc | 3 ++- src/nix/flake.cc | 10 ++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 7102f5a1a..898e642a5 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -111,10 +111,11 @@ MixFlakeOptions::MixFlakeOptions() addFlag({ .longName = "override-input", - .description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`).", + .description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`). 
This implies `--no-write-lock-file`.", .category = category, .labels = {"input-path", "flake-url"}, .handler = {[&](std::string inputPath, std::string flakeRef) { + lockFlags.writeLockFile = false; lockFlags.inputOverrides.insert_or_assign( flake::parseInputPath(inputPath), parseFlakeRef(flakeRef, absPath("."))); diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 2f0c468a8..d37791aba 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -110,6 +110,7 @@ struct CmdFlakeUpdate : FlakeCommand removeFlag("recreate-lock-file"); removeFlag("update-input"); removeFlag("no-update-lock-file"); + removeFlag("no-write-lock-file"); } std::string doc() override @@ -124,6 +125,7 @@ struct CmdFlakeUpdate : FlakeCommand settings.tarballTtl = 0; lockFlags.recreateLockFile = true; + lockFlags.writeLockFile = true; lockFlake(); } @@ -136,6 +138,12 @@ struct CmdFlakeLock : FlakeCommand return "create missing lock file entries"; } + CmdFlakeLock() + { + /* Remove flags that don't make sense. */ + removeFlag("no-write-lock-file"); + } + std::string doc() override { return @@ -147,6 +155,8 @@ struct CmdFlakeLock : FlakeCommand { settings.tarballTtl = 0; + lockFlags.writeLockFile = true; + lockFlake(); } }; From 66fa1c7375e4b3073a16df4678cf1d37446ed20b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Mar 2021 17:19:04 +0100 Subject: [PATCH 041/510] Merge 'nix flake {info,list-inputs}' into 'nix flake metadata' Fixes #4613. --- src/nix/flake-list-inputs.md | 23 ---- src/nix/{flake-info.md => flake-metadata.md} | 27 ++-- src/nix/flake.cc | 134 ++++++++----------- tests/flakes.sh | 38 +++--- 4 files changed, 92 insertions(+), 130 deletions(-) delete mode 100644 src/nix/flake-list-inputs.md rename src/nix/{flake-info.md => flake-metadata.md} (75%) diff --git a/src/nix/flake-list-inputs.md b/src/nix/flake-list-inputs.md deleted file mode 100644 index 250e13be0..000000000 --- a/src/nix/flake-list-inputs.md +++ /dev/null @@ -1,23 +0,0 @@ -R""( - -# Examples - -* Show the inputs of the `hydra` flake: - - ```console - # nix flake list-inputs github:NixOS/hydra - github:NixOS/hydra/bde8d81876dfc02143e5070e42c78d8f0d83d6f7 - ├───nix: github:NixOS/nix/79aa7d95183cbe6c0d786965f0dbff414fd1aa67 - │ ├───lowdown-src: github:kristapsdz/lowdown/1705b4a26fbf065d9574dce47a94e8c7c79e052f - │ └───nixpkgs: github:NixOS/nixpkgs/ad0d20345219790533ebe06571f82ed6b034db31 - └───nixpkgs follows input 'nix/nixpkgs' - ``` - -# Description - -This command shows the inputs of the flake specified by the flake -referenced *flake-url*. Since it prints the locked inputs that result -from generating or updating the lock file, this command essentially -displays the contents of the flake's lock file in human-readable form. 
- -)"" diff --git a/src/nix/flake-info.md b/src/nix/flake-metadata.md similarity index 75% rename from src/nix/flake-info.md rename to src/nix/flake-metadata.md index fda3171db..5a009409b 100644 --- a/src/nix/flake-info.md +++ b/src/nix/flake-metadata.md @@ -5,19 +5,24 @@ R""( * Show what `nixpkgs` resolves to: ```console - # nix flake info nixpkgs - Resolved URL: github:NixOS/nixpkgs - Locked URL: github:NixOS/nixpkgs/b67ba0bfcc714453cdeb8d713e35751eb8b4c8f4 - Description: A collection of packages for the Nix package manager - Path: /nix/store/23qapccs6cfmwwrlq8kr41vz5vdmns3r-source - Revision: b67ba0bfcc714453cdeb8d713e35751eb8b4c8f4 - Last modified: 2020-12-23 12:36:12 + # nix flake metadata nixpkgs + Resolved URL: github:edolstra/dwarffs + Locked URL: github:edolstra/dwarffs/f691e2c991e75edb22836f1dbe632c40324215c5 + Description: A filesystem that fetches DWARF debug info from the Internet on demand + Path: /nix/store/769s05vjydmc2lcf6b02az28wsa9ixh1-source + Revision: f691e2c991e75edb22836f1dbe632c40324215c5 + Last modified: 2021-01-21 15:41:26 + Inputs: + ├───nix: github:NixOS/nix/6254b1f5d298ff73127d7b0f0da48f142bdc753c + │ ├───lowdown-src: github:kristapsdz/lowdown/1705b4a26fbf065d9574dce47a94e8c7c79e052f + │ └───nixpkgs: github:NixOS/nixpkgs/ad0d20345219790533ebe06571f82ed6b034db31 + └───nixpkgs follows input 'nix/nixpkgs' ``` * Show information about `dwarffs` in JSON format: ```console - # nix flake info dwarffs --json | jq . + # nix flake metadata dwarffs --json | jq . { "description": "A filesystem that fetches DWARF debug info from the Internet on demand", "lastModified": 1597153508, @@ -29,6 +34,7 @@ R""( "rev": "d181d714fd36eb06f4992a1997cd5601e26db8f5", "type": "github" }, + "locks": { ... }, "original": { "id": "dwarffs", "type": "indirect" @@ -75,6 +81,9 @@ data. This includes: time of the commit of the locked flake; for tarball flakes, it's the most recent timestamp of any file inside the tarball. +* `Inputs`: The flake inputs with their corresponding lock file + entries. + With `--json`, the output is a JSON object with the following fields: * `original` and `originalUrl`: The flake reference specified by the @@ -96,4 +105,6 @@ With `--json`, the output is a JSON object with the following fields: * `lastModified`: See `Last modified` above. +* `locks`: The contents of `flake.lock`. 
+ )"" diff --git a/src/nix/flake.cc b/src/nix/flake.cc index d37791aba..5ce2e082c 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -43,12 +43,6 @@ public: return parseFlakeRef(flakeUrl, absPath(".")); //FIXME } - Flake getFlake() - { - auto evalState = getEvalState(); - return flake::getFlake(*evalState, getFlakeRef(), lockFlags.useRegistries); - } - LockedFlake lockFlake() { return flake::lockFlake(*getEvalState(), getFlakeRef(), lockFlags); @@ -60,43 +54,6 @@ public: } }; -static void printFlakeInfo(const Store & store, const Flake & flake) -{ - logger->cout("Resolved URL: %s", flake.resolvedRef.to_string()); - logger->cout("Locked URL: %s", flake.lockedRef.to_string()); - if (flake.description) - logger->cout("Description: %s", *flake.description); - logger->cout("Path: %s", store.printStorePath(flake.sourceInfo->storePath)); - if (auto rev = flake.lockedRef.input.getRev()) - logger->cout("Revision: %s", rev->to_string(Base16, false)); - if (auto revCount = flake.lockedRef.input.getRevCount()) - logger->cout("Revisions: %s", *revCount); - if (auto lastModified = flake.lockedRef.input.getLastModified()) - logger->cout("Last modified: %s", - std::put_time(std::localtime(&*lastModified), "%F %T")); -} - -static nlohmann::json flakeToJSON(const Store & store, const Flake & flake) -{ - nlohmann::json j; - if (flake.description) - j["description"] = *flake.description; - j["originalUrl"] = flake.originalRef.to_string(); - j["original"] = fetchers::attrsToJSON(flake.originalRef.toAttrs()); - j["resolvedUrl"] = flake.resolvedRef.to_string(); - j["resolved"] = fetchers::attrsToJSON(flake.resolvedRef.toAttrs()); - j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl - j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs()); - if (auto rev = flake.lockedRef.input.getRev()) - j["revision"] = rev->to_string(Base16, false); - if (auto revCount = flake.lockedRef.input.getRevCount()) - j["revCount"] = *revCount; - if (auto lastModified = flake.lockedRef.input.getLastModified()) - j["lastModified"] = *lastModified; - j["path"] = store.printStorePath(flake.sourceInfo->storePath); - return j; -} - struct CmdFlakeUpdate : FlakeCommand { std::string description() override @@ -175,54 +132,72 @@ static void enumerateOutputs(EvalState & state, Value & vFlake, callback(attr.name, *attr.value, *attr.pos); } -struct CmdFlakeInfo : FlakeCommand, MixJSON +struct CmdFlakeMetadata : FlakeCommand, MixJSON { std::string description() override { - return "list info about a given flake"; + return "show flake metadata"; } std::string doc() override { return - #include "flake-info.md" + #include "flake-metadata.md" ; } void run(nix::ref store) override { - auto flake = getFlake(); + auto lockedFlake = lockFlake(); + auto & flake = lockedFlake.flake; if (json) { - auto json = flakeToJSON(*store, flake); - logger->cout("%s", json.dump()); - } else - printFlakeInfo(*store, flake); - } -}; + nlohmann::json j; + if (flake.description) + j["description"] = *flake.description; + j["originalUrl"] = flake.originalRef.to_string(); + j["original"] = fetchers::attrsToJSON(flake.originalRef.toAttrs()); + j["resolvedUrl"] = flake.resolvedRef.to_string(); + j["resolved"] = fetchers::attrsToJSON(flake.resolvedRef.toAttrs()); + j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl + j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs()); + if (auto rev = flake.lockedRef.input.getRev()) + j["revision"] = rev->to_string(Base16, false); + if (auto revCount = 
flake.lockedRef.input.getRevCount()) + j["revCount"] = *revCount; + if (auto lastModified = flake.lockedRef.input.getLastModified()) + j["lastModified"] = *lastModified; + j["path"] = store->printStorePath(flake.sourceInfo->storePath); + j["locks"] = lockedFlake.lockFile.toJSON(); + logger->cout("%s", j.dump()); + } else { + logger->cout( + ANSI_BOLD "Resolved URL:" ANSI_NORMAL " %s", + flake.resolvedRef.to_string()); + logger->cout( + ANSI_BOLD "Locked URL:" ANSI_NORMAL " %s", + flake.lockedRef.to_string()); + if (flake.description) + logger->cout( + ANSI_BOLD "Description:" ANSI_NORMAL " %s", + *flake.description); + logger->cout( + ANSI_BOLD "Path:" ANSI_NORMAL " %s", + store->printStorePath(flake.sourceInfo->storePath)); + if (auto rev = flake.lockedRef.input.getRev()) + logger->cout( + ANSI_BOLD "Revision:" ANSI_NORMAL " %s", + rev->to_string(Base16, false)); + if (auto revCount = flake.lockedRef.input.getRevCount()) + logger->cout( + ANSI_BOLD "Revisions:" ANSI_NORMAL " %s", + *revCount); + if (auto lastModified = flake.lockedRef.input.getLastModified()) + logger->cout( + ANSI_BOLD "Last modified:" ANSI_NORMAL " %s", + std::put_time(std::localtime(&*lastModified), "%F %T")); -struct CmdFlakeListInputs : FlakeCommand, MixJSON -{ - std::string description() override - { - return "list flake inputs"; - } - - std::string doc() override - { - return - #include "flake-list-inputs.md" - ; - } - - void run(nix::ref store) override - { - auto flake = lockFlake(); - - if (json) - logger->cout("%s", flake.lockFile.toJSON()); - else { - logger->cout("%s", flake.flake.lockedRef); + logger->cout(ANSI_BOLD "Inputs:" ANSI_NORMAL); std::unordered_set> visited; @@ -236,7 +211,7 @@ struct CmdFlakeListInputs : FlakeCommand, MixJSON if (auto lockedNode = std::get_if<0>(&input.second)) { logger->cout("%s" ANSI_BOLD "%s" ANSI_NORMAL ": %s", prefix + (last ? treeLast : treeConn), input.first, - *lockedNode ? (*lockedNode)->lockedRef : flake.flake.lockedRef); + *lockedNode ? (*lockedNode)->lockedRef : flake.lockedRef); bool firstVisit = visited.insert(*lockedNode).second; @@ -249,8 +224,8 @@ struct CmdFlakeListInputs : FlakeCommand, MixJSON } }; - visited.insert(flake.lockFile.root); - recurse(*flake.lockFile.root, ""); + visited.insert(lockedFlake.lockFile.root); + recurse(*lockedFlake.lockFile.root, ""); } } }; @@ -1048,8 +1023,7 @@ struct CmdFlake : NixMultiCommand : MultiCommand({ {"update", []() { return make_ref(); }}, {"lock", []() { return make_ref(); }}, - {"info", []() { return make_ref(); }}, - {"list-inputs", []() { return make_ref(); }}, + {"metadata", []() { return make_ref(); }}, {"check", []() { return make_ref(); }}, {"init", []() { return make_ref(); }}, {"new", []() { return make_ref(); }}, diff --git a/tests/flakes.sh b/tests/flakes.sh index 9747aba7a..e78e4a39d 100644 --- a/tests/flakes.sh +++ b/tests/flakes.sh @@ -164,16 +164,17 @@ EOF # Test 'nix flake list'. [[ $(nix registry list | wc -l) == 7 ]] -# Test 'nix flake info'. -nix flake info flake1 | grep -q 'URL: .*flake1.*' +# Test 'nix flake metadata'. +nix flake metadata flake1 +nix flake metadata flake1 | grep -q 'Locked URL:.*flake1.*' -# Test 'nix flake info' on a local flake. -(cd $flake1Dir && nix flake info) | grep -q 'URL: .*flake1.*' -(cd $flake1Dir && nix flake info .) | grep -q 'URL: .*flake1.*' -nix flake info $flake1Dir | grep -q 'URL: .*flake1.*' +# Test 'nix flake metadata' on a local flake. +(cd $flake1Dir && nix flake metadata) | grep -q 'URL:.*flake1.*' +(cd $flake1Dir && nix flake metadata .) 
| grep -q 'URL:.*flake1.*' +nix flake metadata $flake1Dir | grep -q 'URL:.*flake1.*' -# Test 'nix flake info --json'. -json=$(nix flake info flake1 --json | jq .) +# Test 'nix flake metadata --json'. +json=$(nix flake metadata flake1 --json | jq .) [[ $(echo "$json" | jq -r .description) = 'Bla bla' ]] [[ -d $(echo "$json" | jq -r .path) ]] [[ $(echo "$json" | jq -r .lastModified) = $(git -C $flake1Dir log -n1 --format=%ct) ]] @@ -181,7 +182,7 @@ hash1=$(echo "$json" | jq -r .revision) echo -n '# foo' >> $flake1Dir/flake.nix git -C $flake1Dir commit -a -m 'Foo' -hash2=$(nix flake info flake1 --json --refresh | jq -r .revision) +hash2=$(nix flake metadata flake1 --json --refresh | jq -r .revision) [[ $hash1 != $hash2 ]] # Test 'nix build' on a flake. @@ -630,7 +631,7 @@ hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Initial commit' nix build -o $TEST_ROOT/result hg+file://$flake5Dir [[ -e $TEST_ROOT/result/hello ]] -(! nix flake info --json hg+file://$flake5Dir | jq -e -r .revision) +(! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision) nix eval hg+file://$flake5Dir#expr @@ -638,13 +639,13 @@ nix eval hg+file://$flake5Dir#expr (! nix eval hg+file://$flake5Dir#expr --no-allow-dirty) -(! nix flake info --json hg+file://$flake5Dir | jq -e -r .revision) +(! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision) hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Add lock file' -nix flake info --json hg+file://$flake5Dir --refresh | jq -e -r .revision -nix flake info --json hg+file://$flake5Dir -[[ $(nix flake info --json hg+file://$flake5Dir | jq -e -r .revCount) = 1 ]] +nix flake metadata --json hg+file://$flake5Dir --refresh | jq -e -r .revision +nix flake metadata --json hg+file://$flake5Dir +[[ $(nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revCount) = 1 ]] nix build -o $TEST_ROOT/result hg+file://$flake5Dir --no-registries --no-allow-dirty @@ -654,7 +655,7 @@ tar cfz $TEST_ROOT/flake.tar.gz -C $TEST_ROOT --exclude .hg flake5 nix build -o $TEST_ROOT/result file://$TEST_ROOT/flake.tar.gz # Building with a tarball URL containing a SRI hash should also work. -url=$(nix flake info --json file://$TEST_ROOT/flake.tar.gz | jq -r .url) +url=$(nix flake metadata --json file://$TEST_ROOT/flake.tar.gz | jq -r .url) [[ $url =~ sha256- ]] nix build -o $TEST_ROOT/result $url @@ -680,9 +681,8 @@ nix flake lock $flake3Dir nix flake lock $flake3Dir --update-input flake2/flake1 [[ $(jq -r .nodes.flake1_2.locked.rev $flake3Dir/flake.lock) =~ $hash2 ]] -# Test 'nix flake list-inputs'. -[[ $(nix flake list-inputs $flake3Dir | wc -l) == 5 ]] -nix flake list-inputs $flake3Dir --json | jq . +# Test 'nix flake metadata --json'. +nix flake metadata $flake3Dir --json | jq . # Test circular flake dependencies. cat > $flakeA/flake.nix < Date: Tue, 16 Mar 2021 18:51:17 +0100 Subject: [PATCH 042/510] Fix Nix to properly work with stores using a scoped IPv6 address According to RFC4007[1], IPv6 addresses can have a so-called zone_id separated from the actual address with `%` as delimiter. In contrast to Nix 2.3, the version on `master` doesn't recognize it as such: $ nix ping-store --store ssh://root@fe80::1%18 --experimental-features nix-command warning: 'ping-store' is a deprecated alias for 'store ping' error: --- Error ----------------------------------------------------------------- nix don't know how to open Nix store 'ssh://root@fe80::1%18' I modified the IPv6 match-regex accordingly to optionally detect this part of the address. 
As we don't seem to do anything special with it, I decided to leave it as part of the URL for now. Fixes #4490 [1] https://tools.ietf.org/html/rfc4007 --- src/libutil/tests/url.cc | 18 ++++++++++++++++++ src/libutil/url-parts.hh | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/src/libutil/tests/url.cc b/src/libutil/tests/url.cc index 80646ad3e..aff58e9ee 100644 --- a/src/libutil/tests/url.cc +++ b/src/libutil/tests/url.cc @@ -117,6 +117,24 @@ namespace nix { ASSERT_EQ(parsed, expected); } + TEST(parseURL, parseScopedRFC4007IPv6Address) { + auto s = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080"; + auto parsed = parseURL(s); + + ParsedURL expected { + .url = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080", + .base = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080", + .scheme = "http", + .authority = "[fe80::818c:da4d:8975:415c\%enp0s25]:8080", + .path = "", + .query = (StringMap) { }, + .fragment = "", + }; + + ASSERT_EQ(parsed, expected); + + } + TEST(parseURL, parseIPv6Address) { auto s = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080"; auto parsed = parseURL(s); diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh index 862d9fa6e..da10a6bbc 100644 --- a/src/libutil/url-parts.hh +++ b/src/libutil/url-parts.hh @@ -8,7 +8,7 @@ namespace nix { // URI stuff. const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])"; const static std::string schemeRegex = "(?:[a-z][a-z0-9+.-]*)"; -const static std::string ipv6AddressSegmentRegex = "[0-9a-fA-F:]+"; +const static std::string ipv6AddressSegmentRegex = "[0-9a-fA-F:]+(?:%\\w+)?"; const static std::string ipv6AddressRegex = "(?:\\[" + ipv6AddressSegmentRegex + "\\]|" + ipv6AddressSegmentRegex + ")"; const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])"; const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])"; From a61112aadf58c1578cbdcba32b1582d25ca7ed9b Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 17 Mar 2021 11:27:11 +0100 Subject: [PATCH 043/510] Remove unimplemented hashAlgoOpt It was in the header but never implemented. --- src/libstore/derivations.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index 061d70f69..2df440536 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -52,7 +52,7 @@ struct DerivationOutput DerivationOutputCAFloating, DerivationOutputDeferred > output; - std::optional hashAlgoOpt(const Store & store) const; + /* Note, when you use this function you should make sure that you're passing the right derivation name. 
When in doubt, you should use the safer interface provided by BasicDerivation::outputsAndOptPaths */ From ef83ced4e170130cb6f9acd1d253351b02490658 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 19 Mar 2021 17:21:37 +0100 Subject: [PATCH 044/510] Restore 'nix flake info' as a deprecated alias --- src/nix/flake.cc | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 5ce2e082c..a2b6c0303 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -230,6 +230,15 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON } }; +struct CmdFlakeInfo : CmdFlakeMetadata +{ + void run(nix::ref store) override + { + warn("'nix flake info' is a deprecated alias for 'nix flake metadata'"); + CmdFlakeMetadata::run(store); + } +}; + struct CmdFlakeCheck : FlakeCommand { bool build = true; @@ -1024,6 +1033,7 @@ struct CmdFlake : NixMultiCommand {"update", []() { return make_ref(); }}, {"lock", []() { return make_ref(); }}, {"metadata", []() { return make_ref(); }}, + {"info", []() { return make_ref(); }}, {"check", []() { return make_ref(); }}, {"init", []() { return make_ref(); }}, {"new", []() { return make_ref(); }}, From 3e0e443181997c52b0db19ae781948c573a634dd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 19 Mar 2021 17:56:39 +0100 Subject: [PATCH 045/510] ProgressBar: Respect verbosity level This makes its behaviour consistent with SimpleLogger. --- src/libmain/progress-bar.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc index 0e5432fca..15354549a 100644 --- a/src/libmain/progress-bar.cc +++ b/src/libmain/progress-bar.cc @@ -122,6 +122,7 @@ public: void log(Verbosity lvl, const FormatOrString & fs) override { + if (lvl > verbosity) return; auto state(state_.lock()); log(*state, lvl, fs.s); } From 1765711b68c8647b502c2c009dace9632e9300d7 Mon Sep 17 00:00:00 2001 From: Samuel Dionne-Riel Date: Wed, 17 Mar 2021 18:43:37 -0400 Subject: [PATCH 046/510] tests/config: Fix config test configuration First, "XDG_CONFIG_HOME" shouldn't be named "home", as it may be confusing compared with `$HOME`, which an upcoming test will be using. Then, using a fixed location for the test is problematic. Use `$TEST_ROOT` instead. 
--- tests/config.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/config.sh b/tests/config.sh index eaa46c395..1edc09c1a 100644 --- a/tests/config.sh +++ b/tests/config.sh @@ -1,15 +1,15 @@ source common.sh # Test that files are loaded from XDG by default -export XDG_CONFIG_HOME=/tmp/home -export XDG_CONFIG_DIRS=/tmp/dir1:/tmp/dir2 +export XDG_CONFIG_HOME=$TEST_ROOT/confighome +export XDG_CONFIG_DIRS=$TEST_ROOT/dir1:$TEST_ROOT/dir2 files=$(nix-build --verbose --version | grep "User config" | cut -d ':' -f2- | xargs) -[[ $files == "/tmp/home/nix/nix.conf:/tmp/dir1/nix/nix.conf:/tmp/dir2/nix/nix.conf" ]] +[[ $files == "$TEST_ROOT/confighome/nix/nix.conf:$TEST_ROOT/dir1/nix/nix.conf:$TEST_ROOT/dir2/nix/nix.conf" ]] # Test that setting NIX_USER_CONF_FILES overrides all the default user config files -export NIX_USER_CONF_FILES=/tmp/file1.conf:/tmp/file2.conf +export NIX_USER_CONF_FILES=$TEST_ROOT/file1.conf:$TEST_ROOT/file2.conf files=$(nix-build --verbose --version | grep "User config" | cut -d ':' -f2- | xargs) -[[ $files == "/tmp/file1.conf:/tmp/file2.conf" ]] +[[ $files == "$TEST_ROOT/file1.conf:$TEST_ROOT/file2.conf" ]] # Test that it's possible to load the config from a custom location here=$(readlink -f "$(dirname "${BASH_SOURCE[0]}")") @@ -24,4 +24,4 @@ exp_cores=$(nix show-config | grep '^cores' | cut -d '=' -f 2 | xargs) exp_features=$(nix show-config | grep '^experimental-features' | cut -d '=' -f 2 | xargs) [[ $prev != $exp_cores ]] [[ $exp_cores == "4242" ]] -[[ $exp_features == "nix-command flakes" ]] \ No newline at end of file +[[ $exp_features == "nix-command flakes" ]] From bf07581497d55ade85d80e5d9ad9bf5d962e3403 Mon Sep 17 00:00:00 2001 From: Samuel Dionne-Riel Date: Wed, 17 Mar 2021 19:02:11 -0400 Subject: [PATCH 047/510] tests: Test `.config` stays clean with XDG_CONFIG_HOME set --- tests/config.sh | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/tests/config.sh b/tests/config.sh index 1edc09c1a..01c78f2c3 100644 --- a/tests/config.sh +++ b/tests/config.sh @@ -1,5 +1,31 @@ source common.sh +# Isolate the home for this test. +# Other tests (e.g. flake registry tests) could be writing to $HOME in parallel. +export HOME=$TEST_ROOT/userhome + +# Test that using XDG_CONFIG_HOME works +# Assert the config folder didn't exist initially. +[ ! -e "$HOME/.config" ] +# Without XDG_CONFIG_HOME, creates $HOME/.config +unset XDG_CONFIG_HOME +# Run against the nix registry to create the config dir +# (Tip: this relies on removing non-existent entries being a no-op!) +nix registry remove userhome-without-xdg +# Verifies it created it +[ -e "$HOME/.config" ] +# Remove the directory it created +rm -rf "$HOME/.config" +# Run the same test, but with XDG_CONFIG_HOME +export XDG_CONFIG_HOME=$TEST_ROOT/confighome +# Assert the XDG_CONFIG_HOME/nix path does not exist yet. +[ ! -e "$TEST_ROOT/confighome/nix" ] +nix registry remove userhome-with-xdg +# Verifies the confighome path has been created +[ -e "$TEST_ROOT/confighome/nix" ] +# Assert the .config folder hasn't been created. +[ ! 
-e "$HOME/.config" ] + # Test that files are loaded from XDG by default export XDG_CONFIG_HOME=$TEST_ROOT/confighome export XDG_CONFIG_DIRS=$TEST_ROOT/dir1:$TEST_ROOT/dir2 From 66b857244ff062f6bb97c23e2423338ad242f7a1 Mon Sep 17 00:00:00 2001 From: Samuel Dionne-Riel Date: Wed, 17 Mar 2021 17:56:57 -0400 Subject: [PATCH 048/510] Use the appropriate config dir for the registry --- src/libfetchers/registry.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc index 81b2227de..74376adc0 100644 --- a/src/libfetchers/registry.cc +++ b/src/libfetchers/registry.cc @@ -114,7 +114,7 @@ static std::shared_ptr getSystemRegistry() Path getUserRegistryPath() { - return getHome() + "/.config/nix/registry.json"; + return getConfigDir() + "/nix/registry.json"; } std::shared_ptr getUserRegistry() From 9d309de0de9a09d36717abd02a66b51815397d66 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 28 Feb 2021 18:42:46 +0000 Subject: [PATCH 049/510] Clean up serialization for `BuildResult` A few versioning mistakes were corrected: - In 27b5747ca7b5599768083dde5fa4d36bfbb0f66f, Daemon protocol had some version `>= 0xc` that should have been `>= 0x1c`, or `28` since the other conditions used decimal. - In a2b69660a9b326b95d48bd222993c5225bbd5b5f, legacy SSH gated new CAS info on version 6, but version 5 in the server. It is now 6 everywhere. Additionally, legacy ssh was sending over more metadata than the daemon one was. The daemon now sends that data too. CC @regnat Co-authored-by: Cole Helbling --- src/libstore/daemon.cc | 5 ++++- src/libstore/remote-store.cc | 20 ++++++++++++++------ src/libstore/serve-protocol.hh | 2 +- src/libstore/worker-protocol.hh | 2 +- src/nix-store/nix-store.cc | 2 +- 5 files changed, 21 insertions(+), 10 deletions(-) diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index ba7959263..dc9cd2cbd 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -575,7 +575,10 @@ static void performOp(TunnelLogger * logger, ref store, auto res = store->buildDerivation(drvPath, drv, buildMode); logger->stopWork(); to << res.status << res.errorMsg; - if (GET_PROTOCOL_MINOR(clientVersion) >= 0xc) { + if (GET_PROTOCOL_MINOR(clientVersion) >= 29) { + out << res.timesBuilt << res.isNonDeterministic << res.startTime << res.stopTime; + } + if (GET_PROTOCOL_MINOR(clientVersion) >= 28) { worker_proto::write(*store, to, res.builtOutputs); } break; diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 0d884389a..b01cb5a62 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -62,9 +62,15 @@ void write(const Store & store, Sink & out, const Realisation & realisation) { out << realisation.toJSON().dump(); } DrvOutput read(const Store & store, Source & from, Phantom _) -{ return DrvOutput::parse(readString(from)); } +{ + return DrvOutput::parse(readString(from)); +} + void write(const Store & store, Sink & out, const DrvOutput & drvOutput) -{ out << drvOutput.to_string(); } +{ + out << drvOutput.to_string(); +} + std::optional read(const Store & store, Source & from, Phantom> _) { @@ -677,10 +683,12 @@ BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicD conn->to << buildMode; conn.processStderr(); BuildResult res; - unsigned int status; - conn->from >> status >> res.errorMsg; - res.status = (BuildResult::Status) status; - if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 0xc) { + res.status = (BuildResult::Status) readInt(conn->from); + 
conn->from >> res.errorMsg; + if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 29) { + from >> res.timesBuilt >> res.isNonDeterministic >> res.startTime >> res.stopTime; + } + if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 28) { auto builtOutputs = worker_proto::read(*this, conn->from, Phantom {}); res.builtOutputs = builtOutputs; } diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh index 0a17387cb..02d0810cc 100644 --- a/src/libstore/serve-protocol.hh +++ b/src/libstore/serve-protocol.hh @@ -5,7 +5,7 @@ namespace nix { #define SERVE_MAGIC_1 0x390c9deb #define SERVE_MAGIC_2 0x5452eecb -#define SERVE_PROTOCOL_VERSION 0x206 +#define SERVE_PROTOCOL_VERSION (2 << 8 | 6) #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index 95f08bc9a..be071dd78 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -9,7 +9,7 @@ namespace nix { #define WORKER_MAGIC_1 0x6e697863 #define WORKER_MAGIC_2 0x6478696f -#define PROTOCOL_VERSION 0x11c +#define PROTOCOL_VERSION (1 << 8 | 29) #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 94d4881dd..b684feccb 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -905,7 +905,7 @@ static void opServe(Strings opFlags, Strings opArgs) if (GET_PROTOCOL_MINOR(clientVersion) >= 3) out << status.timesBuilt << status.isNonDeterministic << status.startTime << status.stopTime; - if (GET_PROTOCOL_MINOR(clientVersion >= 5)) { + if (GET_PROTOCOL_MINOR(clientVersion >= 6)) { worker_proto::write(*store, out, status.builtOutputs); } From f44206e71953501af502354ab1c747aa2412d676 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 22 Mar 2021 15:18:48 +0000 Subject: [PATCH 050/510] Fix typos in the last PR #4656 --- src/libstore/daemon.cc | 2 +- src/libstore/remote-store.cc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index dc9cd2cbd..f28ab6438 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -576,7 +576,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->stopWork(); to << res.status << res.errorMsg; if (GET_PROTOCOL_MINOR(clientVersion) >= 29) { - out << res.timesBuilt << res.isNonDeterministic << res.startTime << res.stopTime; + to << res.timesBuilt << res.isNonDeterministic << res.startTime << res.stopTime; } if (GET_PROTOCOL_MINOR(clientVersion) >= 28) { worker_proto::write(*store, to, res.builtOutputs); diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index b01cb5a62..ccf095dc2 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -686,7 +686,7 @@ BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicD res.status = (BuildResult::Status) readInt(conn->from); conn->from >> res.errorMsg; if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 29) { - from >> res.timesBuilt >> res.isNonDeterministic >> res.startTime >> res.stopTime; + conn->from >> res.timesBuilt >> res.isNonDeterministic >> res.startTime >> res.stopTime; } if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 28) { auto builtOutputs = worker_proto::read(*this, conn->from, Phantom {}); From 0f40561c78bb5566b42d054620b0576e14fe4627 Mon Sep 17 00:00:00 2001 From: DavHau Date: Tue, 23 Mar 2021 10:19:00 +0700 Subject: [PATCH 051/510] nix.conf 
builders: refer to manual page --- src/libstore/globals.hh | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index bf0767dfa..3e4ead76c 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -207,15 +207,8 @@ public: Setting builders{ this, "@" + nixConfDir + "/machines", "builders", R"( - A semicolon-separated list of build machines, where each machine follows this format: - - {protocol}://{user}@{host} [{comma sep. systems} - {maxJobs} {speedFactor} {comma sep. features}] - - Examples: - - ssh://root@builder1.com - - ssh://root@builder2.com x86_64-linux,aarch64-linux - 40 20 nixos-test,benchmark,big-parallel,kvm + A semicolon-separated list of build machines. + For the exact format and examples, see [the manual chapter on remote builds](../advanced-topics/distributed-builds.md) )"}; Setting buildersUseSubstitutes{ From 71f92741ec979c1059938a638b7fc8da6d7b0936 Mon Sep 17 00:00:00 2001 From: Nicolas Stig124 FORMICHELLA Date: Tue, 23 Mar 2021 16:23:24 +0100 Subject: [PATCH 052/510] Added Debian-based OS's profiles --- scripts/install-multi-user.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index 5e8b4ac18..4cc11d210 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -31,7 +31,7 @@ readonly NIX_FIRST_BUILD_UID="30001" readonly NIX_ROOT="/nix" readonly NIX_EXTRA_CONF=${NIX_EXTRA_CONF:-} -readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshenv") +readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshenv" "/etc/bash.bashrc" "/etc/zsh/zshenv") readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix" readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh" From 4638bcfb2cfb74cb5029c0da0af38bb7ca4b4a6f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 26 Mar 2021 16:14:38 +0100 Subject: [PATCH 053/510] Fix some typos Fixes #4671. --- doc/manual/src/command-ref/nix-store.md | 2 +- src/libexpr/flake/flake.hh | 2 +- src/libstore/globals.cc | 2 +- src/libutil/config.cc | 28 +++++++++++----------- src/libutil/config.hh | 22 ++++++++--------- src/libutil/tests/config.cc | 32 ++++++++++++------------- src/libutil/util.cc | 2 +- src/nix/build.md | 2 +- src/nix/flake-init.md | 2 +- src/nix/flake.md | 4 ++-- src/nix/main.cc | 8 +++---- src/nix/store-prefetch-file.md | 2 +- 12 files changed, 54 insertions(+), 54 deletions(-) diff --git a/doc/manual/src/command-ref/nix-store.md b/doc/manual/src/command-ref/nix-store.md index 361c20cc9..49d06f31e 100644 --- a/doc/manual/src/command-ref/nix-store.md +++ b/doc/manual/src/command-ref/nix-store.md @@ -79,7 +79,7 @@ paths. Realisation is a somewhat overloaded term: system). If the path is already valid, we are done immediately. Otherwise, the path and any missing paths in its closure may be produced through substitutes. If there are no (successful) - subsitutes, realisation fails. + substitutes, realisation fails. The output path of each derivation is printed on standard output. (For non-derivations argument, the argument itself is printed.) diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh index 65ed1ad0a..d17d5e183 100644 --- a/src/libexpr/flake/flake.hh +++ b/src/libexpr/flake/flake.hh @@ -113,7 +113,7 @@ struct LockFlags /* Whether to commit changes to flake.lock. */ bool commitLockFile = false; - /* Flake inputs to be overriden. 
*/ + /* Flake inputs to be overridden. */ std::map inputOverrides; /* Flake inputs to be updated. This means that any existing lock diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 8d44003f4..d3b27d7be 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -81,7 +81,7 @@ void loadConfFile() /* We only want to send overrides to the daemon, i.e. stuff from ~/.nix/nix.conf or the command line. */ - globalConfig.resetOverriden(); + globalConfig.resetOverridden(); auto files = settings.nixUserConfFiles; for (auto file = files.rbegin(); file != files.rend(); file++) { diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 7467e5ac0..bda07cd55 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -20,7 +20,7 @@ bool Config::set(const std::string & name, const std::string & value) return false; } i->second.setting->set(value, append); - i->second.setting->overriden = true; + i->second.setting->overridden = true; return true; } @@ -35,7 +35,7 @@ void Config::addSetting(AbstractSetting * setting) auto i = unknownSettings.find(setting->name); if (i != unknownSettings.end()) { setting->set(i->second); - setting->overriden = true; + setting->overridden = true; unknownSettings.erase(i); set = true; } @@ -48,7 +48,7 @@ void Config::addSetting(AbstractSetting * setting) alias, setting->name); else { setting->set(i->second); - setting->overriden = true; + setting->overridden = true; unknownSettings.erase(i); set = true; } @@ -69,10 +69,10 @@ void AbstractConfig::reapplyUnknownSettings() set(s.first, s.second); } -void Config::getSettings(std::map & res, bool overridenOnly) +void Config::getSettings(std::map & res, bool overriddenOnly) { for (auto & opt : _settings) - if (!opt.second.isAlias && (!overridenOnly || opt.second.setting->overriden)) + if (!opt.second.isAlias && (!overriddenOnly || opt.second.setting->overridden)) res.emplace(opt.first, SettingInfo{opt.second.setting->to_string(), opt.second.setting->description}); } @@ -136,10 +136,10 @@ void AbstractConfig::applyConfigFile(const Path & path) } catch (SysError &) { } } -void Config::resetOverriden() +void Config::resetOverridden() { for (auto & s : _settings) - s.second.setting->overriden = false; + s.second.setting->overridden = false; } nlohmann::json Config::toJSON() @@ -169,7 +169,7 @@ AbstractSetting::AbstractSetting( void AbstractSetting::setDefault(const std::string & str) { - if (!overriden) set(str); + if (!overridden) set(str); } nlohmann::json AbstractSetting::toJSON() @@ -203,7 +203,7 @@ void BaseSetting::convertToArg(Args & args, const std::string & category) .description = fmt("Set the `%s` setting.", name), .category = category, .labels = {"value"}, - .handler = {[=](std::string s) { overriden = true; set(s); }}, + .handler = {[=](std::string s) { overridden = true; set(s); }}, }); if (isAppendable()) @@ -212,7 +212,7 @@ void BaseSetting::convertToArg(Args & args, const std::string & category) .description = fmt("Append to the `%s` setting.", name), .category = category, .labels = {"value"}, - .handler = {[=](std::string s) { overriden = true; set(s, true); }}, + .handler = {[=](std::string s) { overridden = true; set(s, true); }}, }); } @@ -365,16 +365,16 @@ bool GlobalConfig::set(const std::string & name, const std::string & value) return false; } -void GlobalConfig::getSettings(std::map & res, bool overridenOnly) +void GlobalConfig::getSettings(std::map & res, bool overriddenOnly) { for (auto & config : *configRegistrations) - config->getSettings(res, overridenOnly); + 
config->getSettings(res, overriddenOnly); } -void GlobalConfig::resetOverriden() +void GlobalConfig::resetOverridden() { for (auto & config : *configRegistrations) - config->resetOverriden(); + config->resetOverridden(); } nlohmann::json GlobalConfig::toJSON() diff --git a/src/libutil/config.hh b/src/libutil/config.hh index 71e31656d..bf81b4892 100644 --- a/src/libutil/config.hh +++ b/src/libutil/config.hh @@ -71,9 +71,9 @@ public: /** * Adds the currently known settings to the given result map `res`. * - res: map to store settings in - * - overridenOnly: when set to true only overridden settings will be added to `res` + * - overriddenOnly: when set to true only overridden settings will be added to `res` */ - virtual void getSettings(std::map & res, bool overridenOnly = false) = 0; + virtual void getSettings(std::map & res, bool overriddenOnly = false) = 0; /** * Parses the configuration in `contents` and applies it @@ -91,7 +91,7 @@ public: /** * Resets the `overridden` flag of all Settings */ - virtual void resetOverriden() = 0; + virtual void resetOverridden() = 0; /** * Outputs all settings to JSON @@ -127,7 +127,7 @@ public: MyClass() : Config(readConfigFile("/etc/my-app.conf")) { - std::cout << foo << "\n"; // will print 123 unless overriden + std::cout << foo << "\n"; // will print 123 unless overridden } }; */ @@ -163,9 +163,9 @@ public: void addSetting(AbstractSetting * setting); - void getSettings(std::map & res, bool overridenOnly = false) override; + void getSettings(std::map & res, bool overriddenOnly = false) override; - void resetOverriden() override; + void resetOverridden() override; nlohmann::json toJSON() override; @@ -184,7 +184,7 @@ public: int created = 123; - bool overriden = false; + bool overridden = false; void setDefault(const std::string & str); @@ -215,7 +215,7 @@ protected: virtual void convertToArg(Args & args, const std::string & category); - bool isOverriden() const { return overriden; } + bool isOverridden() const { return overridden; } }; /* A setting of type T. 
*/ @@ -252,7 +252,7 @@ public: virtual void override(const T & v) { - overriden = true; + overridden = true; value = v; } @@ -324,9 +324,9 @@ struct GlobalConfig : public AbstractConfig bool set(const std::string & name, const std::string & value) override; - void getSettings(std::map & res, bool overridenOnly = false) override; + void getSettings(std::map & res, bool overriddenOnly = false) override; - void resetOverriden() override; + void resetOverridden() override; nlohmann::json toJSON() override; diff --git a/src/libutil/tests/config.cc b/src/libutil/tests/config.cc index c305af9f5..0ebdaf3db 100644 --- a/src/libutil/tests/config.cc +++ b/src/libutil/tests/config.cc @@ -29,20 +29,20 @@ namespace nix { std::map settings; Setting foo{&config, value, "name-of-the-setting", "description"}; - config.getSettings(settings, /* overridenOnly = */ false); + config.getSettings(settings, /* overriddenOnly = */ false); const auto iter = settings.find("name-of-the-setting"); ASSERT_NE(iter, settings.end()); ASSERT_EQ(iter->second.value, ""); ASSERT_EQ(iter->second.description, "description\n"); } - TEST(Config, getDefinedOverridenSettingNotSet) { + TEST(Config, getDefinedOverriddenSettingNotSet) { Config config; std::string value; std::map settings; Setting foo{&config, value, "name-of-the-setting", "description"}; - config.getSettings(settings, /* overridenOnly = */ true); + config.getSettings(settings, /* overriddenOnly = */ true); const auto e = settings.find("name-of-the-setting"); ASSERT_EQ(e, settings.end()); } @@ -55,7 +55,7 @@ namespace nix { setting.assign("value"); - config.getSettings(settings, /* overridenOnly = */ false); + config.getSettings(settings, /* overriddenOnly = */ false); const auto iter = settings.find("name-of-the-setting"); ASSERT_NE(iter, settings.end()); ASSERT_EQ(iter->second.value, "value"); @@ -69,7 +69,7 @@ namespace nix { ASSERT_TRUE(config.set("name-of-the-setting", "value")); - config.getSettings(settings, /* overridenOnly = */ false); + config.getSettings(settings, /* overriddenOnly = */ false); const auto e = settings.find("name-of-the-setting"); ASSERT_NE(e, settings.end()); ASSERT_EQ(e->second.value, "value"); @@ -100,7 +100,7 @@ namespace nix { { std::map settings; - config.getSettings(settings, /* overridenOnly = */ false); + config.getSettings(settings, /* overriddenOnly = */ false); ASSERT_EQ(settings.find("key"), settings.end()); } @@ -108,17 +108,17 @@ namespace nix { { std::map settings; - config.getSettings(settings, /* overridenOnly = */ false); + config.getSettings(settings, /* overriddenOnly = */ false); ASSERT_EQ(settings["key"].value, "value"); } } - TEST(Config, resetOverriden) { + TEST(Config, resetOverridden) { Config config; - config.resetOverriden(); + config.resetOverridden(); } - TEST(Config, resetOverridenWithSetting) { + TEST(Config, resetOverriddenWithSetting) { Config config; Setting setting{&config, "", "name-of-the-setting", "description"}; @@ -127,7 +127,7 @@ namespace nix { setting.set("foo"); ASSERT_EQ(setting.get(), "foo"); - config.getSettings(settings, /* overridenOnly = */ true); + config.getSettings(settings, /* overriddenOnly = */ true); ASSERT_TRUE(settings.empty()); } @@ -135,18 +135,18 @@ namespace nix { std::map settings; setting.override("bar"); - ASSERT_TRUE(setting.overriden); + ASSERT_TRUE(setting.overridden); ASSERT_EQ(setting.get(), "bar"); - config.getSettings(settings, /* overridenOnly = */ true); + config.getSettings(settings, /* overriddenOnly = */ true); ASSERT_FALSE(settings.empty()); } { std::map settings; 
- config.resetOverriden(); - ASSERT_FALSE(setting.overriden); - config.getSettings(settings, /* overridenOnly = */ true); + config.resetOverridden(); + ASSERT_FALSE(setting.overridden); + config.getSettings(settings, /* overriddenOnly = */ true); ASSERT_TRUE(settings.empty()); } } diff --git a/src/libutil/util.cc b/src/libutil/util.cc index ef37275ac..dea9c74b7 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -1590,7 +1590,7 @@ void startSignalHandlerThread() updateWindowSize(); if (sigprocmask(SIG_BLOCK, nullptr, &savedSignalMask)) - throw SysError("quering signal mask"); + throw SysError("querying signal mask"); sigset_t set; sigemptyset(&set); diff --git a/src/nix/build.md b/src/nix/build.md index c2f3e387a..20138b7e0 100644 --- a/src/nix/build.md +++ b/src/nix/build.md @@ -81,7 +81,7 @@ path installables are substituted. Unless `--no-link` is specified, after a successful build, it creates symlinks to the store paths of the installables. These symlinks have -the prefix `./result` by default; this can be overriden using the +the prefix `./result` by default; this can be overridden using the `--out-link` option. Each symlink has a suffix `--`, where *N* is the index of the installable (with the left-most installable having index 0), and *outname* is the symbolic derivation output name diff --git a/src/nix/flake-init.md b/src/nix/flake-init.md index c66154ad5..890038016 100644 --- a/src/nix/flake-init.md +++ b/src/nix/flake-init.md @@ -24,7 +24,7 @@ R""( This command creates a flake in the current directory by copying the files of a template. It will not overwrite existing files. The default -template is `templates#defaultTemplate`, but this can be overriden +template is `templates#defaultTemplate`, but this can be overridden using `-t`. # Template definitions diff --git a/src/nix/flake.md b/src/nix/flake.md index 440c45dd1..0035195e5 100644 --- a/src/nix/flake.md +++ b/src/nix/flake.md @@ -70,7 +70,7 @@ Here are some examples of flake references in their URL-like representation: * `/home/alice/src/patchelf`: A flake in some other directory. * `nixpkgs`: The `nixpkgs` entry in the flake registry. * `nixpkgs/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293`: The `nixpkgs` - entry in the flake registry, with its Git revision overriden to a + entry in the flake registry, with its Git revision overridden to a specific value. * `github:NixOS/nixpkgs`: The `master` branch of the `NixOS/nixpkgs` repository on GitHub. @@ -377,7 +377,7 @@ outputs = { self, nixpkgs, grcov }: { }; ``` -Transitive inputs can be overriden from a `flake.nix` file. For +Transitive inputs can be overridden from a `flake.nix` file. For example, the following overrides the `nixpkgs` input of the `nixops` input: diff --git a/src/nix/main.cc b/src/nix/main.cc index 06e221682..f8701ee56 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -309,13 +309,13 @@ void mainWrapped(int argc, char * * argv) if (!args.useNet) { // FIXME: should check for command line overrides only. 
- if (!settings.useSubstitutes.overriden) + if (!settings.useSubstitutes.overridden) settings.useSubstitutes = false; - if (!settings.tarballTtl.overriden) + if (!settings.tarballTtl.overridden) settings.tarballTtl = std::numeric_limits::max(); - if (!fileTransferSettings.tries.overriden) + if (!fileTransferSettings.tries.overridden) fileTransferSettings.tries = 0; - if (!fileTransferSettings.connectTimeout.overriden) + if (!fileTransferSettings.connectTimeout.overridden) fileTransferSettings.connectTimeout = 1; } diff --git a/src/nix/store-prefetch-file.md b/src/nix/store-prefetch-file.md index 1663b847b..f9fdcbc57 100644 --- a/src/nix/store-prefetch-file.md +++ b/src/nix/store-prefetch-file.md @@ -27,6 +27,6 @@ the resulting store path and the cryptographic hash of the contents of the file. The name component of the store path defaults to the last component of -*url*, but this can be overriden using `--name`. +*url*, but this can be overridden using `--name`. )"" From dd77f71afe6733e9790dd001125c423cb648b7ce Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 26 Mar 2021 17:10:15 +0100 Subject: [PATCH 054/510] LocalBinaryCacheStore::upsertFile(): Fix race When multiple threads try to upsert the same file, this could fail. Fixes #4667. --- src/libstore/local-binary-cache-store.cc | 5 ++++- tests/ca/substitute.sh | 3 +++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc index a58b7733f..964c4017e 100644 --- a/src/libstore/local-binary-cache-store.cc +++ b/src/libstore/local-binary-cache-store.cc @@ -2,6 +2,8 @@ #include "globals.hh" #include "nar-info-disk-cache.hh" +#include + namespace nix { struct LocalBinaryCacheStoreConfig : virtual BinaryCacheStoreConfig @@ -50,7 +52,8 @@ protected: const std::string & mimeType) override { auto path2 = binaryCacheDir + "/" + path; - Path tmp = path2 + ".tmp." 
+ std::to_string(getpid()); + static std::atomic counter{0}; + Path tmp = fmt("%s.tmp.%d.%d", path2, getpid(), ++counter); AutoDelete del(tmp, false); StreamToSourceAdapter source(istream); writeFile(tmp, source); diff --git a/tests/ca/substitute.sh b/tests/ca/substitute.sh index 79a6ef8b1..b44fe499a 100644 --- a/tests/ca/substitute.sh +++ b/tests/ca/substitute.sh @@ -6,6 +6,8 @@ source common.sh sed -i 's/experimental-features .*/& ca-derivations ca-references/' "$NIX_CONF_DIR"/nix.conf +rm -rf $TEST_ROOT/binary_cache + export REMOTE_STORE=file://$TEST_ROOT/binary_cache buildDrvs () { @@ -13,6 +15,7 @@ buildDrvs () { } # Populate the remote cache +clearStore buildDrvs --post-build-hook ../push-to-store.sh # Restart the build on an empty store, ensuring that we don't build From ce791535f63502215d3d41b6ca8d9e62c5fb72e9 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Mar 2021 14:54:05 +0200 Subject: [PATCH 055/510] nixpkgs/master compatibility --- flake.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index e59ec9a35..925017472 100644 --- a/flake.nix +++ b/flake.nix @@ -90,7 +90,7 @@ lowdown gmock ] - ++ lib.optionals stdenv.isLinux [libseccomp utillinuxMinimal] + ++ lib.optionals stdenv.isLinux [libseccomp (pkgs.util-linuxMinimal or pkgs.utillinuxMinimal)] ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium ++ lib.optional stdenv.isx86_64 libcpuid; From edd606ae62e213c2a30ff76b8eea4f75ea703d41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Forsman?= Date: Sat, 27 Mar 2021 14:15:28 +0100 Subject: [PATCH 056/510] fetchGit: don't prefix "refs/heads/" on ref = "HEAD" This fixes builtins.fetchGit { url = ...; ref = "HEAD"; }, that works in stable nix (v2.3.10), but is broken in nix master: $ ./result/bin/nix repl Welcome to Nix version 2.4pre19700101_dd77f71. Type :? for help. nix-repl> builtins.fetchGit { url = "https://github.com/NixOS/nix"; ref = "HEAD"; } fetching Git repository 'https://github.com/NixOS/nix'fatal: couldn't find remote ref refs/heads/HEAD error: program 'git' failed with exit code 128 The documentation for builtins.fetchGit says ref = "HEAD" is the default, so it should also be supported to explicitly pass it. I came across this issue because poetry2nix can use ref = "HEAD" in some situations. Fixes #4674. --- src/libfetchers/git.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 4f9db1bcd..b9a240b13 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -365,7 +365,9 @@ struct GitInputScheme : InputScheme ? "refs/*" : ref->compare(0, 5, "refs/") == 0 ? *ref - : "refs/heads/" + *ref; + : ref == "HEAD" + ? 
*ref + : "refs/heads/" + *ref; runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) }); } catch (Error & e) { if (!pathExists(localRefFile)) throw; From f2a799b16d193a651f682da3ad2103c20ac82d48 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Forsman?= Date: Tue, 30 Mar 2021 11:39:37 +0200 Subject: [PATCH 057/510] tests: check that builtins.fetchGit { ..., ref = "HEAD"; } works --- tests/fetchGit.sh | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh index 1e8963d76..88744ee7f 100644 --- a/tests/fetchGit.sh +++ b/tests/fetchGit.sh @@ -179,3 +179,13 @@ git clone --depth 1 file://$repo $TEST_ROOT/shallow path6=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).outPath") [[ $path3 = $path6 ]] [[ $(nix eval --impure --expr "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).revCount or 123") == 123 ]] + +# Explicit ref = "HEAD" should work, and produce the same outPath as without ref +path7=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).outPath") +path8=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; }).outPath") +[[ $path7 = $path8 ]] + +# ref = "HEAD" should fetch the HEAD revision +rev4=$(git -C $repo rev-parse HEAD) +rev4_nix=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).rev") +[[ $rev4 = $rev4_nix ]] From f3f228700a52857fe6e8632df4e935551ea219ff Mon Sep 17 00:00:00 2001 From: Mykola Orliuk Date: Wed, 31 Mar 2021 04:20:41 +0200 Subject: [PATCH 058/510] canonPath in one pass --- src/libutil/util.cc | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/libutil/util.cc b/src/libutil/util.cc index dea9c74b7..c092076f3 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -143,16 +143,18 @@ Path canonPath(const Path & path, bool resolveSymlinks) s += '/'; while (i != end && *i != '/') s += *i++; - /* If s points to a symlink, resolve it and restart (since - the symlink target might contain new symlinks). 
*/ + /* If s points to a symlink, resolve it and continue from there */ if (resolveSymlinks && isLink(s)) { if (++followCount >= maxFollow) throw Error("infinite symlink recursion in path '%1%'", path); - temp = absPath(readLink(s), dirOf(s)) - + string(i, end); - i = temp.begin(); /* restart */ + temp = readLink(s) + string(i, end); + i = temp.begin(); end = temp.end(); - s = ""; + if (!temp.empty() && temp[0] == '/') { + s.clear(); /* restart for symlinks pointing to absolute path */ + } else { + s = dirOf(s); + } } } } From f66fb5fb5b1478a5da39d0e9cc0f835272199c5d Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 23 Mar 2021 12:06:43 +0100 Subject: [PATCH 059/510] flake.nix: Build nix with strictDeps = true --- flake.nix | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/flake.nix b/flake.nix index 58dc5019d..adb796a05 100644 --- a/flake.nix +++ b/flake.nix @@ -233,6 +233,8 @@ separateDebugInfo = true; + strictDeps = true; + passthru.perl-bindings = with final; stdenv.mkDerivation { name = "nix-perl-${version}"; @@ -517,6 +519,8 @@ installCheckFlags = "sysconfdir=$(out)/etc"; stripAllList = ["bin"]; + + strictDeps = true; }; }); From c3090bc6fdf6e052cd4c56fce6aeb11ddeb5dd6f Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 24 Mar 2021 14:44:20 +0100 Subject: [PATCH 060/510] tests/*: show when tests are skipped --- tests/build-remote.sh | 4 ++-- tests/gc-runtime.sh | 2 +- tests/linux-sandbox.sh | 4 ++-- tests/recursive.sh | 2 +- tests/shell.sh | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/build-remote.sh b/tests/build-remote.sh index 04848e4b5..70f82e939 100644 --- a/tests/build-remote.sh +++ b/tests/build-remote.sh @@ -1,5 +1,5 @@ -if ! canUseSandbox; then exit; fi -if ! [[ $busybox =~ busybox ]]; then exit; fi +if ! canUseSandbox; then exit 99; fi +if ! [[ $busybox =~ busybox ]]; then exit 99; fi unset NIX_STORE_DIR unset NIX_STATE_DIR diff --git a/tests/gc-runtime.sh b/tests/gc-runtime.sh index 4c5028005..6094959cb 100644 --- a/tests/gc-runtime.sh +++ b/tests/gc-runtime.sh @@ -4,7 +4,7 @@ case $system in *linux*) ;; *) - exit 0; + exit 99; esac set -m # enable job control, needed for kill diff --git a/tests/linux-sandbox.sh b/tests/linux-sandbox.sh index 70a90a907..eac62d461 100644 --- a/tests/linux-sandbox.sh +++ b/tests/linux-sandbox.sh @@ -2,13 +2,13 @@ source common.sh clearStore -if ! canUseSandbox; then exit; fi +if ! canUseSandbox; then exit 99; fi # Note: we need to bind-mount $SHELL into the chroot. Currently we # only support the case where $SHELL is in the Nix store, because # otherwise things get complicated (e.g. if it's in /bin, do we need # /lib as well?). -if [[ ! $SHELL =~ /nix/store ]]; then exit; fi +if [[ ! $SHELL =~ /nix/store ]]; then exit 99; fi chmod -R u+w $TEST_ROOT/store0 || true rm -rf $TEST_ROOT/store0 diff --git a/tests/recursive.sh b/tests/recursive.sh index b020ec710..a55b061b5 100644 --- a/tests/recursive.sh +++ b/tests/recursive.sh @@ -1,7 +1,7 @@ source common.sh # FIXME -if [[ $(uname) != Linux ]]; then exit; fi +if [[ $(uname) != Linux ]]; then exit 99; fi clearStore diff --git a/tests/shell.sh b/tests/shell.sh index 7a9ee8ab0..2b85bb337 100644 --- a/tests/shell.sh +++ b/tests/shell.sh @@ -6,7 +6,7 @@ clearCache nix shell -f shell-hello.nix hello -c hello | grep 'Hello World' nix shell -f shell-hello.nix hello -c hello NixOS | grep 'Hello NixOS' -if ! canUseSandbox; then exit; fi +if ! 
canUseSandbox; then exit 99; fi chmod -R u+w $TEST_ROOT/store0 || true rm -rf $TEST_ROOT/store0 From ff1a2143aa1338ccba0e2bc5ccd66bd3df8baa31 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 24 Mar 2021 14:50:15 +0100 Subject: [PATCH 061/510] flake.nix: Make the sandbox tests work again --- flake.nix | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/flake.nix b/flake.nix index adb796a05..9a758eafa 100644 --- a/flake.nix +++ b/flake.nix @@ -78,7 +78,8 @@ buildPackages.git buildPackages.mercurial buildPackages.jq - ]; + ] + ++ lib.optionals stdenv.isLinux [(pkgs.util-linuxMinimal or pkgs.utillinuxMinimal)]; buildDeps = [ curl @@ -90,7 +91,7 @@ lowdown gmock ] - ++ lib.optionals stdenv.isLinux [libseccomp (pkgs.util-linuxMinimal or pkgs.utillinuxMinimal)] + ++ lib.optionals stdenv.isLinux [libseccomp] ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium ++ lib.optional stdenv.isx86_64 libcpuid; From 5926200db09ca4d0c5769edf24a3cf2e9f472d23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn=20Gohla?= <51823984+cgohla@users.noreply.github.com> Date: Thu, 1 Apr 2021 22:54:09 +0100 Subject: [PATCH 062/510] [prerequisites]: add JSON lib dependency --- doc/manual/src/installation/prerequisites-source.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/manual/src/installation/prerequisites-source.md b/doc/manual/src/installation/prerequisites-source.md index 6825af707..12758c5e1 100644 --- a/doc/manual/src/installation/prerequisites-source.md +++ b/doc/manual/src/installation/prerequisites-source.md @@ -69,3 +69,6 @@ `--disable-seccomp-sandboxing` option to the `configure` script (Not recommended unless your system doesn't support `libseccomp`). To get the library, visit . + + - Niels Lohmann's [JSON library](https://github.com/nlohmann/json). 
+ From 00f00a995458776e33fdda692abe2099196ac566 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Domen=20Ko=C5=BEar?= Date: Fri, 2 Apr 2021 21:32:09 +0200 Subject: [PATCH 063/510] bump actions --- .github/workflows/test.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2531a7d35..33035ca1e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,9 +13,9 @@ jobs: - uses: actions/checkout@v2.3.4 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v12 + - uses: cachix/install-nix-action@v13 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/cachix-action@v8 + - uses: cachix/cachix-action@v9 with: name: '${{ env.CACHIX_NAME }}' signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}' @@ -44,8 +44,8 @@ jobs: with: fetch-depth: 0 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@v12 - - uses: cachix/cachix-action@v8 + - uses: cachix/install-nix-action@v13 + - uses: cachix/cachix-action@v9 with: name: '${{ env.CACHIX_NAME }}' signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}' @@ -62,7 +62,7 @@ jobs: steps: - uses: actions/checkout@v2.3.4 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@master + - uses: cachix/install-nix-action@v13 with: install_url: '${{needs.installer.outputs.installerURL}}' install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve" From f7d9f7c3381acef38e4db2bb2f9e0287c289be54 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 1 Mar 2021 05:48:01 +0000 Subject: [PATCH 064/510] Pull out Buildable into its own file/header in libnixstore --- src/libcmd/installables.cc | 25 ------------------------- src/libcmd/installables.hh | 22 +--------------------- src/libstore/buildable.cc | 33 +++++++++++++++++++++++++++++++++ src/libstore/buildable.hh | 34 ++++++++++++++++++++++++++++++++++ 4 files changed, 68 insertions(+), 46 deletions(-) create mode 100644 src/libstore/buildable.cc create mode 100644 src/libstore/buildable.hh diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 898e642a5..ca416b9ee 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -20,31 +20,6 @@ namespace nix { -nlohmann::json BuildableOpaque::toJSON(ref store) const { - nlohmann::json res; - res["path"] = store->printStorePath(path); - return res; -} - -nlohmann::json BuildableFromDrv::toJSON(ref store) const { - nlohmann::json res; - res["drvPath"] = store->printStorePath(drvPath); - for (const auto& [output, path] : outputs) { - res["outputs"][output] = path ? 
store->printStorePath(*path) : ""; - } - return res; -} - -nlohmann::json buildablesToJSON(const Buildables & buildables, ref store) { - auto res = nlohmann::json::array(); - for (const Buildable & buildable : buildables) { - std::visit([&res, store](const auto & buildable) { - res.push_back(buildable.toJSON(store)); - }, buildable); - } - return res; -} - void completeFlakeInputPath( ref evalState, const FlakeRef & flakeRef, diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh index b714f097b..d31afd3d5 100644 --- a/src/libcmd/installables.hh +++ b/src/libcmd/installables.hh @@ -2,13 +2,12 @@ #include "util.hh" #include "path.hh" +#include "buildable.hh" #include "eval.hh" #include "flake/flake.hh" #include -#include - namespace nix { struct DrvInfo; @@ -16,25 +15,6 @@ struct SourceExprCommand; namespace eval_cache { class EvalCache; class AttrCursor; } -struct BuildableOpaque { - StorePath path; - nlohmann::json toJSON(ref store) const; -}; - -struct BuildableFromDrv { - StorePath drvPath; - std::map> outputs; - nlohmann::json toJSON(ref store) const; -}; - -typedef std::variant< - BuildableOpaque, - BuildableFromDrv -> Buildable; - -typedef std::vector Buildables; -nlohmann::json buildablesToJSON(const Buildables & buildables, ref store); - struct App { std::vector context; diff --git a/src/libstore/buildable.cc b/src/libstore/buildable.cc new file mode 100644 index 000000000..5cba45b1d --- /dev/null +++ b/src/libstore/buildable.cc @@ -0,0 +1,33 @@ +#include "buildable.hh" +#include "store-api.hh" + +#include + +namespace nix { + +nlohmann::json BuildableOpaque::toJSON(ref store) const { + nlohmann::json res; + res["path"] = store->printStorePath(path); + return res; +} + +nlohmann::json BuildableFromDrv::toJSON(ref store) const { + nlohmann::json res; + res["drvPath"] = store->printStorePath(drvPath); + for (const auto& [output, path] : outputs) { + res["outputs"][output] = path ? store->printStorePath(*path) : ""; + } + return res; +} + +nlohmann::json buildablesToJSON(const Buildables & buildables, ref store) { + auto res = nlohmann::json::array(); + for (const Buildable & buildable : buildables) { + std::visit([&res, store](const auto & buildable) { + res.push_back(buildable.toJSON(store)); + }, buildable); + } + return res; +} + +} diff --git a/src/libstore/buildable.hh b/src/libstore/buildable.hh new file mode 100644 index 000000000..6177237be --- /dev/null +++ b/src/libstore/buildable.hh @@ -0,0 +1,34 @@ +#pragma once + +#include "util.hh" +#include "path.hh" + +#include + +#include + +namespace nix { + +class Store; + +struct BuildableOpaque { + StorePath path; + nlohmann::json toJSON(ref store) const; +}; + +struct BuildableFromDrv { + StorePath drvPath; + std::map> outputs; + nlohmann::json toJSON(ref store) const; +}; + +typedef std::variant< + BuildableOpaque, + BuildableFromDrv +> Buildable; + +typedef std::vector Buildables; + +nlohmann::json buildablesToJSON(const Buildables & buildables, ref store); + +} From 7a2b566dc8f0f94fdd6acbce90e47cd967f9f134 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Mar 2021 00:47:00 +0000 Subject: [PATCH 065/510] Move `StorePathWithOutputs` into its own header/file In the following commits it will become less prevalent. 
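For readers skimming the series: the helper being relocated here implements the `<store path>[!output1,output2,...]` syntax used whenever a derivation plus a set of outputs is passed around as a single string. A minimal sketch of that syntax, using the free splitter declared in the new `path-with-outputs.hh` (the input strings below are purely illustrative, not real store paths):

    #include "path-with-outputs.hh"
    #include <cassert>
    #include <set>
    #include <string>

    void example()
    {
        // Everything before '!' is the path; the rest is a comma-separated
        // list of requested output names.
        auto [path, outputs] = nix::parsePathWithOutputs("hello.drv!out,dev");
        assert(path == "hello.drv");
        assert(outputs == std::set<std::string>({"out", "dev"}));

        // Without '!', no outputs are selected explicitly.
        auto [path2, outputs2] = nix::parsePathWithOutputs("hello");
        assert(outputs2.empty());
    }
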
--- src/libcmd/installables.hh | 1 + src/libstore/derivations.cc | 8 ------- src/libstore/path-with-outputs.cc | 36 +++++++++++++++++++++++++++++++ src/libstore/path-with-outputs.hh | 17 +++++++++++++++ src/libstore/path.cc | 15 ------------- src/libstore/path.hh | 10 --------- src/libstore/store-api.cc | 7 ------ src/libstore/store-api.hh | 1 + 8 files changed, 55 insertions(+), 40 deletions(-) create mode 100644 src/libstore/path-with-outputs.cc create mode 100644 src/libstore/path-with-outputs.hh diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh index d31afd3d5..e5c6fe208 100644 --- a/src/libcmd/installables.hh +++ b/src/libcmd/installables.hh @@ -2,6 +2,7 @@ #include "util.hh" #include "path.hh" +#include "path-with-outputs.hh" #include "buildable.hh" #include "eval.hh" #include "flake/flake.hh" diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index fe98182bb..f6defd98f 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -590,14 +590,6 @@ std::map staticOutputHashes(Store& store, const Derivation& d } -std::string StorePathWithOutputs::to_string(const Store & store) const -{ - return outputs.empty() - ? store.printStorePath(path) - : store.printStorePath(path) + "!" + concatStringsSep(",", outputs); -} - - bool wantOutput(const string & output, const std::set & wanted) { return wanted.empty() || wanted.find(output) != wanted.end(); diff --git a/src/libstore/path-with-outputs.cc b/src/libstore/path-with-outputs.cc new file mode 100644 index 000000000..ba15df0a9 --- /dev/null +++ b/src/libstore/path-with-outputs.cc @@ -0,0 +1,36 @@ +#include "store-api.hh" + +namespace nix { + +std::string StorePathWithOutputs::to_string(const Store & store) const +{ + return outputs.empty() + ? store.printStorePath(path) + : store.printStorePath(path) + "!" + concatStringsSep(",", outputs); +} + + +std::pair parsePathWithOutputs(std::string_view s) +{ + size_t n = s.find("!"); + return n == s.npos + ? std::make_pair(s, std::set()) + : std::make_pair(((std::string_view) s).substr(0, n), + tokenizeString>(((std::string_view) s).substr(n + 1), ",")); +} + + +StorePathWithOutputs Store::parsePathWithOutputs(const std::string & s) +{ + auto [path, outputs] = nix::parsePathWithOutputs(s); + return {parseStorePath(path), std::move(outputs)}; +} + + +StorePathWithOutputs Store::followLinksToStorePathWithOutputs(std::string_view path) const +{ + auto [path2, outputs] = nix::parsePathWithOutputs(path); + return StorePathWithOutputs { followLinksToStorePath(path2), std::move(outputs) }; +} + +} diff --git a/src/libstore/path-with-outputs.hh b/src/libstore/path-with-outputs.hh new file mode 100644 index 000000000..a9e3fc7c2 --- /dev/null +++ b/src/libstore/path-with-outputs.hh @@ -0,0 +1,17 @@ +#pragma once + +#include "path.hh" + +namespace nix { + +struct StorePathWithOutputs +{ + StorePath path; + std::set outputs; + + std::string to_string(const Store & store) const; +}; + +std::pair parsePathWithOutputs(std::string_view s); + +} diff --git a/src/libstore/path.cc b/src/libstore/path.cc index dc9dc3897..e642abcd5 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -82,19 +82,4 @@ PathSet Store::printStorePathSet(const StorePathSet & paths) const return res; } -std::pair parsePathWithOutputs(std::string_view s) -{ - size_t n = s.find("!"); - return n == s.npos - ? 
std::make_pair(s, std::set()) - : std::make_pair(((std::string_view) s).substr(0, n), - tokenizeString>(((std::string_view) s).substr(n + 1), ",")); -} - -StorePathWithOutputs Store::parsePathWithOutputs(const std::string & s) -{ - auto [path, outputs] = nix::parsePathWithOutputs(s); - return {parseStorePath(path), std::move(outputs)}; -} - } diff --git a/src/libstore/path.hh b/src/libstore/path.hh index b03a0f69d..06ba0663b 100644 --- a/src/libstore/path.hh +++ b/src/libstore/path.hh @@ -69,16 +69,6 @@ typedef std::map> StorePathCAMap; /* Extension of derivations in the Nix store. */ const std::string drvExtension = ".drv"; -struct StorePathWithOutputs -{ - StorePath path; - std::set outputs; - - std::string to_string(const Store & store) const; -}; - -std::pair parsePathWithOutputs(std::string_view s); - } namespace std { diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 5e321cedf..e3500872c 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -53,13 +53,6 @@ StorePath Store::followLinksToStorePath(std::string_view path) const } -StorePathWithOutputs Store::followLinksToStorePathWithOutputs(std::string_view path) const -{ - auto [path2, outputs] = nix::parsePathWithOutputs(path); - return StorePathWithOutputs { followLinksToStorePath(path2), std::move(outputs) }; -} - - /* Store paths have the following form: = /- diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 5d19e8949..7adbe3b17 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -2,6 +2,7 @@ #include "realisation.hh" #include "path.hh" +#include "path-with-outputs.hh" #include "hash.hh" #include "content-address.hh" #include "serialise.hh" From 32f4454b9fa3ac30d58e738ece322eb19a0728ba Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Mar 2021 01:06:08 +0000 Subject: [PATCH 066/510] Move `StorePathWithOutput` utilities out of store class These are by no means part of the notion of a store, but rather are things that happen to use stores. (Or put another way, there's no way we'd make them virtual methods any time soon.) It's better to move them out of that too-big class then. Also, this helps us remove StorePathWithOutputs from the Store interface altogether next commit. 
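The resulting change at call sites is mechanical; a small sketch of the new shape (not part of the patch; the wrapper function and its arguments are hypothetical, standing in for whatever the caller already has):

    #include "path-with-outputs.hh"
    #include "store-api.hh"

    #include <string_view>

    // Before: store.followLinksToStorePathWithOutputs(s)
    // After:  a free function that takes the store explicitly.
    nix::StorePathWithOutputs resolveArg(const nix::Store & store, std::string_view s)
    {
        return nix::followLinksToStorePathWithOutputs(store, s);
    }
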
--- src/libexpr/get-drvs.cc | 2 +- src/libstore/daemon.cc | 4 ++-- src/libstore/path-with-outputs.cc | 12 ++++++------ src/libstore/path-with-outputs.hh | 9 +++++++++ src/libstore/store-api.hh | 7 ------- src/nix-store/nix-store.cc | 4 ++-- 6 files changed, 20 insertions(+), 18 deletions(-) diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 1a3990ea1..7793f26ff 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -19,7 +19,7 @@ DrvInfo::DrvInfo(EvalState & state, const string & attrPath, Bindings * attrs) DrvInfo::DrvInfo(EvalState & state, ref store, const std::string & drvPathWithOutputs) : state(&state), attrs(nullptr), attrPath("") { - auto [drvPath, selectedOutputs] = store->parsePathWithOutputs(drvPathWithOutputs); + auto [drvPath, selectedOutputs] = parsePathWithOutputs(*store, drvPathWithOutputs); this->drvPath = store->printStorePath(drvPath); diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index f28ab6438..48706bff8 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -495,7 +495,7 @@ static void performOp(TunnelLogger * logger, ref store, case wopBuildPaths: { std::vector drvs; for (auto & s : readStrings(from)) - drvs.push_back(store->parsePathWithOutputs(s)); + drvs.push_back(parsePathWithOutputs(*store, s)); BuildMode mode = bmNormal; if (GET_PROTOCOL_MINOR(clientVersion) >= 15) { mode = (BuildMode) readInt(from); @@ -861,7 +861,7 @@ static void performOp(TunnelLogger * logger, ref store, case wopQueryMissing: { std::vector targets; for (auto & s : readStrings(from)) - targets.push_back(store->parsePathWithOutputs(s)); + targets.push_back(parsePathWithOutputs(*store, s)); logger->startWork(); StorePathSet willBuild, willSubstitute, unknown; uint64_t downloadSize, narSize; diff --git a/src/libstore/path-with-outputs.cc b/src/libstore/path-with-outputs.cc index ba15df0a9..a898ad09c 100644 --- a/src/libstore/path-with-outputs.cc +++ b/src/libstore/path-with-outputs.cc @@ -20,17 +20,17 @@ std::pair parsePathWithOutputs(std::string_view s) } -StorePathWithOutputs Store::parsePathWithOutputs(const std::string & s) +StorePathWithOutputs parsePathWithOutputs(const Store & store, std::string_view pathWithOutputs) { - auto [path, outputs] = nix::parsePathWithOutputs(s); - return {parseStorePath(path), std::move(outputs)}; + auto [path, outputs] = parsePathWithOutputs(pathWithOutputs); + return StorePathWithOutputs { store.parseStorePath(path), std::move(outputs) }; } -StorePathWithOutputs Store::followLinksToStorePathWithOutputs(std::string_view path) const +StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std::string_view pathWithOutputs) { - auto [path2, outputs] = nix::parsePathWithOutputs(path); - return StorePathWithOutputs { followLinksToStorePath(path2), std::move(outputs) }; + auto [path, outputs] = parsePathWithOutputs(pathWithOutputs); + return StorePathWithOutputs { store.followLinksToStorePath(path), std::move(outputs) }; } } diff --git a/src/libstore/path-with-outputs.hh b/src/libstore/path-with-outputs.hh index a9e3fc7c2..0e34b5aa1 100644 --- a/src/libstore/path-with-outputs.hh +++ b/src/libstore/path-with-outputs.hh @@ -14,4 +14,13 @@ struct StorePathWithOutputs std::pair parsePathWithOutputs(std::string_view s); +class Store; + +/* Split a string specifying a derivation and a set of outputs + (/nix/store/hash-foo!out1,out2,...) into the derivation path + and the outputs. 
*/ +StorePathWithOutputs parsePathWithOutputs(const Store & store, std::string_view pathWithOutputs); + +StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std::string_view pathWithOutputs); + } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 7adbe3b17..da7ac4460 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -262,11 +262,6 @@ public: PathSet printStorePathSet(const StorePathSet & path) const; - /* Split a string specifying a derivation and a set of outputs - (/nix/store/hash-foo!out1,out2,...) into the derivation path - and the outputs. */ - StorePathWithOutputs parsePathWithOutputs(const string & s); - /* Display a set of paths in human-readable form (i.e., between quotes and separated by commas). */ std::string showPaths(const StorePathSet & paths); @@ -290,8 +285,6 @@ public: result. */ StorePath followLinksToStorePath(std::string_view path) const; - StorePathWithOutputs followLinksToStorePathWithOutputs(std::string_view path) const; - /* Constructs a unique store path name. */ StorePath makeStorePath(std::string_view type, std::string_view hash, std::string_view name) const; diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index b684feccb..bfd1299fc 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -128,7 +128,7 @@ static void opRealise(Strings opFlags, Strings opArgs) std::vector paths; for (auto & i : opArgs) - paths.push_back(store->followLinksToStorePathWithOutputs(i)); + paths.push_back(followLinksToStorePathWithOutputs(*store, i)); uint64_t downloadSize, narSize; StorePathSet willBuild, willSubstitute, unknown; @@ -873,7 +873,7 @@ static void opServe(Strings opFlags, Strings opArgs) std::vector paths; for (auto & s : readStrings(in)) - paths.push_back(store->parsePathWithOutputs(s)); + paths.push_back(parsePathWithOutputs(*store, s)); getBuildSettings(); From 255d145ba7ac907d1cba8d088da556b591627756 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Mar 2021 03:50:41 +0000 Subject: [PATCH 067/510] Use `BuildableReq` for `buildPaths` and `ensurePath` This avoids an ambiguity where the `StorePathWithOutputs { drvPath, {} }` could mean "build `brvPath`" or "substitute `drvPath`" depending on context. It also brings the internals closer in line to the new CLI, by generalizing the `Buildable` type is used there and makes that distinction already. In doing so, relegate `StorePathWithOutputs` to being a type just for backwards compatibility (CLI and RPC). 
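To make the disambiguation concrete, here is a minimal sketch (not from the patch itself; `drvPath` and `outPath` are hypothetical paths supplied by the caller) of how requests look with the new `BuildableReq` variant and the compatibility conversion added here:

    #include "buildable.hh"           // BuildableReq, BuildableOpaque
    #include "path-with-outputs.hh"   // StorePathWithOutputs::toBuildableReq()
    #include "store-api.hh"

    void sketch(nix::Store & store, nix::StorePath drvPath, nix::StorePath outPath)
    {
        using namespace nix;

        // "Build this derivation's 'out' output": unambiguous now.
        BuildableReq build = StorePathWithOutputs { drvPath, {"out"} }.toBuildableReq();

        // "Just make this store path present", even when it is a .drv file,
        // which the old StorePathWithOutputs could not request:
        BuildableReq substituteDrv = BuildableOpaque { drvPath };

        // Non-derivation paths keep mapping to opaque (substitution) requests:
        BuildableReq substituteOut = StorePathWithOutputs { outPath, {} }.toBuildableReq();

        store.buildPaths({ build, substituteDrv, substituteOut });
    }

As the `remote-store.cc` hunk below shows, daemons speaking protocol 1.29 or newer receive these requests directly, while older daemons get the legacy `path!outputs` strings, and a bare `.drv` substitution request is rejected with an error.
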
--- src/libcmd/installables.cc | 7 +-- src/libexpr/get-drvs.cc | 1 + src/libexpr/primops.cc | 12 +++-- src/libmain/shared.cc | 2 +- src/libmain/shared.hh | 3 +- src/libstore/build/derivation-goal.cc | 4 +- src/libstore/build/entry-points.cc | 16 +++--- src/libstore/build/local-derivation-goal.cc | 52 +++++++++++++------ src/libstore/build/local-derivation-goal.hh | 1 + src/libstore/build/worker.cc | 6 +-- src/libstore/buildable.cc | 47 +++++++++++++++++ src/libstore/buildable.hh | 29 +++++++++-- src/libstore/daemon.cc | 21 +++++--- src/libstore/legacy-ssh-store.cc | 16 ++++-- src/libstore/misc.cc | 49 +++++++++--------- src/libstore/path-with-outputs.cc | 35 +++++++++++++ src/libstore/path-with-outputs.hh | 9 ++++ src/libstore/remote-store.cc | 57 +++++++++++++++++---- src/libstore/remote-store.hh | 4 +- src/libstore/store-api.cc | 8 +-- src/libstore/store-api.hh | 6 +-- src/libstore/worker-protocol.hh | 22 ++++++++ src/nix-build/nix-build.cc | 4 +- src/nix-env/nix-env.cc | 30 ++++++----- src/nix-env/user-env.cc | 9 +++- src/nix-store/nix-store.cc | 11 ++-- src/nix/bundle.cc | 4 +- src/nix/develop.cc | 3 +- src/nix/flake.cc | 5 +- src/nix/profile.cc | 15 +++--- src/nix/run.cc | 2 +- 31 files changed, 364 insertions(+), 126 deletions(-) diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index ca416b9ee..b68c5f6a7 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -679,19 +679,20 @@ Buildables build(ref store, Realise mode, Buildables buildables; - std::vector pathsToBuild; + std::vector pathsToBuild; for (auto & i : installables) { for (auto & b : i->toBuildables()) { std::visit(overloaded { [&](BuildableOpaque bo) { - pathsToBuild.push_back({bo.path}); + pathsToBuild.push_back(bo); }, [&](BuildableFromDrv bfd) { StringSet outputNames; for (auto & output : bfd.outputs) outputNames.insert(output.first); - pathsToBuild.push_back({bfd.drvPath, outputNames}); + pathsToBuild.push_back( + BuildableReqFromDrv{bfd.drvPath, outputNames}); }, }, b); buildables.push_back(std::move(b)); diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 7793f26ff..f774e6493 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -2,6 +2,7 @@ #include "util.hh" #include "eval-inline.hh" #include "store-api.hh" +#include "path-with-outputs.hh" #include #include diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 1d1afa768..24bc34b74 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -35,7 +35,7 @@ InvalidPathError::InvalidPathError(const Path & path) : void EvalState::realiseContext(const PathSet & context) { - std::vector drvs; + std::vector drvs; for (auto & i : context) { auto [ctxS, outputName] = decodeContext(i); @@ -43,7 +43,7 @@ void EvalState::realiseContext(const PathSet & context) if (!store->isValidPath(ctx)) throw InvalidPathError(store->printStorePath(ctx)); if (!outputName.empty() && ctx.isDerivation()) { - drvs.push_back(StorePathWithOutputs{ctx, {outputName}}); + drvs.push_back({ctx, {outputName}}); } } @@ -51,14 +51,16 @@ void EvalState::realiseContext(const PathSet & context) if (!evalSettings.enableImportFromDerivation) throw EvalError("attempted to realize '%1%' during evaluation but 'allow-import-from-derivation' is false", - store->printStorePath(drvs.begin()->path)); + store->printStorePath(drvs.begin()->drvPath)); /* For performance, prefetch all substitute info. 
*/ StorePathSet willBuild, willSubstitute, unknown; uint64_t downloadSize, narSize; - store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize); + std::vector buildReqs; + for (auto & d : drvs) buildReqs.emplace_back(BuildableReq { d }); + store->queryMissing(buildReqs, willBuild, willSubstitute, unknown, downloadSize, narSize); - store->buildPaths(drvs); + store->buildPaths(buildReqs); /* Add the output of this derivations to the allowed paths. */ diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 5baaff3e9..20027e099 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -36,7 +36,7 @@ void printGCWarning() } -void printMissing(ref store, const std::vector & paths, Verbosity lvl) +void printMissing(ref store, const std::vector & paths, Verbosity lvl) { uint64_t downloadSize, narSize; StorePathSet willBuild, willSubstitute, unknown; diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh index edc7b5efa..18e0fb57d 100644 --- a/src/libmain/shared.hh +++ b/src/libmain/shared.hh @@ -4,6 +4,7 @@ #include "args.hh" #include "common-args.hh" #include "path.hh" +#include "buildable.hh" #include @@ -42,7 +43,7 @@ struct StorePathWithOutputs; void printMissing( ref store, - const std::vector & paths, + const std::vector & paths, Verbosity lvl = lvlInfo); void printMissing(ref store, const StorePathSet & willBuild, diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 2e7be517e..8680d0bce 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -73,7 +73,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, state = &DerivationGoal::getDerivation; name = fmt( "building of '%s' from .drv file", - StorePathWithOutputs { drvPath, wantedOutputs }.to_string(worker.store)); + to_string(worker.store, BuildableReqFromDrv { drvPath, wantedOutputs })); trace("created"); mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); @@ -94,7 +94,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation state = &DerivationGoal::haveDerivation; name = fmt( "building of '%s' from in-memory derivation", - StorePathWithOutputs { drvPath, drv.outputNames() }.to_string(worker.store)); + to_string(worker.store, BuildableReqFromDrv { drvPath, drv.outputNames() })); trace("created"); mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index 686364440..d1973d78b 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -6,16 +6,20 @@ namespace nix { -void Store::buildPaths(const std::vector & drvPaths, BuildMode buildMode) +void Store::buildPaths(const std::vector & reqs, BuildMode buildMode) { Worker worker(*this); Goals goals; - for (auto & path : drvPaths) { - if (path.path.isDerivation()) - goals.insert(worker.makeDerivationGoal(path.path, path.outputs, buildMode)); - else - goals.insert(worker.makePathSubstitutionGoal(path.path, buildMode == bmRepair ? Repair : NoRepair)); + for (auto & br : reqs) { + std::visit(overloaded { + [&](BuildableReqFromDrv bfd) { + goals.insert(worker.makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode)); + }, + [&](BuildableOpaque bo) { + goals.insert(worker.makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? 
Repair : NoRepair)); + }, + }, br); } worker.run(goals); diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 8ef43c225..c245527c9 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -1190,6 +1190,26 @@ void LocalDerivationGoal::writeStructuredAttrs() chownToBuilder(tmpDir + "/.attrs.sh"); } + +static StorePath pathPartOfReq(const BuildableReq & req) +{ + return std::visit(overloaded { + [&](BuildableOpaque bo) { + return bo.path; + }, + [&](BuildableReqFromDrv bfd) { + return bfd.drvPath; + }, + }, req); +} + + +bool LocalDerivationGoal::isAllowed(const BuildableReq & req) +{ + return this->isAllowed(pathPartOfReq(req)); +} + + struct RestrictedStoreConfig : virtual LocalFSStoreConfig { using LocalFSStoreConfig::LocalFSStoreConfig; @@ -1312,25 +1332,27 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo // an allowed derivation { throw Error("queryRealisation"); } - void buildPaths(const std::vector & paths, BuildMode buildMode) override + void buildPaths(const std::vector & paths, BuildMode buildMode) override { if (buildMode != bmNormal) throw Error("unsupported build mode"); StorePathSet newPaths; - for (auto & path : paths) { - if (!goal.isAllowed(path.path)) - throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path)); + for (auto & req : paths) { + if (!goal.isAllowed(req)) + throw InvalidPath("cannot build '%s' in recursive Nix because path is unknown", to_string(*next, req)); } next->buildPaths(paths, buildMode); for (auto & path : paths) { - if (!path.path.isDerivation()) continue; - auto outputs = next->queryDerivationOutputMap(path.path); - for (auto & output : outputs) - if (wantOutput(output.first, path.outputs)) - newPaths.insert(output.second); + auto p = std::get_if(&path); + if (!p) continue; + auto & bfd = *p; + auto outputs = next->queryDerivationOutputMap(bfd.drvPath); + for (auto & [outputName, outputPath] : outputs) + if (wantOutput(outputName, bfd.outputs)) + newPaths.insert(outputPath); } StorePathSet closure; @@ -1358,7 +1380,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo void addSignatures(const StorePath & storePath, const StringSet & sigs) override { unsupported("addSignatures"); } - void queryMissing(const std::vector & targets, + void queryMissing(const std::vector & targets, StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown, uint64_t & downloadSize, uint64_t & narSize) override { @@ -1366,12 +1388,12 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo client about what paths will be built/substituted or are already present. Probably not a big deal. 
*/ - std::vector allowed; - for (auto & path : targets) { - if (goal.isAllowed(path.path)) - allowed.emplace_back(path); + std::vector allowed; + for (auto & req : targets) { + if (goal.isAllowed(req)) + allowed.emplace_back(req); else - unknown.insert(path.path); + unknown.insert(pathPartOfReq(req)); } next->queryMissing(allowed, willBuild, willSubstitute, diff --git a/src/libstore/build/local-derivation-goal.hh b/src/libstore/build/local-derivation-goal.hh index 47b818a8b..edb93f84e 100644 --- a/src/libstore/build/local-derivation-goal.hh +++ b/src/libstore/build/local-derivation-goal.hh @@ -116,6 +116,7 @@ struct LocalDerivationGoal : public DerivationGoal { return inputPaths.count(path) || addedPaths.count(path); } + bool isAllowed(const BuildableReq & req); friend struct RestrictedStore; diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index 616b17e61..fef4cb0cb 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -226,14 +226,14 @@ void Worker::waitForAWhile(GoalPtr goal) void Worker::run(const Goals & _topGoals) { - std::vector topPaths; + std::vector topPaths; for (auto & i : _topGoals) { topGoals.insert(i); if (auto goal = dynamic_cast(i.get())) { - topPaths.push_back({goal->drvPath, goal->wantedOutputs}); + topPaths.push_back(BuildableReqFromDrv{goal->drvPath, goal->wantedOutputs}); } else if (auto goal = dynamic_cast(i.get())) { - topPaths.push_back({goal->storePath}); + topPaths.push_back(BuildableOpaque{goal->storePath}); } } diff --git a/src/libstore/buildable.cc b/src/libstore/buildable.cc index 5cba45b1d..63ca1779e 100644 --- a/src/libstore/buildable.cc +++ b/src/libstore/buildable.cc @@ -11,6 +11,7 @@ nlohmann::json BuildableOpaque::toJSON(ref store) const { return res; } +template<> nlohmann::json BuildableFromDrv::toJSON(ref store) const { nlohmann::json res; res["drvPath"] = store->printStorePath(drvPath); @@ -30,4 +31,50 @@ nlohmann::json buildablesToJSON(const Buildables & buildables, ref store) return res; } + +std::string BuildableOpaque::to_string(const Store & store) const { + return store.printStorePath(path); +} + +template<> +std::string BuildableReqFromDrv::to_string(const Store & store) const { + return store.printStorePath(drvPath) + + "!" + + (outputs.empty() ? std::string { "*" } : concatStringsSep(",", outputs)); +} + +std::string to_string(const Store & store, const BuildableReq & req) +{ + return std::visit( + [&](const auto & req) { return req.to_string(store); }, + req); +} + + +BuildableOpaque BuildableOpaque::parse(const Store & store, std::string_view s) +{ + return {store.parseStorePath(s)}; +} + +template<> +BuildableReqFromDrv BuildableReqFromDrv::parse(const Store & store, std::string_view s) +{ + size_t n = s.find("!"); + assert(n != s.npos); + auto drvPath = store.parseStorePath(s.substr(0, n)); + auto outputsS = s.substr(n + 1); + std::set outputs; + if (outputsS != "*") + outputs = tokenizeString>(outputsS); + return {drvPath, outputs}; +} + +BuildableReq parseBuildableReq(const Store & store, std::string_view s) +{ + size_t n = s.find("!"); + return n == s.npos + ? 
(BuildableReq) BuildableOpaque::parse(store, s) + : (BuildableReq) BuildableReqFromDrv::parse(store, s); +} + } diff --git a/src/libstore/buildable.hh b/src/libstore/buildable.hh index 6177237be..db78316bd 100644 --- a/src/libstore/buildable.hh +++ b/src/libstore/buildable.hh @@ -2,6 +2,7 @@ #include "util.hh" #include "path.hh" +#include "path.hh" #include @@ -13,19 +14,37 @@ class Store; struct BuildableOpaque { StorePath path; + nlohmann::json toJSON(ref store) const; + std::string to_string(const Store & store) const; + static BuildableOpaque parse(const Store & store, std::string_view); }; -struct BuildableFromDrv { +template +struct BuildableForFromDrv { StorePath drvPath; - std::map> outputs; + Outputs outputs; + nlohmann::json toJSON(ref store) const; + std::string to_string(const Store & store) const; + static BuildableForFromDrv parse(const Store & store, std::string_view); }; -typedef std::variant< +template +using BuildableFor = std::variant< BuildableOpaque, - BuildableFromDrv -> Buildable; + BuildableForFromDrv +>; + +typedef BuildableForFromDrv> BuildableReqFromDrv; +typedef BuildableFor> BuildableReq; + +std::string to_string(const Store & store, const BuildableReq &); + +BuildableReq parseBuildableReq(const Store & store, std::string_view); + +typedef BuildableForFromDrv>> BuildableFromDrv; +typedef BuildableFor>> Buildable; typedef std::vector Buildables; diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 48706bff8..6b527dcb2 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -2,6 +2,7 @@ #include "monitor-fd.hh" #include "worker-protocol.hh" #include "store-api.hh" +#include "path-with-outputs.hh" #include "finally.hh" #include "affinity.hh" #include "archive.hh" @@ -259,6 +260,18 @@ static void writeValidPathInfo( } } +static std::vector readBuildableReqs(Store & store, unsigned int clientVersion, Source & from) +{ + std::vector reqs; + if (GET_PROTOCOL_MINOR(clientVersion) >= 29) { + reqs = worker_proto::read(store, from, Phantom> {}); + } else { + for (auto & s : readStrings(from)) + reqs.push_back(parsePathWithOutputs(store, s).toBuildableReq()); + } + return reqs; +} + static void performOp(TunnelLogger * logger, ref store, TrustedFlag trusted, RecursiveFlag recursive, unsigned int clientVersion, Source & from, BufferedSink & to, unsigned int op) @@ -493,9 +506,7 @@ static void performOp(TunnelLogger * logger, ref store, } case wopBuildPaths: { - std::vector drvs; - for (auto & s : readStrings(from)) - drvs.push_back(parsePathWithOutputs(*store, s)); + auto drvs = readBuildableReqs(*store, clientVersion, from); BuildMode mode = bmNormal; if (GET_PROTOCOL_MINOR(clientVersion) >= 15) { mode = (BuildMode) readInt(from); @@ -859,9 +870,7 @@ static void performOp(TunnelLogger * logger, ref store, } case wopQueryMissing: { - std::vector targets; - for (auto & s : readStrings(from)) - targets.push_back(parsePathWithOutputs(*store, s)); + auto targets = readBuildableReqs(*store, clientVersion, from); logger->startWork(); StorePathSet willBuild, willSubstitute, unknown; uint64_t downloadSize, narSize; diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index a9f53bad9..1cb977be6 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -3,6 +3,7 @@ #include "remote-store.hh" #include "serve-protocol.hh" #include "store-api.hh" +#include "path-with-outputs.hh" #include "worker-protocol.hh" #include "ssh.hh" #include "derivations.hh" @@ -266,14 +267,23 @@ public: return status; } - void 
buildPaths(const std::vector & drvPaths, BuildMode buildMode) override + void buildPaths(const std::vector & drvPaths, BuildMode buildMode) override { auto conn(connections->get()); conn->to << cmdBuildPaths; Strings ss; - for (auto & p : drvPaths) - ss.push_back(p.to_string(*this)); + for (auto & p : drvPaths) { + auto sOrDrvPath = StorePathWithOutputs::tryFromBuildableReq(p); + std::visit(overloaded { + [&](StorePathWithOutputs s) { + ss.push_back(s.to_string(*this)); + }, + [&](StorePath drvPath) { + throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath)); + }, + }, sOrDrvPath); + } conn->to << ss; putBuildSettings(*conn); diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index f58816ad8..e702a4f9e 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -117,7 +117,7 @@ std::optional getDerivationCA(const BasicDerivation & drv) return std::nullopt; } -void Store::queryMissing(const std::vector & targets, +void Store::queryMissing(const std::vector & targets, StorePathSet & willBuild_, StorePathSet & willSubstitute_, StorePathSet & unknown_, uint64_t & downloadSize_, uint64_t & narSize_) { @@ -145,7 +145,7 @@ void Store::queryMissing(const std::vector & targets, Sync state_(State{{}, unknown_, willSubstitute_, willBuild_, downloadSize_, narSize_}); - std::function doPath; + std::function doPath; auto mustBuildDrv = [&](const StorePath & drvPath, const Derivation & drv) { { @@ -154,7 +154,7 @@ void Store::queryMissing(const std::vector & targets, } for (auto & i : drv.inputDrvs) - pool.enqueue(std::bind(doPath, StorePathWithOutputs { i.first, i.second })); + pool.enqueue(std::bind(doPath, BuildableReqFromDrv { i.first, i.second })); }; auto checkOutput = [&]( @@ -177,24 +177,25 @@ void Store::queryMissing(const std::vector & targets, drvState->outPaths.insert(outPath); if (!drvState->left) { for (auto & path : drvState->outPaths) - pool.enqueue(std::bind(doPath, StorePathWithOutputs { path } )); + pool.enqueue(std::bind(doPath, BuildableOpaque { path } )); } } } }; - doPath = [&](const StorePathWithOutputs & path) { + doPath = [&](const BuildableReq & req) { { auto state(state_.lock()); - if (!state->done.insert(path.to_string(*this)).second) return; + if (!state->done.insert(to_string(*this, req)).second) return; } - if (path.path.isDerivation()) { - if (!isValidPath(path.path)) { + std::visit(overloaded { + [&](BuildableReqFromDrv bfd) { + if (!isValidPath(bfd.drvPath)) { // FIXME: we could try to substitute the derivation. auto state(state_.lock()); - state->unknown.insert(path.path); + state->unknown.insert(bfd.drvPath); return; } @@ -202,52 +203,54 @@ void Store::queryMissing(const std::vector & targets, /* true for regular derivations, and CA derivations for which we have a trust mapping for all wanted outputs. 
*/ auto knownOutputPaths = true; - for (auto & [outputName, pathOpt] : queryPartialDerivationOutputMap(path.path)) { + for (auto & [outputName, pathOpt] : queryPartialDerivationOutputMap(bfd.drvPath)) { if (!pathOpt) { knownOutputPaths = false; break; } - if (wantOutput(outputName, path.outputs) && !isValidPath(*pathOpt)) + if (wantOutput(outputName, bfd.outputs) && !isValidPath(*pathOpt)) invalid.insert(*pathOpt); } if (knownOutputPaths && invalid.empty()) return; - auto drv = make_ref(derivationFromPath(path.path)); - ParsedDerivation parsedDrv(StorePath(path.path), *drv); + auto drv = make_ref(derivationFromPath(bfd.drvPath)); + ParsedDerivation parsedDrv(StorePath(bfd.drvPath), *drv); if (knownOutputPaths && settings.useSubstitutes && parsedDrv.substitutesAllowed()) { auto drvState = make_ref>(DrvState(invalid.size())); for (auto & output : invalid) - pool.enqueue(std::bind(checkOutput, path.path, drv, output, drvState)); + pool.enqueue(std::bind(checkOutput, bfd.drvPath, drv, output, drvState)); } else - mustBuildDrv(path.path, *drv); + mustBuildDrv(bfd.drvPath, *drv); - } else { + }, + [&](BuildableOpaque bo) { - if (isValidPath(path.path)) return; + if (isValidPath(bo.path)) return; SubstitutablePathInfos infos; - querySubstitutablePathInfos({{path.path, std::nullopt}}, infos); + querySubstitutablePathInfos({{bo.path, std::nullopt}}, infos); if (infos.empty()) { auto state(state_.lock()); - state->unknown.insert(path.path); + state->unknown.insert(bo.path); return; } - auto info = infos.find(path.path); + auto info = infos.find(bo.path); assert(info != infos.end()); { auto state(state_.lock()); - state->willSubstitute.insert(path.path); + state->willSubstitute.insert(bo.path); state->downloadSize += info->second.downloadSize; state->narSize += info->second.narSize; } for (auto & ref : info->second.references) - pool.enqueue(std::bind(doPath, StorePathWithOutputs { ref })); - } + pool.enqueue(std::bind(doPath, BuildableOpaque { ref })); + }, + }, req); }; for (auto & path : targets) diff --git a/src/libstore/path-with-outputs.cc b/src/libstore/path-with-outputs.cc index a898ad09c..353286ac6 100644 --- a/src/libstore/path-with-outputs.cc +++ b/src/libstore/path-with-outputs.cc @@ -1,3 +1,4 @@ +#include "path-with-outputs.hh" #include "store-api.hh" namespace nix { @@ -10,6 +11,40 @@ std::string StorePathWithOutputs::to_string(const Store & store) const } +BuildableReq StorePathWithOutputs::toBuildableReq() const +{ + if (!outputs.empty() || path.isDerivation()) + return BuildableReqFromDrv { path, outputs }; + else + return BuildableOpaque { path }; +} + + +std::vector toBuildableReqs(const std::vector ss) +{ + std::vector reqs; + for (auto & s : ss) reqs.push_back(s.toBuildableReq()); + return reqs; +} + + +std::variant StorePathWithOutputs::tryFromBuildableReq(const BuildableReq & p) +{ + return std::visit(overloaded { + [&](BuildableOpaque bo) -> std::variant { + if (bo.path.isDerivation()) { + // drv path gets interpreted as "build", not "get drv file itself" + return bo.path; + } + return StorePathWithOutputs { bo.path }; + }, + [&](BuildableReqFromDrv bfd) -> std::variant { + return StorePathWithOutputs { bfd.drvPath, bfd.outputs }; + }, + }, p); +} + + std::pair parsePathWithOutputs(std::string_view s) { size_t n = s.find("!"); diff --git a/src/libstore/path-with-outputs.hh b/src/libstore/path-with-outputs.hh index 0e34b5aa1..870cac08e 100644 --- a/src/libstore/path-with-outputs.hh +++ b/src/libstore/path-with-outputs.hh @@ -1,6 +1,9 @@ #pragma once +#include + #include 
"path.hh" +#include "buildable.hh" namespace nix { @@ -10,8 +13,14 @@ struct StorePathWithOutputs std::set outputs; std::string to_string(const Store & store) const; + + BuildableReq toBuildableReq() const; + + static std::variant tryFromBuildableReq(const BuildableReq &); }; +std::vector toBuildableReqs(const std::vector); + std::pair parsePathWithOutputs(std::string_view s); class Store; diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index ccf095dc2..de1c95ed6 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -1,5 +1,6 @@ #include "serialise.hh" #include "util.hh" +#include "path-with-outputs.hh" #include "remote-fs-accessor.hh" #include "remote-store.hh" #include "worker-protocol.hh" @@ -50,6 +51,19 @@ void write(const Store & store, Sink & out, const ContentAddress & ca) out << renderContentAddress(ca); } + +BuildableReq read(const Store & store, Source & from, Phantom _) +{ + auto s = readString(from); + return parseBuildableReq(store, s); +} + +void write(const Store & store, Sink & out, const BuildableReq & req) +{ + out << to_string(store, req); +} + + Realisation read(const Store & store, Source & from, Phantom _) { std::string rawInput = readString(from); @@ -58,8 +72,12 @@ Realisation read(const Store & store, Source & from, Phantom _) "remote-protocol" ); } + void write(const Store & store, Sink & out, const Realisation & realisation) -{ out << realisation.toJSON().dump(); } +{ + out << realisation.toJSON().dump(); +} + DrvOutput read(const Store & store, Source & from, Phantom _) { @@ -652,16 +670,36 @@ std::optional RemoteStore::queryRealisation(const DrvOutput & return {Realisation{.id = id, .outPath = *outPaths.begin()}}; } +static void writeBuildableReqs(RemoteStore & store, ConnectionHandle & conn, const std::vector & reqs) +{ + if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 29) { + worker_proto::write(store, conn->to, reqs); + } else { + Strings ss; + for (auto & p : reqs) { + auto sOrDrvPath = StorePathWithOutputs::tryFromBuildableReq(p); + std::visit(overloaded { + [&](StorePathWithOutputs s) { + ss.push_back(s.to_string(store)); + }, + [&](StorePath drvPath) { + throw Error("trying to request '%s', but daemon protocol %d.%d is too old (< 1.29) to request a derivation file", + store.printStorePath(drvPath), + GET_PROTOCOL_MAJOR(conn->daemonVersion), + GET_PROTOCOL_MINOR(conn->daemonVersion)); + }, + }, sOrDrvPath); + } + conn->to << ss; + } +} -void RemoteStore::buildPaths(const std::vector & drvPaths, BuildMode buildMode) +void RemoteStore::buildPaths(const std::vector & drvPaths, BuildMode buildMode) { auto conn(getConnection()); conn->to << wopBuildPaths; assert(GET_PROTOCOL_MINOR(conn->daemonVersion) >= 13); - Strings ss; - for (auto & p : drvPaths) - ss.push_back(p.to_string(*this)); - conn->to << ss; + writeBuildableReqs(*this, conn, drvPaths); if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 15) conn->to << buildMode; else @@ -800,7 +838,7 @@ void RemoteStore::addSignatures(const StorePath & storePath, const StringSet & s } -void RemoteStore::queryMissing(const std::vector & targets, +void RemoteStore::queryMissing(const std::vector & targets, StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown, uint64_t & downloadSize, uint64_t & narSize) { @@ -811,10 +849,7 @@ void RemoteStore::queryMissing(const std::vector & targets // to prevent a deadlock. 
goto fallback; conn->to << wopQueryMissing; - Strings ss; - for (auto & p : targets) - ss.push_back(p.to_string(*this)); - conn->to << ss; + writeBuildableReqs(*this, conn, targets); conn.processStderr(); willBuild = worker_proto::read(*this, conn->from, Phantom {}); willSubstitute = worker_proto::read(*this, conn->from, Phantom {}); diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index b3a9910a3..20d366038 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -85,7 +85,7 @@ public: std::optional queryRealisation(const DrvOutput &) override; - void buildPaths(const std::vector & paths, BuildMode buildMode) override; + void buildPaths(const std::vector & paths, BuildMode buildMode) override; BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) override; @@ -108,7 +108,7 @@ public: void addSignatures(const StorePath & storePath, const StringSet & sigs) override; - void queryMissing(const std::vector & targets, + void queryMissing(const std::vector & targets, StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown, uint64_t & downloadSize, uint64_t & narSize) override; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index e3500872c..8b60bdc62 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -529,10 +529,10 @@ void Store::queryPathInfo(const StorePath & storePath, void Store::substitutePaths(const StorePathSet & paths) { - std::vector paths2; + std::vector paths2; for (auto & path : paths) if (!path.isDerivation()) - paths2.push_back({path}); + paths2.push_back(BuildableOpaque{path}); uint64_t downloadSize, narSize; StorePathSet willBuild, willSubstitute, unknown; queryMissing(paths2, @@ -540,8 +540,8 @@ void Store::substitutePaths(const StorePathSet & paths) if (!willSubstitute.empty()) try { - std::vector subs; - for (auto & p : willSubstitute) subs.push_back({p}); + std::vector subs; + for (auto & p : willSubstitute) subs.push_back(BuildableOpaque{p}); buildPaths(subs); } catch (Error & e) { logWarning(e.info()); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index da7ac4460..59d0983df 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -2,7 +2,7 @@ #include "realisation.hh" #include "path.hh" -#include "path-with-outputs.hh" +#include "buildable.hh" #include "hash.hh" #include "content-address.hh" #include "serialise.hh" @@ -494,7 +494,7 @@ public: recursively building any sub-derivations. For inputs that are not derivations, substitute them. */ virtual void buildPaths( - const std::vector & paths, + const std::vector & paths, BuildMode buildMode = bmNormal); /* Build a single non-materialized derivation (i.e. not from an @@ -656,7 +656,7 @@ public: /* Given a set of paths that are to be built, return the set of derivations that will be built, and the set of output paths that will be substituted. 
*/ - virtual void queryMissing(const std::vector & targets, + virtual void queryMissing(const std::vector & targets, StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown, uint64_t & downloadSize, uint64_t & narSize); diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index be071dd78..0255726ac 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -86,9 +86,11 @@ namespace worker_proto { MAKE_WORKER_PROTO(, std::string); MAKE_WORKER_PROTO(, StorePath); MAKE_WORKER_PROTO(, ContentAddress); +MAKE_WORKER_PROTO(, BuildableReq); MAKE_WORKER_PROTO(, Realisation); MAKE_WORKER_PROTO(, DrvOutput); +MAKE_WORKER_PROTO(template, std::vector); MAKE_WORKER_PROTO(template, std::set); #define X_ template @@ -113,6 +115,26 @@ MAKE_WORKER_PROTO(X_, Y_); MAKE_WORKER_PROTO(, std::optional); MAKE_WORKER_PROTO(, std::optional); +template +std::vector read(const Store & store, Source & from, Phantom> _) +{ + std::vector resSet; + auto size = readNum(from); + while (size--) { + resSet.push_back(read(store, from, Phantom {})); + } + return resSet; +} + +template +void write(const Store & store, Sink & out, const std::vector & resSet) +{ + out << resSet.size(); + for (auto & key : resSet) { + write(store, out, key); + } +} + template std::set read(const Store & store, Source & from, Phantom> _) { diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 65b85b304..6f8a61261 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -12,6 +12,7 @@ #include "affinity.hh" #include "util.hh" #include "shared.hh" +#include "path-with-outputs.hh" #include "eval.hh" #include "eval-inline.hh" #include "get-drvs.hh" @@ -321,7 +322,8 @@ static void main_nix_build(int argc, char * * argv) state->printStats(); - auto buildPaths = [&](const std::vector & paths) { + auto buildPaths = [&](const std::vector & paths0) { + auto paths = toBuildableReqs(paths0); /* Note: we do this even when !printMissing to efficiently fetch binary cache data. */ uint64_t downloadSize, narSize; diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index 0f10a4cbb..af1c69b87 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -6,6 +6,7 @@ #include "globals.hh" #include "names.hh" #include "profiles.hh" +#include "path-with-outputs.hh" #include "shared.hh" #include "store-api.hh" #include "local-fs-store.hh" @@ -418,13 +419,13 @@ static void queryInstSources(EvalState & state, static void printMissing(EvalState & state, DrvInfos & elems) { - std::vector targets; + std::vector targets; for (auto & i : elems) { Path drvPath = i.queryDrvPath(); if (drvPath != "") - targets.push_back({state.store->parseStorePath(drvPath)}); + targets.push_back(BuildableReqFromDrv{state.store->parseStorePath(drvPath)}); else - targets.push_back({state.store->parseStorePath(i.queryOutPath())}); + targets.push_back(BuildableOpaque{state.store->parseStorePath(i.queryOutPath())}); } printMissing(state.store, targets); @@ -693,17 +694,18 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs) if (globals.forceName != "") drv.setName(globals.forceName); - if (drv.queryDrvPath() != "") { - std::vector paths{{globals.state->store->parseStorePath(drv.queryDrvPath())}}; - printMissing(globals.state->store, paths); - if (globals.dryRun) return; - globals.state->store->buildPaths(paths, globals.state->repair ? 
bmRepair : bmNormal); - } else { - printMissing(globals.state->store, - {{globals.state->store->parseStorePath(drv.queryOutPath())}}); - if (globals.dryRun) return; - globals.state->store->ensurePath(globals.state->store->parseStorePath(drv.queryOutPath())); - } + std::vector paths { + (drv.queryDrvPath() != "") + ? (BuildableReq) (BuildableReqFromDrv { + globals.state->store->parseStorePath(drv.queryDrvPath()) + }) + : (BuildableReq) (BuildableOpaque { + globals.state->store->parseStorePath(drv.queryOutPath()) + }), + }; + printMissing(globals.state->store, paths); + if (globals.dryRun) return; + globals.state->store->buildPaths(paths, globals.state->repair ? bmRepair : bmNormal); debug(format("switching to new user environment")); Path generation = createGeneration( diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 168ac492b..0ccf960fb 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -2,6 +2,7 @@ #include "util.hh" #include "derivations.hh" #include "store-api.hh" +#include "path-with-outputs.hh" #include "local-fs-store.hh" #include "globals.hh" #include "shared.hh" @@ -41,7 +42,9 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, drvsToBuild.push_back({state.store->parseStorePath(i.queryDrvPath())}); debug(format("building user environment dependencies")); - state.store->buildPaths(drvsToBuild, state.repair ? bmRepair : bmNormal); + state.store->buildPaths( + toBuildableReqs(drvsToBuild), + state.repair ? bmRepair : bmNormal); /* Construct the whole top level derivation. */ StorePathSet references; @@ -136,7 +139,9 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, debug("building user environment"); std::vector topLevelDrvs; topLevelDrvs.push_back({topLevelDrv}); - state.store->buildPaths(topLevelDrvs, state.repair ? bmRepair : bmNormal); + state.store->buildPaths( + toBuildableReqs(topLevelDrvs), + state.repair ? bmRepair : bmNormal); /* Switch the current user environment to the output path. */ auto store2 = state.store.dynamic_pointer_cast(); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index bfd1299fc..21c1e547b 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -10,6 +10,7 @@ #include "worker-protocol.hh" #include "graphml.hh" #include "legacy.hh" +#include "path-with-outputs.hh" #include #include @@ -62,7 +63,7 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true) auto store2 = std::dynamic_pointer_cast(store); if (path.path.isDerivation()) { - if (build) store->buildPaths({path}); + if (build) store->buildPaths({path.toBuildableReq()}); auto outputPaths = store->queryDerivationOutputMap(path.path); Derivation drv = store->derivationFromPath(path.path); rootNr++; @@ -132,7 +133,9 @@ static void opRealise(Strings opFlags, Strings opArgs) uint64_t downloadSize, narSize; StorePathSet willBuild, willSubstitute, unknown; - store->queryMissing(paths, willBuild, willSubstitute, unknown, downloadSize, narSize); + store->queryMissing( + toBuildableReqs(paths), + willBuild, willSubstitute, unknown, downloadSize, narSize); if (ignoreUnknown) { std::vector paths2; @@ -148,7 +151,7 @@ static void opRealise(Strings opFlags, Strings opArgs) if (dryRun) return; /* Build all paths at the same time to exploit parallelism. 
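The toBuildableReqs() call used here maps each legacy StorePathWithOutputs through a single rule, condensed below from the implementation this series adds in path-with-outputs.cc:

    BuildableReq StorePathWithOutputs::toBuildableReq() const
    {
        /* Anything carrying explicit outputs, or any .drv path, becomes a
           "build this derivation" request; everything else is an opaque
           "make this store path valid" request. */
        if (!outputs.empty() || path.isDerivation())
            return BuildableReqFromDrv { path, outputs };
        else
            return BuildableOpaque { path };
    }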
*/ - store->buildPaths(paths, buildMode); + store->buildPaths(toBuildableReqs(paths), buildMode); if (!ignoreUnknown) for (auto & i : paths) { @@ -879,7 +882,7 @@ static void opServe(Strings opFlags, Strings opArgs) try { MonitorFdHup monitor(in.fd); - store->buildPaths(paths); + store->buildPaths(toBuildableReqs(paths)); out << 0; } catch (Error & e) { assert(e.status); diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc index 48f4eb6e3..e86fbb3f7 100644 --- a/src/nix/bundle.cc +++ b/src/nix/bundle.cc @@ -70,7 +70,7 @@ struct CmdBundle : InstallableCommand auto evalState = getEvalState(); auto app = installable->toApp(*evalState); - store->buildPaths(app.context); + store->buildPaths(toBuildableReqs(app.context)); auto [bundlerFlakeRef, bundlerName] = parseFlakeRefWithFragment(bundler, absPath(".")); const flake::LockFlags lockFlags{ .writeLockFile = false }; @@ -110,7 +110,7 @@ struct CmdBundle : InstallableCommand StorePath outPath = store->parseStorePath(evalState->coerceToPath(*attr2->pos, *attr2->value, context2)); - store->buildPaths({{drvPath}}); + store->buildPaths({ BuildableReqFromDrv { drvPath } }); auto outPathS = store->printStorePath(outPath); diff --git a/src/nix/develop.cc b/src/nix/develop.cc index d0b140570..616e2073e 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -3,6 +3,7 @@ #include "common-args.hh" #include "shared.hh" #include "store-api.hh" +#include "path-with-outputs.hh" #include "derivations.hh" #include "affinity.hh" #include "progress-bar.hh" @@ -159,7 +160,7 @@ StorePath getDerivationEnvironment(ref store, const StorePath & drvPath) auto shellDrvPath = writeDerivation(*store, drv); /* Build the derivation. */ - store->buildPaths({{shellDrvPath}}); + store->buildPaths({BuildableReqFromDrv{shellDrvPath}}); for (auto & [_0, outputAndOptPath] : drv.outputsAndOptPaths(*store)) { auto & [_1, optPath] = outputAndOptPath; diff --git a/src/nix/flake.cc b/src/nix/flake.cc index a2b6c0303..9d6d22a43 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -7,6 +7,7 @@ #include "get-drvs.hh" #include "store-api.hh" #include "derivations.hh" +#include "path-with-outputs.hh" #include "attr-path.hh" #include "fetchers.hh" #include "registry.hh" @@ -292,7 +293,7 @@ struct CmdFlakeCheck : FlakeCommand } }; - std::vector drvPaths; + std::vector drvPaths; auto checkApp = [&](const std::string & attrPath, Value & v, const Pos & pos) { try { @@ -461,7 +462,7 @@ struct CmdFlakeCheck : FlakeCommand fmt("%s.%s.%s", name, attr.name, attr2.name), *attr2.value, *attr2.pos); if ((std::string) attr.name == settings.thisSystem.get()) - drvPaths.push_back({drvPath}); + drvPaths.push_back(BuildableReqFromDrv{drvPath}); } } } diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 4d275f577..b96e71844 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -233,7 +233,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile { ProfileManifest manifest(*getEvalState(), *profile); - std::vector pathsToBuild; + std::vector pathsToBuild; for (auto & installable : installables) { if (auto installable2 = std::dynamic_pointer_cast(installable)) { @@ -249,7 +249,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile attrPath, }; - pathsToBuild.push_back({drv.drvPath, StringSet{drv.outputName}}); + pathsToBuild.push_back(BuildableReqFromDrv{drv.drvPath, StringSet{drv.outputName}}); manifest.elements.emplace_back(std::move(element)); } else { @@ -260,12 +260,15 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile std::visit(overloaded { 
[&](BuildableOpaque bo) { - pathsToBuild.push_back({bo.path, {}}); + pathsToBuild.push_back(bo); element.storePaths.insert(bo.path); }, [&](BuildableFromDrv bfd) { + // TODO: Why are we querying if we know the output + // names already? Is it just to figure out what the + // default one is? for (auto & output : store->queryDerivationOutputMap(bfd.drvPath)) { - pathsToBuild.push_back({bfd.drvPath, {output.first}}); + pathsToBuild.push_back(BuildableReqFromDrv{bfd.drvPath, {output.first}}); element.storePaths.insert(output.second); } }, @@ -388,7 +391,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf auto matchers = getMatchers(store); // FIXME: code duplication - std::vector pathsToBuild; + std::vector pathsToBuild; for (size_t i = 0; i < manifest.elements.size(); ++i) { auto & element(manifest.elements[i]); @@ -423,7 +426,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf attrPath, }; - pathsToBuild.push_back({drv.drvPath, StringSet{"out"}}); // FIXME + pathsToBuild.push_back(BuildableReqFromDrv{drv.drvPath, {"out"}}); // FIXME } } diff --git a/src/nix/run.cc b/src/nix/run.cc index ec9388234..2e9bb41cc 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -182,7 +182,7 @@ struct CmdRun : InstallableCommand, RunCommon auto app = installable->toApp(*state); - state->store->buildPaths(app.context); + state->store->buildPaths(toBuildableReqs(app.context)); Strings allArgs{app.program}; for (auto & i : args) allArgs.push_back(i); From 4fe41c6db390c0295d20f6365ebedaec8ec79e1d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 5 Apr 2021 09:15:25 -0400 Subject: [PATCH 068/510] No templates for `Buildable` and `BuildableReq` --- src/libstore/buildable.cc | 3 --- src/libstore/buildable.hh | 30 +++++++++++++++++------------- 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/src/libstore/buildable.cc b/src/libstore/buildable.cc index 63ca1779e..7892b94e4 100644 --- a/src/libstore/buildable.cc +++ b/src/libstore/buildable.cc @@ -11,7 +11,6 @@ nlohmann::json BuildableOpaque::toJSON(ref store) const { return res; } -template<> nlohmann::json BuildableFromDrv::toJSON(ref store) const { nlohmann::json res; res["drvPath"] = store->printStorePath(drvPath); @@ -36,7 +35,6 @@ std::string BuildableOpaque::to_string(const Store & store) const { return store.printStorePath(path); } -template<> std::string BuildableReqFromDrv::to_string(const Store & store) const { return store.printStorePath(drvPath) + "!" 
@@ -56,7 +54,6 @@ BuildableOpaque BuildableOpaque::parse(const Store & store, std::string_view s) return {store.parseStorePath(s)}; } -template<> BuildableReqFromDrv BuildableReqFromDrv::parse(const Store & store, std::string_view s) { size_t n = s.find("!"); diff --git a/src/libstore/buildable.hh b/src/libstore/buildable.hh index db78316bd..54e627271 100644 --- a/src/libstore/buildable.hh +++ b/src/libstore/buildable.hh @@ -20,31 +20,35 @@ struct BuildableOpaque { static BuildableOpaque parse(const Store & store, std::string_view); }; -template -struct BuildableForFromDrv { +struct BuildableReqFromDrv { StorePath drvPath; - Outputs outputs; + std::set outputs; - nlohmann::json toJSON(ref store) const; std::string to_string(const Store & store) const; - static BuildableForFromDrv parse(const Store & store, std::string_view); + static BuildableReqFromDrv parse(const Store & store, std::string_view); }; -template -using BuildableFor = std::variant< +using BuildableReq = std::variant< BuildableOpaque, - BuildableForFromDrv + BuildableReqFromDrv >; -typedef BuildableForFromDrv> BuildableReqFromDrv; -typedef BuildableFor> BuildableReq; - std::string to_string(const Store & store, const BuildableReq &); BuildableReq parseBuildableReq(const Store & store, std::string_view); -typedef BuildableForFromDrv>> BuildableFromDrv; -typedef BuildableFor>> Buildable; +struct BuildableFromDrv { + StorePath drvPath; + std::map> outputs; + + nlohmann::json toJSON(ref store) const; + static BuildableFromDrv parse(const Store & store, std::string_view); +}; + +using Buildable = std::variant< + BuildableOpaque, + BuildableFromDrv +>; typedef std::vector Buildables; From 9dfb97c987d8b9d6a3d15f016e40f22f91deb764 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 5 Apr 2021 09:24:42 -0400 Subject: [PATCH 069/510] "newtype" BuildableReq This makes for better types errors and allows us to give it methods. 
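A stripped-down sketch of the idiom, with placeholder names (Foo and FooRaw are illustrative, not part of the patch): deriving a struct from the std::variant keeps the variant's constructors via `using Raw::Raw` while giving a place to hang members such as to_string() and parse().

    #include <string>
    #include <variant>

    using FooRaw = std::variant<int, std::string>;

    struct Foo : FooRaw {
        using Raw = FooRaw;
        using Raw::Raw;                      // inherit the variant's constructors

        const Raw & raw() const {            // hand the plain variant to std::visit
            return static_cast<const Raw &>(*this);
        }

        bool isNumber() const {              // methods can now live on the wrapper
            return std::holds_alternative<int>(raw());
        }
    };

    // Foo f = 42;                  f.isNumber() == true
    // Foo g = std::string("abc");  g.isNumber() == false

The small price is paid at the call sites: std::visit is handed req.raw() rather than req, so the visitor sees the underlying std::variant.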
--- src/libstore/build/derivation-goal.cc | 4 ++-- src/libstore/build/entry-points.cc | 2 +- src/libstore/build/local-derivation-goal.cc | 4 ++-- src/libstore/buildable.cc | 6 +++--- src/libstore/buildable.hh | 14 +++++++++++--- src/libstore/misc.cc | 4 ++-- src/libstore/path-with-outputs.cc | 2 +- src/libstore/remote-store.cc | 4 ++-- 8 files changed, 24 insertions(+), 16 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 8680d0bce..8396abbcd 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -73,7 +73,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, state = &DerivationGoal::getDerivation; name = fmt( "building of '%s' from .drv file", - to_string(worker.store, BuildableReqFromDrv { drvPath, wantedOutputs })); + BuildableReqFromDrv { drvPath, wantedOutputs }.to_string(worker.store)); trace("created"); mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); @@ -94,7 +94,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation state = &DerivationGoal::haveDerivation; name = fmt( "building of '%s' from in-memory derivation", - to_string(worker.store, BuildableReqFromDrv { drvPath, drv.outputNames() })); + BuildableReqFromDrv { drvPath, drv.outputNames() }.to_string(worker.store)); trace("created"); mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index d1973d78b..fc6294545 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -19,7 +19,7 @@ void Store::buildPaths(const std::vector & reqs, BuildMode buildMo [&](BuildableOpaque bo) { goals.insert(worker.makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair)); }, - }, br); + }, br.raw()); } worker.run(goals); diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index c245527c9..6cc384719 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -1200,7 +1200,7 @@ static StorePath pathPartOfReq(const BuildableReq & req) [&](BuildableReqFromDrv bfd) { return bfd.drvPath; }, - }, req); + }, req.raw()); } @@ -1340,7 +1340,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo for (auto & req : paths) { if (!goal.isAllowed(req)) - throw InvalidPath("cannot build '%s' in recursive Nix because path is unknown", to_string(*next, req)); + throw InvalidPath("cannot build '%s' in recursive Nix because path is unknown", req.to_string(*next)); } next->buildPaths(paths, buildMode); diff --git a/src/libstore/buildable.cc b/src/libstore/buildable.cc index 7892b94e4..31fef2faa 100644 --- a/src/libstore/buildable.cc +++ b/src/libstore/buildable.cc @@ -41,11 +41,11 @@ std::string BuildableReqFromDrv::to_string(const Store & store) const { + (outputs.empty() ? 
std::string { "*" } : concatStringsSep(",", outputs)); } -std::string to_string(const Store & store, const BuildableReq & req) +std::string BuildableReq::to_string(const Store & store) const { return std::visit( [&](const auto & req) { return req.to_string(store); }, - req); + this->raw()); } @@ -66,7 +66,7 @@ BuildableReqFromDrv BuildableReqFromDrv::parse(const Store & store, std::string_ return {drvPath, outputs}; } -BuildableReq parseBuildableReq(const Store & store, std::string_view s) +BuildableReq BuildableReq::parse(const Store & store, std::string_view s) { size_t n = s.find("!"); return n == s.npos diff --git a/src/libstore/buildable.hh b/src/libstore/buildable.hh index 54e627271..8317f3995 100644 --- a/src/libstore/buildable.hh +++ b/src/libstore/buildable.hh @@ -28,14 +28,22 @@ struct BuildableReqFromDrv { static BuildableReqFromDrv parse(const Store & store, std::string_view); }; -using BuildableReq = std::variant< +using _BuildableReqRaw = std::variant< BuildableOpaque, BuildableReqFromDrv >; -std::string to_string(const Store & store, const BuildableReq &); +struct BuildableReq : _BuildableReqRaw { + using Raw = _BuildableReqRaw; + using Raw::Raw; -BuildableReq parseBuildableReq(const Store & store, std::string_view); + inline const Raw & raw() const { + return static_cast(*this); + } + + std::string to_string(const Store & store) const; + static BuildableReq parse(const Store & store, std::string_view); +}; struct BuildableFromDrv { StorePath drvPath; diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index e702a4f9e..abfae1502 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -187,7 +187,7 @@ void Store::queryMissing(const std::vector & targets, { auto state(state_.lock()); - if (!state->done.insert(to_string(*this, req)).second) return; + if (!state->done.insert(req.to_string(*this)).second) return; } std::visit(overloaded { @@ -250,7 +250,7 @@ void Store::queryMissing(const std::vector & targets, for (auto & ref : info->second.references) pool.enqueue(std::bind(doPath, BuildableOpaque { ref })); }, - }, req); + }, req.raw()); }; for (auto & path : targets) diff --git a/src/libstore/path-with-outputs.cc b/src/libstore/path-with-outputs.cc index 353286ac6..2898b8d4f 100644 --- a/src/libstore/path-with-outputs.cc +++ b/src/libstore/path-with-outputs.cc @@ -41,7 +41,7 @@ std::variant StorePathWithOutputs::tryFromBuild [&](BuildableReqFromDrv bfd) -> std::variant { return StorePathWithOutputs { bfd.drvPath, bfd.outputs }; }, - }, p); + }, p.raw()); } diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index de1c95ed6..cb6402213 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -55,12 +55,12 @@ void write(const Store & store, Sink & out, const ContentAddress & ca) BuildableReq read(const Store & store, Source & from, Phantom _) { auto s = readString(from); - return parseBuildableReq(store, s); + return BuildableReq::parse(store, s); } void write(const Store & store, Sink & out, const BuildableReq & req) { - out << to_string(store, req); + out << req.to_string(store); } From 9b805d36ac70545fc4c0d863e21e0c2e5f2518a1 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 5 Apr 2021 09:48:18 -0400 Subject: [PATCH 070/510] Rename Buildable --- src/libcmd/command.cc | 6 +-- src/libcmd/command.hh | 4 +- src/libcmd/installables.cc | 44 ++++++++++----------- src/libcmd/installables.hh | 6 +-- src/libexpr/primops.cc | 6 +-- src/libmain/shared.cc | 2 +- src/libmain/shared.hh | 2 +- src/libstore/build/derivation-goal.cc 
| 4 +- src/libstore/build/entry-points.cc | 6 +-- src/libstore/build/local-derivation-goal.cc | 16 ++++---- src/libstore/build/local-derivation-goal.hh | 2 +- src/libstore/build/worker.cc | 6 +-- src/libstore/buildable.cc | 24 +++++------ src/libstore/buildable.hh | 37 +++++++++-------- src/libstore/daemon.cc | 12 +++--- src/libstore/legacy-ssh-store.cc | 4 +- src/libstore/misc.cc | 16 ++++---- src/libstore/path-with-outputs.cc | 18 ++++----- src/libstore/path-with-outputs.hh | 6 +-- src/libstore/remote-store.cc | 18 ++++----- src/libstore/remote-store.hh | 4 +- src/libstore/store-api.cc | 8 ++-- src/libstore/store-api.hh | 4 +- src/libstore/worker-protocol.hh | 2 +- src/nix-build/nix-build.cc | 2 +- src/nix-env/nix-env.cc | 12 +++--- src/nix-env/user-env.cc | 4 +- src/nix-store/nix-store.cc | 8 ++-- src/nix/build.cc | 6 +-- src/nix/bundle.cc | 4 +- src/nix/develop.cc | 8 ++-- src/nix/flake.cc | 4 +- src/nix/log.cc | 6 +-- src/nix/profile.cc | 14 +++---- src/nix/run.cc | 2 +- 35 files changed, 165 insertions(+), 162 deletions(-) diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index d29954f67..dc1fbc43f 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -162,7 +162,7 @@ void MixProfile::updateProfile(const StorePath & storePath) profile2, storePath)); } -void MixProfile::updateProfile(const Buildables & buildables) +void MixProfile::updateProfile(const DerivedPathsWithHints & buildables) { if (!profile) return; @@ -170,10 +170,10 @@ void MixProfile::updateProfile(const Buildables & buildables) for (auto & buildable : buildables) { std::visit(overloaded { - [&](BuildableOpaque bo) { + [&](DerivedPathOpaque bo) { result.push_back(bo.path); }, - [&](BuildableFromDrv bfd) { + [&](DerivedPathWithHintsBuilt bfd) { for (auto & output : bfd.outputs) { /* Output path should be known because we just tried to build it. */ diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh index e66c697eb..9e18c6e51 100644 --- a/src/libcmd/command.hh +++ b/src/libcmd/command.hh @@ -216,7 +216,7 @@ static RegisterCommand registerCommand2(std::vector && name) return RegisterCommand(std::move(name), [](){ return make_ref(); }); } -Buildables build(ref store, Realise mode, +DerivedPathsWithHints build(ref store, Realise mode, std::vector> installables, BuildMode bMode = bmNormal); std::set toStorePaths(ref store, @@ -252,7 +252,7 @@ struct MixProfile : virtual StoreCommand /* If 'profile' is set, make it point at the store path produced by 'buildables'. 
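Since the change is almost entirely mechanical, the correspondence can be summarized as a set of hypothetical aliases; these are a reading aid only, not something the patch adds:

    using Buildable           = DerivedPathWithHints;   // result side, may carry output-path hints
    using Buildables          = DerivedPathsWithHints;
    using BuildableOpaque     = DerivedPath::Opaque;    // shared by both sides
    using BuildableReq        = DerivedPath;            // what buildPaths()/queryMissing() take
    using BuildableReqFromDrv = DerivedPath::Built;
    using BuildableFromDrv    = DerivedPathWithHintsBuilt;

The free helpers follow the same pattern: toBuildableReqs() becomes toDerivedPaths(), parseBuildableReq() becomes DerivedPath::parse(), and buildablesToJSON() becomes derivedPathsWithHintsToJSON().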
*/ - void updateProfile(const Buildables & buildables); + void updateProfile(const DerivedPathsWithHints & buildables); }; struct MixDefaultProfile : MixProfile diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index b68c5f6a7..f091ac186 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -285,9 +285,9 @@ void completeFlakeRef(ref store, std::string_view prefix) } } -Buildable Installable::toBuildable() +DerivedPathWithHints Installable::toDerivedPathWithHints() { - auto buildables = toBuildables(); + auto buildables = toDerivedPathsWithHints(); if (buildables.size() != 1) throw Error("installable '%s' evaluates to %d derivations, where only one is expected", what(), buildables.size()); return std::move(buildables[0]); @@ -321,7 +321,7 @@ struct InstallableStorePath : Installable std::string what() override { return store->printStorePath(storePath); } - Buildables toBuildables() override + DerivedPathsWithHints toDerivedPathsWithHints() override { if (storePath.isDerivation()) { std::map> outputs; @@ -329,14 +329,14 @@ struct InstallableStorePath : Installable for (auto & [name, output] : drv.outputsAndOptPaths(*store)) outputs.emplace(name, output.second); return { - BuildableFromDrv { + DerivedPathWithHintsBuilt { .drvPath = storePath, .outputs = std::move(outputs) } }; } else { return { - BuildableOpaque { + DerivedPathOpaque { .path = storePath, } }; @@ -349,9 +349,9 @@ struct InstallableStorePath : Installable } }; -Buildables InstallableValue::toBuildables() +DerivedPathsWithHints InstallableValue::toDerivedPathsWithHints() { - Buildables res; + DerivedPathsWithHints res; std::map>> drvsToOutputs; @@ -364,7 +364,7 @@ Buildables InstallableValue::toBuildables() } for (auto & i : drvsToOutputs) - res.push_back(BuildableFromDrv { i.first, i.second }); + res.push_back(DerivedPathWithHintsBuilt { i.first, i.second }); return res; } @@ -671,28 +671,28 @@ std::shared_ptr SourceExprCommand::parseInstallable( return installables.front(); } -Buildables build(ref store, Realise mode, +DerivedPathsWithHints build(ref store, Realise mode, std::vector> installables, BuildMode bMode) { if (mode == Realise::Nothing) settings.readOnlyMode = true; - Buildables buildables; + DerivedPathsWithHints buildables; - std::vector pathsToBuild; + std::vector pathsToBuild; for (auto & i : installables) { - for (auto & b : i->toBuildables()) { + for (auto & b : i->toDerivedPathsWithHints()) { std::visit(overloaded { - [&](BuildableOpaque bo) { + [&](DerivedPathOpaque bo) { pathsToBuild.push_back(bo); }, - [&](BuildableFromDrv bfd) { + [&](DerivedPathWithHintsBuilt bfd) { StringSet outputNames; for (auto & output : bfd.outputs) outputNames.insert(output.first); pathsToBuild.push_back( - BuildableReqFromDrv{bfd.drvPath, outputNames}); + DerivedPath::Built{bfd.drvPath, outputNames}); }, }, b); buildables.push_back(std::move(b)); @@ -717,10 +717,10 @@ std::set toRealisedPaths( if (operateOn == OperateOn::Output) { for (auto & b : build(store, mode, installables)) std::visit(overloaded { - [&](BuildableOpaque bo) { + [&](DerivedPathOpaque bo) { res.insert(bo.path); }, - [&](BuildableFromDrv bfd) { + [&](DerivedPathWithHintsBuilt bfd) { auto drv = store->readDerivation(bfd.drvPath); auto outputHashes = staticOutputHashes(*store, drv); for (auto & output : bfd.outputs) { @@ -751,8 +751,8 @@ std::set toRealisedPaths( settings.readOnlyMode = true; for (auto & i : installables) - for (auto & b : i->toBuildables()) - if (auto bfd = std::get_if(&b)) + for (auto & b : 
i->toDerivedPathsWithHints()) + if (auto bfd = std::get_if(&b)) res.insert(bfd->drvPath); } @@ -787,9 +787,9 @@ StorePathSet toDerivations(ref store, StorePathSet drvPaths; for (auto & i : installables) - for (auto & b : i->toBuildables()) + for (auto & b : i->toDerivedPathsWithHints()) std::visit(overloaded { - [&](BuildableOpaque bo) { + [&](DerivedPathOpaque bo) { if (!useDeriver) throw Error("argument '%s' did not evaluate to a derivation", i->what()); auto derivers = store->queryValidDerivers(bo.path); @@ -798,7 +798,7 @@ StorePathSet toDerivations(ref store, // FIXME: use all derivers? drvPaths.insert(*derivers.begin()); }, - [&](BuildableFromDrv bfd) { + [&](DerivedPathWithHintsBuilt bfd) { drvPaths.insert(bfd.drvPath); }, }, b); diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh index e5c6fe208..0bc932b52 100644 --- a/src/libcmd/installables.hh +++ b/src/libcmd/installables.hh @@ -29,9 +29,9 @@ struct Installable virtual std::string what() = 0; - virtual Buildables toBuildables() = 0; + virtual DerivedPathsWithHints toDerivedPathsWithHints() = 0; - Buildable toBuildable(); + DerivedPathWithHints toDerivedPathWithHints(); App toApp(EvalState & state); @@ -74,7 +74,7 @@ struct InstallableValue : Installable virtual std::vector toDerivations() = 0; - Buildables toBuildables() override; + DerivedPathsWithHints toDerivedPathsWithHints() override; }; struct InstallableFlake : InstallableValue diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 24bc34b74..428adf4c2 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -35,7 +35,7 @@ InvalidPathError::InvalidPathError(const Path & path) : void EvalState::realiseContext(const PathSet & context) { - std::vector drvs; + std::vector drvs; for (auto & i : context) { auto [ctxS, outputName] = decodeContext(i); @@ -56,8 +56,8 @@ void EvalState::realiseContext(const PathSet & context) /* For performance, prefetch all substitute info. 
*/ StorePathSet willBuild, willSubstitute, unknown; uint64_t downloadSize, narSize; - std::vector buildReqs; - for (auto & d : drvs) buildReqs.emplace_back(BuildableReq { d }); + std::vector buildReqs; + for (auto & d : drvs) buildReqs.emplace_back(DerivedPath { d }); store->queryMissing(buildReqs, willBuild, willSubstitute, unknown, downloadSize, narSize); store->buildPaths(buildReqs); diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 20027e099..09af57871 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -36,7 +36,7 @@ void printGCWarning() } -void printMissing(ref store, const std::vector & paths, Verbosity lvl) +void printMissing(ref store, const std::vector & paths, Verbosity lvl) { uint64_t downloadSize, narSize; StorePathSet willBuild, willSubstitute, unknown; diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh index 18e0fb57d..9cb9e6da2 100644 --- a/src/libmain/shared.hh +++ b/src/libmain/shared.hh @@ -43,7 +43,7 @@ struct StorePathWithOutputs; void printMissing( ref store, - const std::vector & paths, + const std::vector & paths, Verbosity lvl = lvlInfo); void printMissing(ref store, const StorePathSet & willBuild, diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 8396abbcd..3ce538f77 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -73,7 +73,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, state = &DerivationGoal::getDerivation; name = fmt( "building of '%s' from .drv file", - BuildableReqFromDrv { drvPath, wantedOutputs }.to_string(worker.store)); + DerivedPath::Built { drvPath, wantedOutputs }.to_string(worker.store)); trace("created"); mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); @@ -94,7 +94,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation state = &DerivationGoal::haveDerivation; name = fmt( "building of '%s' from in-memory derivation", - BuildableReqFromDrv { drvPath, drv.outputNames() }.to_string(worker.store)); + DerivedPath::Built { drvPath, drv.outputNames() }.to_string(worker.store)); trace("created"); mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index fc6294545..732d4785d 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -6,17 +6,17 @@ namespace nix { -void Store::buildPaths(const std::vector & reqs, BuildMode buildMode) +void Store::buildPaths(const std::vector & reqs, BuildMode buildMode) { Worker worker(*this); Goals goals; for (auto & br : reqs) { std::visit(overloaded { - [&](BuildableReqFromDrv bfd) { + [&](DerivedPath::Built bfd) { goals.insert(worker.makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode)); }, - [&](BuildableOpaque bo) { + [&](DerivedPath::Opaque bo) { goals.insert(worker.makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? 
Repair : NoRepair)); }, }, br.raw()); diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 6cc384719..7c1402918 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -1191,20 +1191,20 @@ void LocalDerivationGoal::writeStructuredAttrs() } -static StorePath pathPartOfReq(const BuildableReq & req) +static StorePath pathPartOfReq(const DerivedPath & req) { return std::visit(overloaded { - [&](BuildableOpaque bo) { + [&](DerivedPath::Opaque bo) { return bo.path; }, - [&](BuildableReqFromDrv bfd) { + [&](DerivedPath::Built bfd) { return bfd.drvPath; }, }, req.raw()); } -bool LocalDerivationGoal::isAllowed(const BuildableReq & req) +bool LocalDerivationGoal::isAllowed(const DerivedPath & req) { return this->isAllowed(pathPartOfReq(req)); } @@ -1332,7 +1332,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo // an allowed derivation { throw Error("queryRealisation"); } - void buildPaths(const std::vector & paths, BuildMode buildMode) override + void buildPaths(const std::vector & paths, BuildMode buildMode) override { if (buildMode != bmNormal) throw Error("unsupported build mode"); @@ -1346,7 +1346,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo next->buildPaths(paths, buildMode); for (auto & path : paths) { - auto p = std::get_if(&path); + auto p = std::get_if(&path); if (!p) continue; auto & bfd = *p; auto outputs = next->queryDerivationOutputMap(bfd.drvPath); @@ -1380,7 +1380,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo void addSignatures(const StorePath & storePath, const StringSet & sigs) override { unsupported("addSignatures"); } - void queryMissing(const std::vector & targets, + void queryMissing(const std::vector & targets, StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown, uint64_t & downloadSize, uint64_t & narSize) override { @@ -1388,7 +1388,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo client about what paths will be built/substituted or are already present. Probably not a big deal. 
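The visitor objects used throughout these hunks come from the usual `overloaded` helper, which Nix defines in util.hh along the lines shown below; render() is a hypothetical example (it assumes the store headers and roughly mirrors DerivedPath::to_string()), not code from the patch.

    // Build one callable out of several lambdas, one per variant alternative.
    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

    std::string render(const Store & store, const DerivedPath & req)
    {
        return std::visit(overloaded {
            [&](const DerivedPath::Opaque & bo) {
                return store.printStorePath(bo.path);
            },
            [&](const DerivedPath::Built & bfd) {
                return store.printStorePath(bfd.drvPath)
                    + "!" + concatStringsSep(",", bfd.outputs);
            },
        }, req.raw());
    }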
*/ - std::vector allowed; + std::vector allowed; for (auto & req : targets) { if (goal.isAllowed(req)) allowed.emplace_back(req); diff --git a/src/libstore/build/local-derivation-goal.hh b/src/libstore/build/local-derivation-goal.hh index edb93f84e..d30be2351 100644 --- a/src/libstore/build/local-derivation-goal.hh +++ b/src/libstore/build/local-derivation-goal.hh @@ -116,7 +116,7 @@ struct LocalDerivationGoal : public DerivationGoal { return inputPaths.count(path) || addedPaths.count(path); } - bool isAllowed(const BuildableReq & req); + bool isAllowed(const DerivedPath & req); friend struct RestrictedStore; diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index fef4cb0cb..6c04d3ed3 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -226,14 +226,14 @@ void Worker::waitForAWhile(GoalPtr goal) void Worker::run(const Goals & _topGoals) { - std::vector topPaths; + std::vector topPaths; for (auto & i : _topGoals) { topGoals.insert(i); if (auto goal = dynamic_cast(i.get())) { - topPaths.push_back(BuildableReqFromDrv{goal->drvPath, goal->wantedOutputs}); + topPaths.push_back(DerivedPath::Built{goal->drvPath, goal->wantedOutputs}); } else if (auto goal = dynamic_cast(i.get())) { - topPaths.push_back(BuildableOpaque{goal->storePath}); + topPaths.push_back(DerivedPath::Opaque{goal->storePath}); } } diff --git a/src/libstore/buildable.cc b/src/libstore/buildable.cc index 31fef2faa..a8c0c70b1 100644 --- a/src/libstore/buildable.cc +++ b/src/libstore/buildable.cc @@ -5,13 +5,13 @@ namespace nix { -nlohmann::json BuildableOpaque::toJSON(ref store) const { +nlohmann::json DerivedPath::Opaque::toJSON(ref store) const { nlohmann::json res; res["path"] = store->printStorePath(path); return res; } -nlohmann::json BuildableFromDrv::toJSON(ref store) const { +nlohmann::json DerivedPathWithHintsBuilt::toJSON(ref store) const { nlohmann::json res; res["drvPath"] = store->printStorePath(drvPath); for (const auto& [output, path] : outputs) { @@ -20,9 +20,9 @@ nlohmann::json BuildableFromDrv::toJSON(ref store) const { return res; } -nlohmann::json buildablesToJSON(const Buildables & buildables, ref store) { +nlohmann::json derivedPathsWithHintsToJSON(const DerivedPathsWithHints & buildables, ref store) { auto res = nlohmann::json::array(); - for (const Buildable & buildable : buildables) { + for (const DerivedPathWithHints & buildable : buildables) { std::visit([&res, store](const auto & buildable) { res.push_back(buildable.toJSON(store)); }, buildable); @@ -31,17 +31,17 @@ nlohmann::json buildablesToJSON(const Buildables & buildables, ref store) } -std::string BuildableOpaque::to_string(const Store & store) const { +std::string DerivedPath::Opaque::to_string(const Store & store) const { return store.printStorePath(path); } -std::string BuildableReqFromDrv::to_string(const Store & store) const { +std::string DerivedPath::Built::to_string(const Store & store) const { return store.printStorePath(drvPath) + "!" + (outputs.empty() ? 
std::string { "*" } : concatStringsSep(",", outputs)); } -std::string BuildableReq::to_string(const Store & store) const +std::string DerivedPath::to_string(const Store & store) const { return std::visit( [&](const auto & req) { return req.to_string(store); }, @@ -49,12 +49,12 @@ std::string BuildableReq::to_string(const Store & store) const } -BuildableOpaque BuildableOpaque::parse(const Store & store, std::string_view s) +DerivedPath::Opaque DerivedPath::Opaque::parse(const Store & store, std::string_view s) { return {store.parseStorePath(s)}; } -BuildableReqFromDrv BuildableReqFromDrv::parse(const Store & store, std::string_view s) +DerivedPath::Built DerivedPath::Built::parse(const Store & store, std::string_view s) { size_t n = s.find("!"); assert(n != s.npos); @@ -66,12 +66,12 @@ BuildableReqFromDrv BuildableReqFromDrv::parse(const Store & store, std::string_ return {drvPath, outputs}; } -BuildableReq BuildableReq::parse(const Store & store, std::string_view s) +DerivedPath DerivedPath::parse(const Store & store, std::string_view s) { size_t n = s.find("!"); return n == s.npos - ? (BuildableReq) BuildableOpaque::parse(store, s) - : (BuildableReq) BuildableReqFromDrv::parse(store, s); + ? (DerivedPath) DerivedPath::Opaque::parse(store, s) + : (DerivedPath) DerivedPath::Built::parse(store, s); } } diff --git a/src/libstore/buildable.hh b/src/libstore/buildable.hh index 8317f3995..0a0cf8105 100644 --- a/src/libstore/buildable.hh +++ b/src/libstore/buildable.hh @@ -12,54 +12,57 @@ namespace nix { class Store; -struct BuildableOpaque { +struct DerivedPathOpaque { StorePath path; nlohmann::json toJSON(ref store) const; std::string to_string(const Store & store) const; - static BuildableOpaque parse(const Store & store, std::string_view); + static DerivedPathOpaque parse(const Store & store, std::string_view); }; -struct BuildableReqFromDrv { +struct DerivedPathBuilt { StorePath drvPath; std::set outputs; std::string to_string(const Store & store) const; - static BuildableReqFromDrv parse(const Store & store, std::string_view); + static DerivedPathBuilt parse(const Store & store, std::string_view); }; -using _BuildableReqRaw = std::variant< - BuildableOpaque, - BuildableReqFromDrv +using _DerivedPathRaw = std::variant< + DerivedPathOpaque, + DerivedPathBuilt >; -struct BuildableReq : _BuildableReqRaw { - using Raw = _BuildableReqRaw; +struct DerivedPath : _DerivedPathRaw { + using Raw = _DerivedPathRaw; using Raw::Raw; + using Opaque = DerivedPathOpaque; + using Built = DerivedPathBuilt; + inline const Raw & raw() const { return static_cast(*this); } std::string to_string(const Store & store) const; - static BuildableReq parse(const Store & store, std::string_view); + static DerivedPath parse(const Store & store, std::string_view); }; -struct BuildableFromDrv { +struct DerivedPathWithHintsBuilt { StorePath drvPath; std::map> outputs; nlohmann::json toJSON(ref store) const; - static BuildableFromDrv parse(const Store & store, std::string_view); + static DerivedPathWithHintsBuilt parse(const Store & store, std::string_view); }; -using Buildable = std::variant< - BuildableOpaque, - BuildableFromDrv +using DerivedPathWithHints = std::variant< + DerivedPath::Opaque, + DerivedPathWithHintsBuilt >; -typedef std::vector Buildables; +typedef std::vector DerivedPathsWithHints; -nlohmann::json buildablesToJSON(const Buildables & buildables, ref store); +nlohmann::json derivedPathsWithHintsToJSON(const DerivedPathsWithHints & buildables, ref store); } diff --git a/src/libstore/daemon.cc 
b/src/libstore/daemon.cc index 6b527dcb2..affd60472 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -260,14 +260,14 @@ static void writeValidPathInfo( } } -static std::vector readBuildableReqs(Store & store, unsigned int clientVersion, Source & from) +static std::vector readDerivedPaths(Store & store, unsigned int clientVersion, Source & from) { - std::vector reqs; + std::vector reqs; if (GET_PROTOCOL_MINOR(clientVersion) >= 29) { - reqs = worker_proto::read(store, from, Phantom> {}); + reqs = worker_proto::read(store, from, Phantom> {}); } else { for (auto & s : readStrings(from)) - reqs.push_back(parsePathWithOutputs(store, s).toBuildableReq()); + reqs.push_back(parsePathWithOutputs(store, s).toDerivedPath()); } return reqs; } @@ -506,7 +506,7 @@ static void performOp(TunnelLogger * logger, ref store, } case wopBuildPaths: { - auto drvs = readBuildableReqs(*store, clientVersion, from); + auto drvs = readDerivedPaths(*store, clientVersion, from); BuildMode mode = bmNormal; if (GET_PROTOCOL_MINOR(clientVersion) >= 15) { mode = (BuildMode) readInt(from); @@ -870,7 +870,7 @@ static void performOp(TunnelLogger * logger, ref store, } case wopQueryMissing: { - auto targets = readBuildableReqs(*store, clientVersion, from); + auto targets = readDerivedPaths(*store, clientVersion, from); logger->startWork(); StorePathSet willBuild, willSubstitute, unknown; uint64_t downloadSize, narSize; diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 1cb977be6..edaf75136 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -267,14 +267,14 @@ public: return status; } - void buildPaths(const std::vector & drvPaths, BuildMode buildMode) override + void buildPaths(const std::vector & drvPaths, BuildMode buildMode) override { auto conn(connections->get()); conn->to << cmdBuildPaths; Strings ss; for (auto & p : drvPaths) { - auto sOrDrvPath = StorePathWithOutputs::tryFromBuildableReq(p); + auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p); std::visit(overloaded { [&](StorePathWithOutputs s) { ss.push_back(s.to_string(*this)); diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index abfae1502..a99a2fc78 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -117,7 +117,7 @@ std::optional getDerivationCA(const BasicDerivation & drv) return std::nullopt; } -void Store::queryMissing(const std::vector & targets, +void Store::queryMissing(const std::vector & targets, StorePathSet & willBuild_, StorePathSet & willSubstitute_, StorePathSet & unknown_, uint64_t & downloadSize_, uint64_t & narSize_) { @@ -145,7 +145,7 @@ void Store::queryMissing(const std::vector & targets, Sync state_(State{{}, unknown_, willSubstitute_, willBuild_, downloadSize_, narSize_}); - std::function doPath; + std::function doPath; auto mustBuildDrv = [&](const StorePath & drvPath, const Derivation & drv) { { @@ -154,7 +154,7 @@ void Store::queryMissing(const std::vector & targets, } for (auto & i : drv.inputDrvs) - pool.enqueue(std::bind(doPath, BuildableReqFromDrv { i.first, i.second })); + pool.enqueue(std::bind(doPath, DerivedPath::Built { i.first, i.second })); }; auto checkOutput = [&]( @@ -177,13 +177,13 @@ void Store::queryMissing(const std::vector & targets, drvState->outPaths.insert(outPath); if (!drvState->left) { for (auto & path : drvState->outPaths) - pool.enqueue(std::bind(doPath, BuildableOpaque { path } )); + pool.enqueue(std::bind(doPath, DerivedPath::Opaque { path } )); } } } }; - doPath = [&](const BuildableReq & 
req) { + doPath = [&](const DerivedPath & req) { { auto state(state_.lock()); @@ -191,7 +191,7 @@ void Store::queryMissing(const std::vector & targets, } std::visit(overloaded { - [&](BuildableReqFromDrv bfd) { + [&](DerivedPath::Built bfd) { if (!isValidPath(bfd.drvPath)) { // FIXME: we could try to substitute the derivation. auto state(state_.lock()); @@ -224,7 +224,7 @@ void Store::queryMissing(const std::vector & targets, mustBuildDrv(bfd.drvPath, *drv); }, - [&](BuildableOpaque bo) { + [&](DerivedPath::Opaque bo) { if (isValidPath(bo.path)) return; @@ -248,7 +248,7 @@ void Store::queryMissing(const std::vector & targets, } for (auto & ref : info->second.references) - pool.enqueue(std::bind(doPath, BuildableOpaque { ref })); + pool.enqueue(std::bind(doPath, DerivedPath::Opaque { ref })); }, }, req.raw()); }; diff --git a/src/libstore/path-with-outputs.cc b/src/libstore/path-with-outputs.cc index 2898b8d4f..865d64cf2 100644 --- a/src/libstore/path-with-outputs.cc +++ b/src/libstore/path-with-outputs.cc @@ -11,34 +11,34 @@ std::string StorePathWithOutputs::to_string(const Store & store) const } -BuildableReq StorePathWithOutputs::toBuildableReq() const +DerivedPath StorePathWithOutputs::toDerivedPath() const { if (!outputs.empty() || path.isDerivation()) - return BuildableReqFromDrv { path, outputs }; + return DerivedPath::Built { path, outputs }; else - return BuildableOpaque { path }; + return DerivedPath::Opaque { path }; } -std::vector toBuildableReqs(const std::vector ss) +std::vector toDerivedPaths(const std::vector ss) { - std::vector reqs; - for (auto & s : ss) reqs.push_back(s.toBuildableReq()); + std::vector reqs; + for (auto & s : ss) reqs.push_back(s.toDerivedPath()); return reqs; } -std::variant StorePathWithOutputs::tryFromBuildableReq(const BuildableReq & p) +std::variant StorePathWithOutputs::tryFromDerivedPath(const DerivedPath & p) { return std::visit(overloaded { - [&](BuildableOpaque bo) -> std::variant { + [&](DerivedPath::Opaque bo) -> std::variant { if (bo.path.isDerivation()) { // drv path gets interpreted as "build", not "get drv file itself" return bo.path; } return StorePathWithOutputs { bo.path }; }, - [&](BuildableReqFromDrv bfd) -> std::variant { + [&](DerivedPath::Built bfd) -> std::variant { return StorePathWithOutputs { bfd.drvPath, bfd.outputs }; }, }, p.raw()); diff --git a/src/libstore/path-with-outputs.hh b/src/libstore/path-with-outputs.hh index 870cac08e..749348398 100644 --- a/src/libstore/path-with-outputs.hh +++ b/src/libstore/path-with-outputs.hh @@ -14,12 +14,12 @@ struct StorePathWithOutputs std::string to_string(const Store & store) const; - BuildableReq toBuildableReq() const; + DerivedPath toDerivedPath() const; - static std::variant tryFromBuildableReq(const BuildableReq &); + static std::variant tryFromDerivedPath(const DerivedPath &); }; -std::vector toBuildableReqs(const std::vector); +std::vector toDerivedPaths(const std::vector); std::pair parsePathWithOutputs(std::string_view s); diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index cb6402213..761b4a087 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -52,13 +52,13 @@ void write(const Store & store, Sink & out, const ContentAddress & ca) } -BuildableReq read(const Store & store, Source & from, Phantom _) +DerivedPath read(const Store & store, Source & from, Phantom _) { auto s = readString(from); - return BuildableReq::parse(store, s); + return DerivedPath::parse(store, s); } -void write(const Store & store, Sink & out, const 
BuildableReq & req) +void write(const Store & store, Sink & out, const DerivedPath & req) { out << req.to_string(store); } @@ -670,14 +670,14 @@ std::optional RemoteStore::queryRealisation(const DrvOutput & return {Realisation{.id = id, .outPath = *outPaths.begin()}}; } -static void writeBuildableReqs(RemoteStore & store, ConnectionHandle & conn, const std::vector & reqs) +static void writeDerivedPaths(RemoteStore & store, ConnectionHandle & conn, const std::vector & reqs) { if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 29) { worker_proto::write(store, conn->to, reqs); } else { Strings ss; for (auto & p : reqs) { - auto sOrDrvPath = StorePathWithOutputs::tryFromBuildableReq(p); + auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p); std::visit(overloaded { [&](StorePathWithOutputs s) { ss.push_back(s.to_string(store)); @@ -694,12 +694,12 @@ static void writeBuildableReqs(RemoteStore & store, ConnectionHandle & conn, con } } -void RemoteStore::buildPaths(const std::vector & drvPaths, BuildMode buildMode) +void RemoteStore::buildPaths(const std::vector & drvPaths, BuildMode buildMode) { auto conn(getConnection()); conn->to << wopBuildPaths; assert(GET_PROTOCOL_MINOR(conn->daemonVersion) >= 13); - writeBuildableReqs(*this, conn, drvPaths); + writeDerivedPaths(*this, conn, drvPaths); if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 15) conn->to << buildMode; else @@ -838,7 +838,7 @@ void RemoteStore::addSignatures(const StorePath & storePath, const StringSet & s } -void RemoteStore::queryMissing(const std::vector & targets, +void RemoteStore::queryMissing(const std::vector & targets, StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown, uint64_t & downloadSize, uint64_t & narSize) { @@ -849,7 +849,7 @@ void RemoteStore::queryMissing(const std::vector & targets, // to prevent a deadlock. 
goto fallback; conn->to << wopQueryMissing; - writeBuildableReqs(*this, conn, targets); + writeDerivedPaths(*this, conn, targets); conn.processStderr(); willBuild = worker_proto::read(*this, conn->from, Phantom {}); willSubstitute = worker_proto::read(*this, conn->from, Phantom {}); diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 20d366038..6cf76a46d 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -85,7 +85,7 @@ public: std::optional queryRealisation(const DrvOutput &) override; - void buildPaths(const std::vector & paths, BuildMode buildMode) override; + void buildPaths(const std::vector & paths, BuildMode buildMode) override; BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) override; @@ -108,7 +108,7 @@ public: void addSignatures(const StorePath & storePath, const StringSet & sigs) override; - void queryMissing(const std::vector & targets, + void queryMissing(const std::vector & targets, StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown, uint64_t & downloadSize, uint64_t & narSize) override; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 8b60bdc62..93fcb068f 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -529,10 +529,10 @@ void Store::queryPathInfo(const StorePath & storePath, void Store::substitutePaths(const StorePathSet & paths) { - std::vector paths2; + std::vector paths2; for (auto & path : paths) if (!path.isDerivation()) - paths2.push_back(BuildableOpaque{path}); + paths2.push_back(DerivedPath::Opaque{path}); uint64_t downloadSize, narSize; StorePathSet willBuild, willSubstitute, unknown; queryMissing(paths2, @@ -540,8 +540,8 @@ void Store::substitutePaths(const StorePathSet & paths) if (!willSubstitute.empty()) try { - std::vector subs; - for (auto & p : willSubstitute) subs.push_back(BuildableOpaque{p}); + std::vector subs; + for (auto & p : willSubstitute) subs.push_back(DerivedPath::Opaque{p}); buildPaths(subs); } catch (Error & e) { logWarning(e.info()); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 59d0983df..483f3c5fa 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -494,7 +494,7 @@ public: recursively building any sub-derivations. For inputs that are not derivations, substitute them. */ virtual void buildPaths( - const std::vector & paths, + const std::vector & paths, BuildMode buildMode = bmNormal); /* Build a single non-materialized derivation (i.e. not from an @@ -656,7 +656,7 @@ public: /* Given a set of paths that are to be built, return the set of derivations that will be built, and the set of output paths that will be substituted. 
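On the wire, the request vectors passed to these calls rely on the std::vector<T> overloads added to worker_proto earlier in the series: a length prefix followed by each element serialized through its own read/write overload. Roughly, as a sketch with the template parameters spelled out:

    namespace nix::worker_proto {

    template<typename T>
    std::vector<T> read(const Store & store, Source & from, Phantom<std::vector<T>> _)
    {
        std::vector<T> res;
        auto size = readNum<size_t>(from);           // element count first
        while (size--)
            res.push_back(read(store, from, Phantom<T> {}));
        return res;
    }

    template<typename T>
    void write(const Store & store, Sink & out, const std::vector<T> & v)
    {
        out << v.size();
        for (auto & x : v)
            write(store, out, x);                    // defer to the element overload
    }

    }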
*/ - virtual void queryMissing(const std::vector & targets, + virtual void queryMissing(const std::vector & targets, StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown, uint64_t & downloadSize, uint64_t & narSize); diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index 0255726ac..001ed25e3 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -86,7 +86,7 @@ namespace worker_proto { MAKE_WORKER_PROTO(, std::string); MAKE_WORKER_PROTO(, StorePath); MAKE_WORKER_PROTO(, ContentAddress); -MAKE_WORKER_PROTO(, BuildableReq); +MAKE_WORKER_PROTO(, DerivedPath); MAKE_WORKER_PROTO(, Realisation); MAKE_WORKER_PROTO(, DrvOutput); diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 6f8a61261..d46bc1f2b 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -323,7 +323,7 @@ static void main_nix_build(int argc, char * * argv) state->printStats(); auto buildPaths = [&](const std::vector & paths0) { - auto paths = toBuildableReqs(paths0); + auto paths = toDerivedPaths(paths0); /* Note: we do this even when !printMissing to efficiently fetch binary cache data. */ uint64_t downloadSize, narSize; diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index af1c69b87..e04954d45 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -419,13 +419,13 @@ static void queryInstSources(EvalState & state, static void printMissing(EvalState & state, DrvInfos & elems) { - std::vector targets; + std::vector targets; for (auto & i : elems) { Path drvPath = i.queryDrvPath(); if (drvPath != "") - targets.push_back(BuildableReqFromDrv{state.store->parseStorePath(drvPath)}); + targets.push_back(DerivedPath::Built{state.store->parseStorePath(drvPath)}); else - targets.push_back(BuildableOpaque{state.store->parseStorePath(i.queryOutPath())}); + targets.push_back(DerivedPath::Opaque{state.store->parseStorePath(i.queryOutPath())}); } printMissing(state.store, targets); @@ -694,12 +694,12 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs) if (globals.forceName != "") drv.setName(globals.forceName); - std::vector paths { + std::vector paths { (drv.queryDrvPath() != "") - ? (BuildableReq) (BuildableReqFromDrv { + ? (DerivedPath) (DerivedPath::Built { globals.state->store->parseStorePath(drv.queryDrvPath()) }) - : (BuildableReq) (BuildableOpaque { + : (DerivedPath) (DerivedPath::Opaque { globals.state->store->parseStorePath(drv.queryOutPath()) }), }; diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 0ccf960fb..5ceb2ae67 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -43,7 +43,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, debug(format("building user environment dependencies")); state.store->buildPaths( - toBuildableReqs(drvsToBuild), + toDerivedPaths(drvsToBuild), state.repair ? bmRepair : bmNormal); /* Construct the whole top level derivation. */ @@ -140,7 +140,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, std::vector topLevelDrvs; topLevelDrvs.push_back({topLevelDrv}); state.store->buildPaths( - toBuildableReqs(topLevelDrvs), + toDerivedPaths(topLevelDrvs), state.repair ? bmRepair : bmNormal); /* Switch the current user environment to the output path. 
*/ diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 21c1e547b..b327793e7 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -63,7 +63,7 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true) auto store2 = std::dynamic_pointer_cast(store); if (path.path.isDerivation()) { - if (build) store->buildPaths({path.toBuildableReq()}); + if (build) store->buildPaths({path.toDerivedPath()}); auto outputPaths = store->queryDerivationOutputMap(path.path); Derivation drv = store->derivationFromPath(path.path); rootNr++; @@ -134,7 +134,7 @@ static void opRealise(Strings opFlags, Strings opArgs) uint64_t downloadSize, narSize; StorePathSet willBuild, willSubstitute, unknown; store->queryMissing( - toBuildableReqs(paths), + toDerivedPaths(paths), willBuild, willSubstitute, unknown, downloadSize, narSize); if (ignoreUnknown) { @@ -151,7 +151,7 @@ static void opRealise(Strings opFlags, Strings opArgs) if (dryRun) return; /* Build all paths at the same time to exploit parallelism. */ - store->buildPaths(toBuildableReqs(paths), buildMode); + store->buildPaths(toDerivedPaths(paths), buildMode); if (!ignoreUnknown) for (auto & i : paths) { @@ -882,7 +882,7 @@ static void opServe(Strings opFlags, Strings opArgs) try { MonitorFdHup monitor(in.fd); - store->buildPaths(toBuildableReqs(paths)); + store->buildPaths(toDerivedPaths(paths)); out << 0; } catch (Error & e) { assert(e.status); diff --git a/src/nix/build.cc b/src/nix/build.cc index 724ce9d79..0529ed382 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -61,12 +61,12 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile for (const auto & [_i, buildable] : enumerate(buildables)) { auto i = _i; std::visit(overloaded { - [&](BuildableOpaque bo) { + [&](DerivedPathOpaque bo) { std::string symlink = outLink; if (i) symlink += fmt("-%d", i); store2->addPermRoot(bo.path, absPath(symlink)); }, - [&](BuildableFromDrv bfd) { + [&](DerivedPathWithHintsBuilt bfd) { auto builtOutputs = store->queryDerivationOutputMap(bfd.drvPath); for (auto & output : builtOutputs) { std::string symlink = outLink; @@ -80,7 +80,7 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile updateProfile(buildables); - if (json) logger->cout("%s", buildablesToJSON(buildables, store).dump()); + if (json) logger->cout("%s", derivedPathsWithHintsToJSON(buildables, store).dump()); } }; diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc index e86fbb3f7..53dccc63a 100644 --- a/src/nix/bundle.cc +++ b/src/nix/bundle.cc @@ -70,7 +70,7 @@ struct CmdBundle : InstallableCommand auto evalState = getEvalState(); auto app = installable->toApp(*evalState); - store->buildPaths(toBuildableReqs(app.context)); + store->buildPaths(toDerivedPaths(app.context)); auto [bundlerFlakeRef, bundlerName] = parseFlakeRefWithFragment(bundler, absPath(".")); const flake::LockFlags lockFlags{ .writeLockFile = false }; @@ -110,7 +110,7 @@ struct CmdBundle : InstallableCommand StorePath outPath = store->parseStorePath(evalState->coerceToPath(*attr2->pos, *attr2->value, context2)); - store->buildPaths({ BuildableReqFromDrv { drvPath } }); + store->buildPaths({ DerivedPath::Built { drvPath } }); auto outPathS = store->printStorePath(outPath); diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 616e2073e..cae6ded40 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -160,7 +160,7 @@ StorePath getDerivationEnvironment(ref store, const StorePath & drvPath) auto shellDrvPath = writeDerivation(*store, drv); /* 
Build the derivation. */ - store->buildPaths({BuildableReqFromDrv{shellDrvPath}}); + store->buildPaths({DerivedPath::Built{shellDrvPath}}); for (auto & [_0, outputAndOptPath] : drv.outputsAndOptPaths(*store)) { auto & [_1, optPath] = outputAndOptPath; @@ -265,7 +265,7 @@ struct Common : InstallableCommand, MixProfile for (auto & [installable_, dir_] : redirects) { auto dir = absPath(dir_); auto installable = parseInstallable(store, installable_); - auto buildable = installable->toBuildable(); + auto buildable = installable->toDerivedPathWithHints(); auto doRedirect = [&](const StorePath & path) { auto from = store->printStorePath(path); @@ -277,10 +277,10 @@ struct Common : InstallableCommand, MixProfile } }; std::visit(overloaded { - [&](const BuildableOpaque & bo) { + [&](const DerivedPathOpaque & bo) { doRedirect(bo.path); }, - [&](const BuildableFromDrv & bfd) { + [&](const DerivedPathWithHintsBuilt & bfd) { for (auto & [outputName, path] : bfd.outputs) if (path) doRedirect(*path); }, diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 9d6d22a43..62a413e27 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -293,7 +293,7 @@ struct CmdFlakeCheck : FlakeCommand } }; - std::vector drvPaths; + std::vector drvPaths; auto checkApp = [&](const std::string & attrPath, Value & v, const Pos & pos) { try { @@ -462,7 +462,7 @@ struct CmdFlakeCheck : FlakeCommand fmt("%s.%s.%s", name, attr.name, attr2.name), *attr2.value, *attr2.pos); if ((std::string) attr.name == settings.thisSystem.get()) - drvPaths.push_back(BuildableReqFromDrv{drvPath}); + drvPaths.push_back(DerivedPath::Built{drvPath}); } } } diff --git a/src/nix/log.cc b/src/nix/log.cc index 67d3742d6..5010e3326 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -30,15 +30,15 @@ struct CmdLog : InstallableCommand subs.push_front(store); - auto b = installable->toBuildable(); + auto b = installable->toDerivedPathWithHints(); RunPager pager; for (auto & sub : subs) { auto log = std::visit(overloaded { - [&](BuildableOpaque bo) { + [&](DerivedPathOpaque bo) { return sub->getBuildLog(bo.path); }, - [&](BuildableFromDrv bfd) { + [&](DerivedPathWithHintsBuilt bfd) { return sub->getBuildLog(bfd.drvPath); }, }, b); diff --git a/src/nix/profile.cc b/src/nix/profile.cc index b96e71844..ad824dd70 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -233,7 +233,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile { ProfileManifest manifest(*getEvalState(), *profile); - std::vector pathsToBuild; + std::vector pathsToBuild; for (auto & installable : installables) { if (auto installable2 = std::dynamic_pointer_cast(installable)) { @@ -249,7 +249,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile attrPath, }; - pathsToBuild.push_back(BuildableReqFromDrv{drv.drvPath, StringSet{drv.outputName}}); + pathsToBuild.push_back(DerivedPath::Built{drv.drvPath, StringSet{drv.outputName}}); manifest.elements.emplace_back(std::move(element)); } else { @@ -259,16 +259,16 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile ProfileElement element; std::visit(overloaded { - [&](BuildableOpaque bo) { + [&](DerivedPathOpaque bo) { pathsToBuild.push_back(bo); element.storePaths.insert(bo.path); }, - [&](BuildableFromDrv bfd) { + [&](DerivedPathWithHintsBuilt bfd) { // TODO: Why are we querying if we know the output // names already? Is it just to figure out what the // default one is? 
for (auto & output : store->queryDerivationOutputMap(bfd.drvPath)) { - pathsToBuild.push_back(BuildableReqFromDrv{bfd.drvPath, {output.first}}); + pathsToBuild.push_back(DerivedPath::Built{bfd.drvPath, {output.first}}); element.storePaths.insert(output.second); } }, @@ -391,7 +391,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf auto matchers = getMatchers(store); // FIXME: code duplication - std::vector pathsToBuild; + std::vector pathsToBuild; for (size_t i = 0; i < manifest.elements.size(); ++i) { auto & element(manifest.elements[i]); @@ -426,7 +426,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf attrPath, }; - pathsToBuild.push_back(BuildableReqFromDrv{drv.drvPath, {"out"}}); // FIXME + pathsToBuild.push_back(DerivedPath::Built{drv.drvPath, {"out"}}); // FIXME } } diff --git a/src/nix/run.cc b/src/nix/run.cc index 2e9bb41cc..ba60e57d8 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -182,7 +182,7 @@ struct CmdRun : InstallableCommand, RunCommon auto app = installable->toApp(*state); - state->store->buildPaths(toBuildableReqs(app.context)); + state->store->buildPaths(toDerivedPaths(app.context)); Strings allArgs{app.program}; for (auto & i : args) allArgs.push_back(i); From 179582872de60863fcabcf471f98930a25fd6df3 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 5 Apr 2021 10:05:21 -0400 Subject: [PATCH 071/510] Make `DerivedPathWithHints` a newtype This allows us to namespace its constructors under it. --- src/libcmd/command.cc | 6 +++--- src/libcmd/installables.cc | 26 +++++++++++++------------- src/libstore/buildable.cc | 4 ++-- src/libstore/buildable.hh | 15 ++++++++++++++- src/nix/build.cc | 6 +++--- src/nix/develop.cc | 6 +++--- src/nix/log.cc | 6 +++--- src/nix/profile.cc | 6 +++--- 8 files changed, 44 insertions(+), 31 deletions(-) diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index dc1fbc43f..9da470c15 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -170,10 +170,10 @@ void MixProfile::updateProfile(const DerivedPathsWithHints & buildables) for (auto & buildable : buildables) { std::visit(overloaded { - [&](DerivedPathOpaque bo) { + [&](DerivedPathWithHints::Opaque bo) { result.push_back(bo.path); }, - [&](DerivedPathWithHintsBuilt bfd) { + [&](DerivedPathWithHints::Built bfd) { for (auto & output : bfd.outputs) { /* Output path should be known because we just tried to build it. 
*/ @@ -181,7 +181,7 @@ void MixProfile::updateProfile(const DerivedPathsWithHints & buildables) result.push_back(*output.second); } }, - }, buildable); + }, buildable.raw()); } if (result.size() != 1) diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index f091ac186..5d3026c1a 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -329,14 +329,14 @@ struct InstallableStorePath : Installable for (auto & [name, output] : drv.outputsAndOptPaths(*store)) outputs.emplace(name, output.second); return { - DerivedPathWithHintsBuilt { + DerivedPathWithHints::Built { .drvPath = storePath, .outputs = std::move(outputs) } }; } else { return { - DerivedPathOpaque { + DerivedPathWithHints::Opaque { .path = storePath, } }; @@ -364,7 +364,7 @@ DerivedPathsWithHints InstallableValue::toDerivedPathsWithHints() } for (auto & i : drvsToOutputs) - res.push_back(DerivedPathWithHintsBuilt { i.first, i.second }); + res.push_back(DerivedPathWithHints::Built { i.first, i.second }); return res; } @@ -684,17 +684,17 @@ DerivedPathsWithHints build(ref store, Realise mode, for (auto & i : installables) { for (auto & b : i->toDerivedPathsWithHints()) { std::visit(overloaded { - [&](DerivedPathOpaque bo) { + [&](DerivedPathWithHints::Opaque bo) { pathsToBuild.push_back(bo); }, - [&](DerivedPathWithHintsBuilt bfd) { + [&](DerivedPathWithHints::Built bfd) { StringSet outputNames; for (auto & output : bfd.outputs) outputNames.insert(output.first); pathsToBuild.push_back( DerivedPath::Built{bfd.drvPath, outputNames}); }, - }, b); + }, b.raw()); buildables.push_back(std::move(b)); } } @@ -717,10 +717,10 @@ std::set toRealisedPaths( if (operateOn == OperateOn::Output) { for (auto & b : build(store, mode, installables)) std::visit(overloaded { - [&](DerivedPathOpaque bo) { + [&](DerivedPathWithHints::Opaque bo) { res.insert(bo.path); }, - [&](DerivedPathWithHintsBuilt bfd) { + [&](DerivedPathWithHints::Built bfd) { auto drv = store->readDerivation(bfd.drvPath); auto outputHashes = staticOutputHashes(*store, drv); for (auto & output : bfd.outputs) { @@ -745,14 +745,14 @@ std::set toRealisedPaths( } } }, - }, b); + }, b.raw()); } else { if (mode == Realise::Nothing) settings.readOnlyMode = true; for (auto & i : installables) for (auto & b : i->toDerivedPathsWithHints()) - if (auto bfd = std::get_if(&b)) + if (auto bfd = std::get_if(&b)) res.insert(bfd->drvPath); } @@ -789,7 +789,7 @@ StorePathSet toDerivations(ref store, for (auto & i : installables) for (auto & b : i->toDerivedPathsWithHints()) std::visit(overloaded { - [&](DerivedPathOpaque bo) { + [&](DerivedPathWithHints::Opaque bo) { if (!useDeriver) throw Error("argument '%s' did not evaluate to a derivation", i->what()); auto derivers = store->queryValidDerivers(bo.path); @@ -798,10 +798,10 @@ StorePathSet toDerivations(ref store, // FIXME: use all derivers? 
drvPaths.insert(*derivers.begin()); }, - [&](DerivedPathWithHintsBuilt bfd) { + [&](DerivedPathWithHints::Built bfd) { drvPaths.insert(bfd.drvPath); }, - }, b); + }, b.raw()); return drvPaths; } diff --git a/src/libstore/buildable.cc b/src/libstore/buildable.cc index a8c0c70b1..eee38ba10 100644 --- a/src/libstore/buildable.cc +++ b/src/libstore/buildable.cc @@ -11,7 +11,7 @@ nlohmann::json DerivedPath::Opaque::toJSON(ref store) const { return res; } -nlohmann::json DerivedPathWithHintsBuilt::toJSON(ref store) const { +nlohmann::json DerivedPathWithHints::Built::toJSON(ref store) const { nlohmann::json res; res["drvPath"] = store->printStorePath(drvPath); for (const auto& [output, path] : outputs) { @@ -25,7 +25,7 @@ nlohmann::json derivedPathsWithHintsToJSON(const DerivedPathsWithHints & buildab for (const DerivedPathWithHints & buildable : buildables) { std::visit([&res, store](const auto & buildable) { res.push_back(buildable.toJSON(store)); - }, buildable); + }, buildable.raw()); } return res; } diff --git a/src/libstore/buildable.hh b/src/libstore/buildable.hh index 0a0cf8105..ce5ae5fc0 100644 --- a/src/libstore/buildable.hh +++ b/src/libstore/buildable.hh @@ -56,11 +56,24 @@ struct DerivedPathWithHintsBuilt { static DerivedPathWithHintsBuilt parse(const Store & store, std::string_view); }; -using DerivedPathWithHints = std::variant< +using _DerivedPathWithHintsRaw = std::variant< DerivedPath::Opaque, DerivedPathWithHintsBuilt >; +struct DerivedPathWithHints : _DerivedPathWithHintsRaw { + using Raw = _DerivedPathWithHintsRaw; + using Raw::Raw; + + using Opaque = DerivedPathOpaque; + using Built = DerivedPathWithHintsBuilt; + + inline const Raw & raw() const { + return static_cast(*this); + } + +}; + typedef std::vector DerivedPathsWithHints; nlohmann::json derivedPathsWithHintsToJSON(const DerivedPathsWithHints & buildables, ref store); diff --git a/src/nix/build.cc b/src/nix/build.cc index 0529ed382..03159b6cc 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -61,12 +61,12 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile for (const auto & [_i, buildable] : enumerate(buildables)) { auto i = _i; std::visit(overloaded { - [&](DerivedPathOpaque bo) { + [&](DerivedPathWithHints::Opaque bo) { std::string symlink = outLink; if (i) symlink += fmt("-%d", i); store2->addPermRoot(bo.path, absPath(symlink)); }, - [&](DerivedPathWithHintsBuilt bfd) { + [&](DerivedPathWithHints::Built bfd) { auto builtOutputs = store->queryDerivationOutputMap(bfd.drvPath); for (auto & output : builtOutputs) { std::string symlink = outLink; @@ -75,7 +75,7 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile store2->addPermRoot(output.second, absPath(symlink)); } }, - }, buildable); + }, buildable.raw()); } updateProfile(buildables); diff --git a/src/nix/develop.cc b/src/nix/develop.cc index cae6ded40..7cc7b85be 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -277,14 +277,14 @@ struct Common : InstallableCommand, MixProfile } }; std::visit(overloaded { - [&](const DerivedPathOpaque & bo) { + [&](const DerivedPathWithHints::Opaque & bo) { doRedirect(bo.path); }, - [&](const DerivedPathWithHintsBuilt & bfd) { + [&](const DerivedPathWithHints::Built & bfd) { for (auto & [outputName, path] : bfd.outputs) if (path) doRedirect(*path); }, - }, buildable); + }, buildable.raw()); } return rewriteStrings(script, rewrites); diff --git a/src/nix/log.cc b/src/nix/log.cc index 5010e3326..638bb5073 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -35,13 +35,13 @@ 
struct CmdLog : InstallableCommand RunPager pager; for (auto & sub : subs) { auto log = std::visit(overloaded { - [&](DerivedPathOpaque bo) { + [&](DerivedPathWithHints::Opaque bo) { return sub->getBuildLog(bo.path); }, - [&](DerivedPathWithHintsBuilt bfd) { + [&](DerivedPathWithHints::Built bfd) { return sub->getBuildLog(bfd.drvPath); }, - }, b); + }, b.raw()); if (!log) continue; stopProgressBar(); printInfo("got build log for '%s' from '%s'", installable->what(), sub->getUri()); diff --git a/src/nix/profile.cc b/src/nix/profile.cc index ad824dd70..667904cd2 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -259,11 +259,11 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile ProfileElement element; std::visit(overloaded { - [&](DerivedPathOpaque bo) { + [&](DerivedPathWithHints::Opaque bo) { pathsToBuild.push_back(bo); element.storePaths.insert(bo.path); }, - [&](DerivedPathWithHintsBuilt bfd) { + [&](DerivedPathWithHints::Built bfd) { // TODO: Why are we querying if we know the output // names already? Is it just to figure out what the // default one is? @@ -272,7 +272,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile element.storePaths.insert(output.second); } }, - }, buildable); + }, buildable.raw()); manifest.elements.emplace_back(std::move(element)); } From d8fa7517fad4272e20ff9b9b740c91158bc685e2 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 5 Apr 2021 10:33:28 -0400 Subject: [PATCH 072/510] buildable.{cc,hh} -> derived-path.{cc,hh} --- src/libcmd/installables.hh | 2 +- src/libmain/shared.hh | 2 +- src/libstore/{buildable.cc => derived-path.cc} | 2 +- src/libstore/{buildable.hh => derived-path.hh} | 0 src/libstore/path-with-outputs.hh | 2 +- src/libstore/store-api.hh | 2 +- 6 files changed, 5 insertions(+), 5 deletions(-) rename src/libstore/{buildable.cc => derived-path.cc} (98%) rename src/libstore/{buildable.hh => derived-path.hh} (100%) diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh index 0bc932b52..403403c07 100644 --- a/src/libcmd/installables.hh +++ b/src/libcmd/installables.hh @@ -3,7 +3,7 @@ #include "util.hh" #include "path.hh" #include "path-with-outputs.hh" -#include "buildable.hh" +#include "derived-path.hh" #include "eval.hh" #include "flake/flake.hh" diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh index 9cb9e6da2..05277d90a 100644 --- a/src/libmain/shared.hh +++ b/src/libmain/shared.hh @@ -4,7 +4,7 @@ #include "args.hh" #include "common-args.hh" #include "path.hh" -#include "buildable.hh" +#include "derived-path.hh" #include diff --git a/src/libstore/buildable.cc b/src/libstore/derived-path.cc similarity index 98% rename from src/libstore/buildable.cc rename to src/libstore/derived-path.cc index eee38ba10..13833c58e 100644 --- a/src/libstore/buildable.cc +++ b/src/libstore/derived-path.cc @@ -1,4 +1,4 @@ -#include "buildable.hh" +#include "derived-path.hh" #include "store-api.hh" #include diff --git a/src/libstore/buildable.hh b/src/libstore/derived-path.hh similarity index 100% rename from src/libstore/buildable.hh rename to src/libstore/derived-path.hh diff --git a/src/libstore/path-with-outputs.hh b/src/libstore/path-with-outputs.hh index 749348398..4c4023dcb 100644 --- a/src/libstore/path-with-outputs.hh +++ b/src/libstore/path-with-outputs.hh @@ -3,7 +3,7 @@ #include #include "path.hh" -#include "buildable.hh" +#include "derived-path.hh" namespace nix { diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 483f3c5fa..f66298991 100644 --- 
a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -2,7 +2,7 @@ #include "realisation.hh" #include "path.hh" -#include "buildable.hh" +#include "derived-path.hh" #include "hash.hh" #include "content-address.hh" #include "serialise.hh" From 125a824228dbac0bb82023953f45318ea93e7ffa Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 5 Apr 2021 10:56:48 -0400 Subject: [PATCH 073/510] Document the derived path types. --- src/libstore/derived-path.hh | 50 +++++++++++++++++++++++++++++++++++- 1 file changed, 49 insertions(+), 1 deletion(-) diff --git a/src/libstore/derived-path.hh b/src/libstore/derived-path.hh index ce5ae5fc0..7a2fe59de 100644 --- a/src/libstore/derived-path.hh +++ b/src/libstore/derived-path.hh @@ -2,7 +2,6 @@ #include "util.hh" #include "path.hh" -#include "path.hh" #include @@ -12,6 +11,13 @@ namespace nix { class Store; +/** + * An opaque derived path. + * + * Opaque derived paths are just store paths, and fully evaluated. They + * cannot be simplified further. Since they are opaque, they cannot be + * built, but they can be fetched. + */ struct DerivedPathOpaque { StorePath path; @@ -20,6 +26,18 @@ struct DerivedPathOpaque { static DerivedPathOpaque parse(const Store & store, std::string_view); }; +/** + * A derived path that is built from a derivation + * + * Built derived paths are a pair of a derivation and some output names. + * They are evaluated by building the derivation, and then replacing the + * output names with the resulting outputs. + * + * Note that this does mean a derived store path evaluates to multiple + * opaque paths, which is sort of icky as expressions are supposed to + * evaluate to single values. Perhaps this should have just a single + * output name. + */ struct DerivedPathBuilt { StorePath drvPath; std::set outputs; @@ -33,6 +51,16 @@ using _DerivedPathRaw = std::variant< DerivedPathBuilt >; +/** + * A "derived path" is a very simple sort of expression that evaluates + * to a (concrete) store path. It is either: + * + * - opaque, in which case it is just a concrete store path with + * possibly no known derivation + * + * - built, in which case it is a pair of a derivation path and an + * output name. + */ struct DerivedPath : _DerivedPathRaw { using Raw = _DerivedPathRaw; using Raw::Raw; @@ -48,6 +76,11 @@ struct DerivedPath : _DerivedPathRaw { static DerivedPath parse(const Store & store, std::string_view); }; +/** + * A built derived path with hints in the form of optional concrete output paths. + * + * See 'DerivedPathWithHints' for more of an explanation. + */ struct DerivedPathWithHintsBuilt { StorePath drvPath; std::map> outputs; @@ -61,6 +94,21 @@ using _DerivedPathWithHintsRaw = std::variant< DerivedPathWithHintsBuilt >; +/** + * A derived path with hints in the form of optional concrete output paths in the built case. + * + * This type is currently just used by the CLI. The paths are filled in + * during evaluation for derivations that know what paths they will + * produce in advance, i.e. input-addressed or fixed-output content + * addressed derivations. + * + * That isn't very good, because it puts floating content-addressed + * derivations "at a disadvantage". It would be better to never rely on + * the output path of unbuilt derivations, and exclusively use the + * realisation types to work with built derivations' concrete output + * paths. + */ +// FIXME Stop using and delete this, or if that is not possible move out of libstore to libcmd.
struct DerivedPathWithHints : _DerivedPathWithHintsRaw { using Raw = _DerivedPathWithHintsRaw; using Raw::Raw; From 9f28dd97ae6afc68f0574a251325336c12d60c6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn=20Gohla?= Date: Mon, 5 Apr 2021 21:24:55 +0100 Subject: [PATCH 074/510] Revert "Use upstream nlohmann_json" This reverts commit 4145cd2da002e1bd8affa0392c80118eabe58e3c. --- src/nlohmann/json.hpp | 20406 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 20406 insertions(+) create mode 100644 src/nlohmann/json.hpp diff --git a/src/nlohmann/json.hpp b/src/nlohmann/json.hpp new file mode 100644 index 000000000..c9af0bed3 --- /dev/null +++ b/src/nlohmann/json.hpp @@ -0,0 +1,20406 @@ +/* + __ _____ _____ _____ + __| | __| | | | JSON for Modern C++ +| | |__ | | | | | | version 3.5.0 +|_____|_____|_____|_|___| https://github.com/nlohmann/json + +Licensed under the MIT License . +SPDX-License-Identifier: MIT +Copyright (c) 2013-2018 Niels Lohmann . + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ + +#ifndef NLOHMANN_JSON_HPP +#define NLOHMANN_JSON_HPP + +#define NLOHMANN_JSON_VERSION_MAJOR 3 +#define NLOHMANN_JSON_VERSION_MINOR 5 +#define NLOHMANN_JSON_VERSION_PATCH 0 + +#include // all_of, find, for_each +#include // assert +#include // and, not, or +#include // nullptr_t, ptrdiff_t, size_t +#include // hash, less +#include // initializer_list +#include // istream, ostream +#include // random_access_iterator_tag +#include // accumulate +#include // string, stoi, to_string +#include // declval, forward, move, pair, swap + +// #include +#ifndef NLOHMANN_JSON_FWD_HPP +#define NLOHMANN_JSON_FWD_HPP + +#include // int64_t, uint64_t +#include // map +#include // allocator +#include // string +#include // vector + +/*! +@brief namespace for Niels Lohmann +@see https://github.com/nlohmann +@since version 1.0.0 +*/ +namespace nlohmann +{ +/*! +@brief default JSONSerializer template argument + +This serializer ignores the template arguments and uses ADL +([argument-dependent lookup](https://en.cppreference.com/w/cpp/language/adl)) +for serialization. +*/ +template +struct adl_serializer; + +template class ObjectType = + std::map, + template class ArrayType = std::vector, + class StringType = std::string, class BooleanType = bool, + class NumberIntegerType = std::int64_t, + class NumberUnsignedType = std::uint64_t, + class NumberFloatType = double, + template class AllocatorType = std::allocator, + template class JSONSerializer = + adl_serializer> +class basic_json; + +/*! 
+@brief JSON Pointer + +A JSON pointer defines a string syntax for identifying a specific value +within a JSON document. It can be used with functions `at` and +`operator[]`. Furthermore, JSON pointers are the base for JSON patches. + +@sa [RFC 6901](https://tools.ietf.org/html/rfc6901) + +@since version 2.0.0 +*/ +template +class json_pointer; + +/*! +@brief default JSON class + +This type is the default specialization of the @ref basic_json class which +uses the standard template types. + +@since version 1.0.0 +*/ +using json = basic_json<>; +} // namespace nlohmann + +#endif + +// #include + + +// This file contains all internal macro definitions +// You MUST include macro_unscope.hpp at the end of json.hpp to undef all of them + +// exclude unsupported compilers +#if !defined(JSON_SKIP_UNSUPPORTED_COMPILER_CHECK) + #if defined(__clang__) + #if (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__) < 30400 + #error "unsupported Clang version - see https://github.com/nlohmann/json#supported-compilers" + #endif + #elif defined(__GNUC__) && !(defined(__ICC) || defined(__INTEL_COMPILER)) + #if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) < 40800 + #error "unsupported GCC version - see https://github.com/nlohmann/json#supported-compilers" + #endif + #endif +#endif + +// disable float-equal warnings on GCC/clang +#if defined(__clang__) || defined(__GNUC__) || defined(__GNUG__) + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wfloat-equal" +#endif + +// disable documentation warnings on clang +#if defined(__clang__) + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wdocumentation" +#endif + +// allow for portable deprecation warnings +#if defined(__clang__) || defined(__GNUC__) || defined(__GNUG__) + #define JSON_DEPRECATED __attribute__((deprecated)) +#elif defined(_MSC_VER) + #define JSON_DEPRECATED __declspec(deprecated) +#else + #define JSON_DEPRECATED +#endif + +// allow to disable exceptions +#if (defined(__cpp_exceptions) || defined(__EXCEPTIONS) || defined(_CPPUNWIND)) && !defined(JSON_NOEXCEPTION) + #define JSON_THROW(exception) throw exception + #define JSON_TRY try + #define JSON_CATCH(exception) catch(exception) + #define JSON_INTERNAL_CATCH(exception) catch(exception) +#else + #define JSON_THROW(exception) std::abort() + #define JSON_TRY if(true) + #define JSON_CATCH(exception) if(false) + #define JSON_INTERNAL_CATCH(exception) if(false) +#endif + +// override exception macros +#if defined(JSON_THROW_USER) + #undef JSON_THROW + #define JSON_THROW JSON_THROW_USER +#endif +#if defined(JSON_TRY_USER) + #undef JSON_TRY + #define JSON_TRY JSON_TRY_USER +#endif +#if defined(JSON_CATCH_USER) + #undef JSON_CATCH + #define JSON_CATCH JSON_CATCH_USER + #undef JSON_INTERNAL_CATCH + #define JSON_INTERNAL_CATCH JSON_CATCH_USER +#endif +#if defined(JSON_INTERNAL_CATCH_USER) + #undef JSON_INTERNAL_CATCH + #define JSON_INTERNAL_CATCH JSON_INTERNAL_CATCH_USER +#endif + +// manual branch prediction +#if defined(__clang__) || defined(__GNUC__) || defined(__GNUG__) + #define JSON_LIKELY(x) __builtin_expect(!!(x), 1) + #define JSON_UNLIKELY(x) __builtin_expect(!!(x), 0) +#else + #define JSON_LIKELY(x) x + #define JSON_UNLIKELY(x) x +#endif + +// C++ language standard detection +#if (defined(__cplusplus) && __cplusplus >= 201703L) || (defined(_HAS_CXX17) && _HAS_CXX17 == 1) // fix for issue #464 + #define JSON_HAS_CPP_17 + #define JSON_HAS_CPP_14 +#elif (defined(__cplusplus) && __cplusplus >= 201402L) || (defined(_HAS_CXX14) && 
_HAS_CXX14 == 1) + #define JSON_HAS_CPP_14 +#endif + +/*! +@brief macro to briefly define a mapping between an enum and JSON +@def NLOHMANN_JSON_SERIALIZE_ENUM +@since version 3.4.0 +*/ +#define NLOHMANN_JSON_SERIALIZE_ENUM(ENUM_TYPE, ...) \ + template \ + inline void to_json(BasicJsonType& j, const ENUM_TYPE& e) \ + { \ + static_assert(std::is_enum::value, #ENUM_TYPE " must be an enum!"); \ + static const std::pair m[] = __VA_ARGS__; \ + auto it = std::find_if(std::begin(m), std::end(m), \ + [e](const std::pair& ej_pair) -> bool \ + { \ + return ej_pair.first == e; \ + }); \ + j = ((it != std::end(m)) ? it : std::begin(m))->second; \ + } \ + template \ + inline void from_json(const BasicJsonType& j, ENUM_TYPE& e) \ + { \ + static_assert(std::is_enum::value, #ENUM_TYPE " must be an enum!"); \ + static const std::pair m[] = __VA_ARGS__; \ + auto it = std::find_if(std::begin(m), std::end(m), \ + [j](const std::pair& ej_pair) -> bool \ + { \ + return ej_pair.second == j; \ + }); \ + e = ((it != std::end(m)) ? it : std::begin(m))->first; \ + } + +// Ugly macros to avoid uglier copy-paste when specializing basic_json. They +// may be removed in the future once the class is split. + +#define NLOHMANN_BASIC_JSON_TPL_DECLARATION \ + template class ObjectType, \ + template class ArrayType, \ + class StringType, class BooleanType, class NumberIntegerType, \ + class NumberUnsignedType, class NumberFloatType, \ + template class AllocatorType, \ + template class JSONSerializer> + +#define NLOHMANN_BASIC_JSON_TPL \ + basic_json + +// #include + + +#include // not +#include // size_t +#include // conditional, enable_if, false_type, integral_constant, is_constructible, is_integral, is_same, remove_cv, remove_reference, true_type + +namespace nlohmann +{ +namespace detail +{ +// alias templates to reduce boilerplate +template +using enable_if_t = typename std::enable_if::type; + +template +using uncvref_t = typename std::remove_cv::type>::type; + +// implementation of C++14 index_sequence and affiliates +// source: https://stackoverflow.com/a/32223343 +template +struct index_sequence +{ + using type = index_sequence; + using value_type = std::size_t; + static constexpr std::size_t size() noexcept + { + return sizeof...(Ints); + } +}; + +template +struct merge_and_renumber; + +template +struct merge_and_renumber, index_sequence> + : index_sequence < I1..., (sizeof...(I1) + I2)... 
> {}; + +template +struct make_index_sequence + : merge_and_renumber < typename make_index_sequence < N / 2 >::type, + typename make_index_sequence < N - N / 2 >::type > {}; + +template<> struct make_index_sequence<0> : index_sequence<> {}; +template<> struct make_index_sequence<1> : index_sequence<0> {}; + +template +using index_sequence_for = make_index_sequence; + +// dispatch utility (taken from ranges-v3) +template struct priority_tag : priority_tag < N - 1 > {}; +template<> struct priority_tag<0> {}; + +// taken from ranges-v3 +template +struct static_const +{ + static constexpr T value{}; +}; + +template +constexpr T static_const::value; +} // namespace detail +} // namespace nlohmann + +// #include + + +#include // not +#include // numeric_limits +#include // false_type, is_constructible, is_integral, is_same, true_type +#include // declval + +// #include + +// #include + + +#include // random_access_iterator_tag + +// #include + + +namespace nlohmann +{ +namespace detail +{ +template struct make_void +{ + using type = void; +}; +template using void_t = typename make_void::type; +} // namespace detail +} // namespace nlohmann + +// #include + + +namespace nlohmann +{ +namespace detail +{ +template +struct iterator_types {}; + +template +struct iterator_types < + It, + void_t> +{ + using difference_type = typename It::difference_type; + using value_type = typename It::value_type; + using pointer = typename It::pointer; + using reference = typename It::reference; + using iterator_category = typename It::iterator_category; +}; + +// This is required as some compilers implement std::iterator_traits in a way that +// doesn't work with SFINAE. See https://github.com/nlohmann/json/issues/1341. +template +struct iterator_traits +{ +}; + +template +struct iterator_traits < T, enable_if_t < !std::is_pointer::value >> + : iterator_types +{ +}; + +template +struct iterator_traits::value>> +{ + using iterator_category = std::random_access_iterator_tag; + using value_type = T; + using difference_type = ptrdiff_t; + using pointer = T*; + using reference = T&; +}; +} +} + +// #include + +// #include + + +#include + +// #include + + +// http://en.cppreference.com/w/cpp/experimental/is_detected +namespace nlohmann +{ +namespace detail +{ +struct nonesuch +{ + nonesuch() = delete; + ~nonesuch() = delete; + nonesuch(nonesuch const&) = delete; + void operator=(nonesuch const&) = delete; +}; + +template class Op, + class... Args> +struct detector +{ + using value_t = std::false_type; + using type = Default; +}; + +template class Op, class... Args> +struct detector>, Op, Args...> +{ + using value_t = std::true_type; + using type = Op; +}; + +template