diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 39d595199..526fecabf 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -10,16 +10,8 @@
# This file
.github/CODEOWNERS @edolstra
-# Public documentation
-/doc @fricklerhandwerk
-*.md @fricklerhandwerk
-
# Documentation of built-in functions
-src/libexpr/primops.cc @fricklerhandwerk @roberth
-# Documentation on experimental features
-src/libutil/experimental-features.cc @fricklerhandwerk
-# Documentation on configuration settings
-src/libstore/globals.hh @fricklerhandwerk
+src/libexpr/primops.cc @roberth
# Libstore layer
/src/libstore @thufschmitt
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 217b19108..d12a4d36c 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -10,6 +10,8 @@
-# Priorities
+# Priorities and Process
Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc).
+
+The Nix maintainer team uses a [GitHub project board](https://github.com/orgs/NixOS/projects/19) to [schedule and track reviews](https://github.com/NixOS/nix/tree/master/maintainers#project-board-protocol).
diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
index 85ddcfad3..f003114ba 100644
--- a/.github/workflows/backport.yml
+++ b/.github/workflows/backport.yml
@@ -21,7 +21,7 @@ jobs:
fetch-depth: 0
- name: Create backport PRs
# should be kept in sync with `version`
- uses: zeebe-io/backport-action@v2.2.0
+ uses: zeebe-io/backport-action@v2.3.0
with:
# Config README: https://github.com/zeebe-io/backport-action#backport-action
github_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/Makefile b/Makefile
index 3dae8b394..1fdb6e897 100644
--- a/Makefile
+++ b/Makefile
@@ -24,7 +24,7 @@ makefiles = \
misc/upstart/local.mk
endif
-ifeq ($(ENABLE_BUILD)_$(ENABLE_TESTS), yes_yes)
+ifeq ($(ENABLE_UNIT_TESTS), yes)
makefiles += \
tests/unit/libutil/local.mk \
tests/unit/libutil-support/local.mk \
@@ -34,16 +34,13 @@ makefiles += \
tests/unit/libexpr-support/local.mk
endif
-ifeq ($(ENABLE_TESTS), yes)
+ifeq ($(ENABLE_FUNCTIONAL_TESTS), yes)
makefiles += \
tests/functional/local.mk \
tests/functional/ca/local.mk \
tests/functional/dyn-drv/local.mk \
tests/functional/test-libstoreconsumer/local.mk \
tests/functional/plugins/local.mk
-else
-makefiles += \
- mk/disable-tests.mk
endif
OPTIMIZE = 1
@@ -57,11 +54,40 @@ endif
include mk/lib.mk
+# Must be included after `mk/lib.mk` so isn't the default target.
+ifneq ($(ENABLE_UNIT_TESTS), yes)
+.PHONY: check
+check:
+ @echo "Unit tests are disabled. Configure without '--disable-unit-tests', or avoid calling 'make check'."
+ @exit 1
+endif
+
+ifneq ($(ENABLE_FUNCTIONAL_TESTS), yes)
+.PHONY: installcheck
+installcheck:
+ @echo "Functional tests are disabled. Configure without '--disable-functional-tests', or avoid calling 'make installcheck'."
+ @exit 1
+endif
+
# Must be included after `mk/lib.mk` so rules refer to variables defined
# by the library. Rules are not "lazy" like variables, unfortunately.
-ifeq ($(ENABLE_BUILD), yes)
+
+ifeq ($(ENABLE_DOC_GEN), yes)
$(eval $(call include-sub-makefile, doc/manual/local.mk))
+else
+.PHONY: manual-html manpages
+manual-html manpages:
+ @echo "Generated docs are disabled. Configure without '--disable-doc-gen', or avoid calling 'make manpages' and 'make manual-html'."
+ @exit 1
endif
+
+ifeq ($(ENABLE_INTERNAL_API_DOCS), yes)
$(eval $(call include-sub-makefile, doc/internal-api/local.mk))
+else
+.PHONY: internal-api-html
+internal-api-html:
+ @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
+ @exit 1
+endif
GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src
diff --git a/Makefile.config.in b/Makefile.config.in
index c85e028c2..21a9f41ec 100644
--- a/Makefile.config.in
+++ b/Makefile.config.in
@@ -9,8 +9,11 @@ CXXFLAGS = @CXXFLAGS@
CXXLTO = @CXXLTO@
EDITLINE_LIBS = @EDITLINE_LIBS@
ENABLE_BUILD = @ENABLE_BUILD@
+ENABLE_DOC_GEN = @ENABLE_DOC_GEN@
+ENABLE_FUNCTIONAL_TESTS = @ENABLE_FUNCTIONAL_TESTS@
+ENABLE_INTERNAL_API_DOCS = @ENABLE_INTERNAL_API_DOCS@
ENABLE_S3 = @ENABLE_S3@
-ENABLE_TESTS = @ENABLE_TESTS@
+ENABLE_UNIT_TESTS = @ENABLE_UNIT_TESTS@
GTEST_LIBS = @GTEST_LIBS@
HAVE_LIBCPUID = @HAVE_LIBCPUID@
HAVE_SECCOMP = @HAVE_SECCOMP@
@@ -36,12 +39,10 @@ checkbindir = @checkbindir@
checklibdir = @checklibdir@
datadir = @datadir@
datarootdir = @datarootdir@
-doc_generate = @doc_generate@
docdir = @docdir@
embedded_sandbox_shell = @embedded_sandbox_shell@
exec_prefix = @exec_prefix@
includedir = @includedir@
-internal_api_docs = @internal_api_docs@
libdir = @libdir@
libexecdir = @libexecdir@
localstatedir = @localstatedir@
diff --git a/configure.ac b/configure.ac
index a949f9df2..1bc4f17b0 100644
--- a/configure.ac
+++ b/configure.ac
@@ -138,20 +138,38 @@ AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]),
ENABLE_BUILD=$enableval, ENABLE_BUILD=yes)
AC_SUBST(ENABLE_BUILD)
-# Building without tests is useful for bootstrapping with a smaller footprint
+# Building without unit tests is useful for bootstrapping with a smaller footprint
# or running the tests in a separate derivation. Otherwise, we do compile and
# run them.
-AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]),
- ENABLE_TESTS=$enableval, ENABLE_TESTS=yes)
-AC_SUBST(ENABLE_TESTS)
-# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
-AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
- internal_api_docs=$enableval, internal_api_docs=no)
-AC_SUBST(internal_api_docs)
+AC_ARG_ENABLE(unit-tests, AS_HELP_STRING([--disable-unit-tests],[Do not build the unit tests]),
+ ENABLE_UNIT_TESTS=$enableval, ENABLE_UNIT_TESTS=$ENABLE_BUILD)
+AC_SUBST(ENABLE_UNIT_TESTS)
AS_IF(
- [test "$ENABLE_BUILD" == "yes" || test "$ENABLE_TEST" == "yes"],
+ [test "$ENABLE_BUILD" == "no" && test "$ENABLE_UNIT_TESTS" == "yes"],
+ [AC_MSG_ERROR([Cannot enable unit tests when building overall is disabled. Please do not pass '--enable-unit-tests' or do not pass '--disable-build'.])])
+
+AC_ARG_ENABLE(functional-tests, AS_HELP_STRING([--disable-functional-tests],[Do not build the functional tests]),
+ ENABLE_FUNCTIONAL_TESTS=$enableval, ENABLE_FUNCTIONAL_TESTS=yes)
+AC_SUBST(ENABLE_FUNCTIONAL_TESTS)
+
+# documentation generation switch
+AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
+ ENABLE_DOC_GEN=$enableval, ENABLE_DOC_GEN=$ENABLE_BUILD)
+AC_SUBST(ENABLE_DOC_GEN)
+
+AS_IF(
+  [test "$ENABLE_BUILD" == "no" && test "$ENABLE_DOC_GEN" == "yes"],
+ [AC_MSG_ERROR([Cannot enable generated docs when building overall is disabled. Please do not pass '--enable-doc-gen' or do not pass '--disable-build'.])])
+
+# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
+AC_ARG_ENABLE(internal-api-docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
+ ENABLE_INTERNAL_API_DOCS=$enableval, ENABLE_INTERNAL_API_DOCS=no)
+AC_SUBST(ENABLE_INTERNAL_API_DOCS)
+
+AS_IF(
+ [test "$ENABLE_FUNCTIONAL_TESTS" == "yes" || test "$ENABLE_DOC_GEN" == "yes"],
[NEED_PROG(jq, jq)])
AS_IF([test "$ENABLE_BUILD" == "yes"],[
@@ -317,7 +335,7 @@ if test "$gc" = yes; then
AC_DEFINE(HAVE_BOEHMGC, 1, [Whether to use the Boehm garbage collector.])
fi
-AS_IF([test "$ENABLE_TESTS" == "yes"],[
+AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[
# Look for gtest.
PKG_CHECK_MODULES([GTEST], [gtest_main])
@@ -349,11 +367,6 @@ AC_LANG_POP(C++)
# Look for nlohmann/json.
PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])
-# documentation generation switch
-AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
- doc_generate=$enableval, doc_generate=yes)
-AC_SUBST(doc_generate)
-
# Look for lowdown library.
PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])
diff --git a/doc/internal-api/local.mk b/doc/internal-api/local.mk
index 890f341b7..bf2c4dede 100644
--- a/doc/internal-api/local.mk
+++ b/doc/internal-api/local.mk
@@ -1,19 +1,7 @@
-.PHONY: internal-api-html
-
-ifeq ($(internal_api_docs), yes)
-
$(docdir)/internal-api/html/index.html $(docdir)/internal-api/latex: $(d)/doxygen.cfg
mkdir -p $(docdir)/internal-api
{ cat $< ; echo "OUTPUT_DIRECTORY=$(docdir)/internal-api" ; } | doxygen -
# Generate the HTML API docs for Nix's unstable internal interfaces.
+.PHONY: internal-api-html
internal-api-html: $(docdir)/internal-api/html/index.html
-
-else
-
-# Make a nicer error message
-internal-api-html:
- @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
- @exit 1
-
-endif
diff --git a/doc/manual/local.mk b/doc/manual/local.mk
index 456000d3d..b77168885 100644
--- a/doc/manual/local.mk
+++ b/doc/manual/local.mk
@@ -1,5 +1,3 @@
-ifeq ($(doc_generate),yes)
-
# The version of Nix used to generate the doc. Can also be
# `$(nix_INSTALL_PATH)` or just `nix` (to grap ambient from the `PATH`),
# if one prefers.
@@ -180,6 +178,8 @@ manual-html: $(docdir)/manual/index.html
install: $(docdir)/manual/index.html
# Generate 'nix' manpages.
+.PHONY: manpages
+manpages: $(mandir)/man1/nix3-manpages
install: $(mandir)/man1/nix3-manpages
man: doc/manual/generated/man1/nix3-manpages
all: doc/manual/generated/man1/nix3-manpages
@@ -225,5 +225,3 @@ $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/
@rm -rf $(DESTDIR)$(docdir)/manual
@mv $(DESTDIR)$(docdir)/manual.tmp/html $(DESTDIR)$(docdir)/manual
@rm -rf $(DESTDIR)$(docdir)/manual.tmp
-
-endif
diff --git a/doc/manual/rl-next/nix-profile-names.md b/doc/manual/rl-next/nix-profile-names.md
new file mode 100644
index 000000000..f5953bd72
--- /dev/null
+++ b/doc/manual/rl-next/nix-profile-names.md
@@ -0,0 +1,6 @@
+---
+synopsis: "`nix profile` now allows referring to elements by human-readable name"
+prs: 8678
+---
+
+[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Indices are deprecated and will be removed in a future version.
diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md
index 4d3d66397..dce0422dc 100644
--- a/doc/manual/src/contributing/hacking.md
+++ b/doc/manual/src/contributing/hacking.md
@@ -64,6 +64,27 @@ $ nix build
You can also build Nix for one of the [supported platforms](#platforms).
+## Makefile variables
+
+You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run
+`make install`.
+
+You may want to set `MAKEFLAGS="-e -j $NIX_BUILD_CORES"` to allow environment
+variables to override `Makefile` variables.
+
+- `ENABLE_BUILD=yes` to enable building the C++ code.
+- `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.).
+
+ The docs can take a while to build, so you may want to disable this for local development.
+- `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests.
+- `ENABLE_UNIT_TESTS=yes` to enable building the unit tests.
+- `OPTIMIZE=1` to enable optimizations.
+- `libraries=libutil programs=` to only build a specific library (this will
+ fail in the linking phase if you don't have the other libraries built, but is
+ useful for checking types).
+- `libraries= programs=nix` to only build a specific program (this will not, in
+ general, work, because the programs need the libraries).
+
## Building Nix
To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:
diff --git a/doc/manual/src/installation/prerequisites-source.md b/doc/manual/src/installation/prerequisites-source.md
index d4babf1ea..807e82517 100644
--- a/doc/manual/src/installation/prerequisites-source.md
+++ b/doc/manual/src/installation/prerequisites-source.md
@@ -72,7 +72,7 @@
This is an optional dependency and can be disabled
by providing a `--disable-cpuid` to the `configure` script.
- - Unless `./configure --disable-tests` is specified, GoogleTest (GTest) and
+ - Unless `./configure --disable-unit-tests` is specified, GoogleTest (GTest) and
RapidCheck are required, which are available at
and
respectively.
diff --git a/flake.nix b/flake.nix
index 8c4436729..a8fc105e8 100644
--- a/flake.nix
+++ b/flake.nix
@@ -395,7 +395,7 @@
stdenvs)));
devShells = let
- makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; }).overrideAttrs (attrs: {
+ makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; forDevShell = true; }).overrideAttrs (attrs: {
installFlags = "sysconfdir=$(out)/etc";
shellHook = ''
PATH=$prefix/bin:$PATH
diff --git a/mk/disable-tests.mk b/mk/disable-tests.mk
deleted file mode 100644
index f72f84412..000000000
--- a/mk/disable-tests.mk
+++ /dev/null
@@ -1,12 +0,0 @@
-# This file is only active for `./configure --disable-tests`.
-# Running `make check` or `make installcheck` would indicate a mistake in the
-# caller.
-
-installcheck:
- @echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make installcheck'."
- @exit 1
-
-# This currently has little effect.
-check:
- @echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make check'."
- @exit 1
diff --git a/package.nix b/package.nix
index 24395b484..b5ff45083 100644
--- a/package.nix
+++ b/package.nix
@@ -87,6 +87,9 @@
, test-daemon ? null
, test-client ? null
+# Avoid setting things that would interfere with a functioning devShell
+, forDevShell ? false
+
# Not a real argument, just the only way to approximate let-binding some
# stuff for argument defaults.
, __forDefaults ? {
@@ -104,30 +107,6 @@ let
inherit doBuild doCheck doInstallCheck;
};
- filesets = {
- baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.;
-
- configureFiles = fileset.unions [
- ./.version
- ./configure.ac
- ./m4
- # TODO: do we really need README.md? It doesn't seem used in the build.
- ./README.md
- ];
-
- topLevelBuildFiles = fileset.unions [
- ./local.mk
- ./Makefile
- ./Makefile.config.in
- ./mk
- ];
-
- functionalTestFiles = fileset.unions [
- ./tests/functional
- (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
- ];
- };
-
mkDerivation =
if withCoverageChecks
then
@@ -151,32 +130,44 @@ mkDerivation (finalAttrs: let
# to be run later, requiresthe unit tests to be built.
buildUnitTests = doCheck || installUnitTests;
- anySortOfTesting = buildUnitTests || doInstallCheck;
-
in {
inherit pname version;
src =
let
-
+ baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.;
in
fileset.toSource {
root = ./.;
- fileset = fileset.intersect filesets.baseFiles (fileset.unions ([
- filesets.configureFiles
- filesets.topLevelBuildFiles
- ./doc/internal-api
+ fileset = fileset.intersect baseFiles (fileset.unions ([
+ # For configure
+ ./.version
+ ./configure.ac
+ ./m4
+ # TODO: do we really need README.md? It doesn't seem used in the build.
+ ./README.md
+ # For make, regardless of what we are building
+ ./local.mk
+ ./Makefile
+ ./Makefile.config.in
+ ./mk
+ (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
] ++ lib.optionals doBuild [
./boehmgc-coroutine-sp-fallback.diff
./doc
./misc
./precompiled-headers.h
./src
- ./tests/unit
./COPYING
./scripts/local.mk
- ] ++ lib.optionals anySortOfTesting [
- filesets.functionalTestFiles
+ ] ++ lib.optionals buildUnitTests [
+ ./doc/manual
+ ] ++ lib.optionals enableInternalAPIDocs [
+ ./doc/internal-api
+ ] ++ lib.optionals buildUnitTests [
+ ./tests/unit
+ ] ++ lib.optionals doInstallCheck [
+ ./tests/functional
]));
};
@@ -275,12 +266,14 @@ in {
);
configureFlags = [
- "--sysconfdir=/etc"
(lib.enableFeature doBuild "build")
- (lib.enableFeature anySortOfTesting "tests")
+ (lib.enableFeature buildUnitTests "unit-tests")
+ (lib.enableFeature doInstallCheck "functional-tests")
(lib.enableFeature enableInternalAPIDocs "internal-api-docs")
(lib.enableFeature enableManual "doc-gen")
(lib.enableFeature installUnitTests "install-unit-tests")
+ ] ++ lib.optionals (!forDevShell) [
+ "--sysconfdir=/etc"
] ++ lib.optionals installUnitTests [
"--with-check-bin-dir=${builtins.placeholder "check"}/bin"
"--with-check-lib-dir=${builtins.placeholder "check"}/lib"
@@ -310,10 +303,7 @@ in {
'';
postInstall = lib.optionalString doBuild (
- ''
- mkdir -p $doc/nix-support
- echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
- '' + lib.optionalString stdenv.hostPlatform.isStatic ''
+ lib.optionalString stdenv.hostPlatform.isStatic ''
mkdir -p $out/nix-support
echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
'' + lib.optionalString stdenv.isDarwin ''
@@ -322,7 +312,10 @@ in {
$out/lib/libboost_context.dylib \
$out/lib/libnixutil.dylib
''
- ) + lib.optionalString enableInternalAPIDocs ''
+ ) + lib.optionalString enableManual ''
+ mkdir -p ''${!outputDoc}/nix-support
+ echo "doc manual ''${!outputDoc}/share/doc/nix/manual" >> ''${!outputDoc}/nix-support/hydra-build-products
+ '' + lib.optionalString enableInternalAPIDocs ''
mkdir -p ''${!outputDoc}/nix-support
echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> ''${!outputDoc}/nix-support/hydra-build-products
'';
diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs
index 82c7db608..4964b8a34 100644
--- a/perl/lib/Nix/Store.xs
+++ b/perl/lib/Nix/Store.xs
@@ -13,6 +13,7 @@
#include "globals.hh"
#include "store-api.hh"
#include "crypto.hh"
+#include "posix-source-accessor.hh"
#include
#include
@@ -205,7 +206,10 @@ void importPaths(int fd, int dontCheckSigs)
SV * hashPath(char * algo, int base32, char * path)
PPCODE:
try {
- Hash h = hashPath(parseHashAlgo(algo), path).first;
+ PosixSourceAccessor accessor;
+ Hash h = hashPath(
+ accessor, CanonPath::fromCwd(path),
+ FileIngestionMethod::Recursive, parseHashAlgo(algo)).first;
auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
@@ -281,7 +285,11 @@ SV * addToStore(char * srcPath, int recursive, char * algo)
PPCODE:
try {
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
- auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashAlgo(algo));
+ PosixSourceAccessor accessor;
+ auto path = store()->addToStore(
+ std::string(baseNameOf(srcPath)),
+ accessor, CanonPath::fromCwd(srcPath),
+ method, parseHashAlgo(algo));
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh
index 0326d3415..766f81bde 100644
--- a/scripts/install-darwin-multi-user.sh
+++ b/scripts/install-darwin-multi-user.sh
@@ -3,11 +3,13 @@
set -eu
set -o pipefail
+# System specific settings
+export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-301}"
+export NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d"
+
readonly NIX_DAEMON_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist
# create by default; set 0 to DIY, use a symlink, etc.
readonly NIX_VOLUME_CREATE=${NIX_VOLUME_CREATE:-1} # now default
-NIX_FIRST_BUILD_UID="301"
-NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d"
# caution: may update times on / if not run as normal non-root user
read_only_root() {
diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index a08f62333..ad3ee8881 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -25,9 +25,9 @@ readonly RED='\033[31m'
readonly NIX_USER_COUNT=${NIX_USER_COUNT:-32}
readonly NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-30000}"
readonly NIX_BUILD_GROUP_NAME="nixbld"
-# darwin installer needs to override these
-NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}"
-NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d"
+# each system specific installer must set these:
+# NIX_FIRST_BUILD_UID
+# NIX_BUILD_USER_NAME_TEMPLATE
# Please don't change this. We don't support it, because the
# default shell profile that comes with Nix doesn't support it.
readonly NIX_ROOT="/nix"
@@ -707,6 +707,12 @@ EOF
fi
}
+check_required_system_specific_settings() {
+ if [ -z "${NIX_FIRST_BUILD_UID+x}" ] || [ -z "${NIX_BUILD_USER_NAME_TEMPLATE+x}" ]; then
+ failure "Internal error: System specific installer for $(uname) ($1) does not export required settings."
+ fi
+}
+
welcome_to_nix() {
local -r NIX_UID_RANGES="${NIX_FIRST_BUILD_UID}..$((NIX_FIRST_BUILD_UID + NIX_USER_COUNT - 1))"
local -r RANGE_TEXT=$(echo -ne "${BLUE}(uids [${NIX_UID_RANGES}])${ESC}")
@@ -726,7 +732,9 @@ manager. This will happen in a few stages:
if you are ready to continue.
3. Create the system users ${RANGE_TEXT} and groups ${GROUP_TEXT}
- that the Nix daemon uses to run builds.
+ that the Nix daemon uses to run builds. To create system users
+ in a different range, exit and run this tool again with
+ NIX_FIRST_BUILD_UID set.
4. Perform the basic installation of the Nix files daemon.
@@ -968,13 +976,16 @@ main() {
if is_os_darwin; then
# shellcheck source=./install-darwin-multi-user.sh
. "$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh"
+ check_required_system_specific_settings "install-darwin-multi-user.sh"
elif is_os_linux; then
# shellcheck source=./install-systemd-multi-user.sh
. "$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh" # most of this works on non-systemd distros also
+ check_required_system_specific_settings "install-systemd-multi-user.sh"
else
failure "Sorry, I don't know what to do on $(uname)"
fi
+
welcome_to_nix
if ! is_root; then
diff --git a/scripts/install-systemd-multi-user.sh b/scripts/install-systemd-multi-user.sh
index 07b34033a..202a9bb54 100755
--- a/scripts/install-systemd-multi-user.sh
+++ b/scripts/install-systemd-multi-user.sh
@@ -3,6 +3,10 @@
set -eu
set -o pipefail
+# System specific settings
+export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}"
+export NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d"
+
readonly SERVICE_SRC=/lib/systemd/system/nix-daemon.service
readonly SERVICE_DEST=/etc/systemd/system/nix-daemon.service
diff --git a/src/libcmd/installable-value.cc b/src/libcmd/installable-value.cc
index 08ad35105..bdc34bbe3 100644
--- a/src/libcmd/installable-value.cc
+++ b/src/libcmd/installable-value.cc
@@ -44,7 +44,7 @@ ref InstallableValue::require(ref installable)
std::optional InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
{
if (v.type() == nPath) {
- auto storePath = v.path().fetchToStore(state->store);
+ auto storePath = v.path().fetchToStore(*state->store);
return {{
.path = DerivedPath::Opaque {
.path = std::move(storePath),
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 1552e3e92..c9c25c898 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -2317,7 +2317,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat
auto dstPath = i != srcToStore.end()
? i->second
: [&]() {
- auto dstPath = path.fetchToStore(store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
+ auto dstPath = path.fetchToStore(*store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
allowPath(dstPath);
srcToStore.insert_or_assign(path, dstPath);
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc
index 8b0eb7460..86a0982f3 100644
--- a/src/libexpr/flake/flakeref.cc
+++ b/src/libexpr/flake/flakeref.cc
@@ -190,7 +190,7 @@ std::optional> parseFlakeIdRef(
static std::regex flakeRegex(
"((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)"
- + "(?:#(" + queryRegex + "))?",
+ + "(?:#(" + fragmentRegex + "))?",
std::regex::ECMAScript);
if (std::regex_match(url, match, flakeRegex)) {
diff --git a/src/libexpr/flake/url-name.cc b/src/libexpr/flake/url-name.cc
new file mode 100644
index 000000000..7e51aa2e1
--- /dev/null
+++ b/src/libexpr/flake/url-name.cc
@@ -0,0 +1,48 @@
+#include "url-name.hh"
+#include
+#include
+
+namespace nix {
+
+static const std::string attributeNamePattern("[a-z0-9_-]+");
+static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")(\\^.*)?");
+static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+");
+static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")");
+static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern +")(?:/.*)?");
+static const std::regex gitProviderRegex("github|gitlab|sourcehut");
+static const std::regex gitSchemeRegex("git($|\\+.*)");
+static const std::regex defaultOutputRegex(".*\\.default($|\\^.*)");
+
+std::optional getNameFromURL(const ParsedURL & url)
+{
+ std::smatch match;
+
+ /* If there is a dir= argument, use its value */
+ if (url.query.count("dir") > 0)
+ return url.query.at("dir");
+
+ /* If the fragment isn't a "default" and contains two attribute elements, use the last one */
+ if (std::regex_match(url.fragment, match, lastAttributeRegex))
+ return match.str(1);
+
+ /* If this is a github/gitlab/sourcehut flake, use the repo name */
+ if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(url.path, match, secondPathSegmentRegex))
+ return match.str(1);
+
+ /* If it is a regular git flake, use the directory name */
+ if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(url.path, match, lastPathSegmentRegex))
+ return match.str(1);
+
+ /* If everything failed but there is a non-default fragment, use it in full */
+ if (!url.fragment.empty() && !std::regex_match(url.fragment, defaultOutputRegex))
+ return url.fragment;
+
+ /* If there is no fragment, take the last element of the path */
+ if (std::regex_match(url.path, match, lastPathSegmentRegex))
+ return match.str(1);
+
+ /* If even that didn't work, the URL does not contain enough info to determine a useful name */
+ return {};
+}
+
+}
diff --git a/src/libexpr/flake/url-name.hh b/src/libexpr/flake/url-name.hh
new file mode 100644
index 000000000..6f32754d2
--- /dev/null
+++ b/src/libexpr/flake/url-name.hh
@@ -0,0 +1,20 @@
+#include "url.hh"
+#include "url-parts.hh"
+#include "util.hh"
+#include "split.hh"
+
+namespace nix {
+
+/**
+ * Try to extract a reasonably unique and meaningful, human-readable
+ * name of a flake output from a parsed URL.
+ * When nullopt is returned, the callsite should use information available
+ * to it outside of the URL to determine a useful name.
+ * This is a heuristic approach intended for user interfaces.
+ * @return nullopt if the extracted name is not useful to identify a
+ * flake output, for example because it is empty or "default".
+ * Otherwise returns the extracted name.
+ */
+std::optional getNameFromURL(const ParsedURL & url);
+
+}
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index d78a28c73..1ca4a2541 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -438,9 +438,7 @@ static RegisterPrimOp primop_isNull({
.doc = R"(
Return `true` if *e* evaluates to `null`, and `false` otherwise.
- > **Warning**
- >
- > This function is *deprecated*; just write `e == null` instead.
+ This is equivalent to `e == null`.
)",
.fun = prim_isNull,
});
@@ -2072,8 +2070,14 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
}
auto storePath = settings.readOnlyMode
- ? state.store->computeStorePathForText(name, contents, refs)
- : state.store->addTextToStore(name, contents, refs, state.repair);
+ ? state.store->makeFixedOutputPathFromCA(name, TextInfo {
+ .hash = hashString(HashAlgorithm::SHA256, contents),
+ .references = std::move(refs),
+ })
+ : ({
+ StringSource s { contents };
+ state.store->addToStoreFromDump(s, name, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair);
+ });
/* Note: we don't need to add `context' to the context of the
result, since `storePath' itself has references to the paths
@@ -2229,7 +2233,7 @@ static void addPath(
});
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
- auto dstPath = path.fetchToStore(state.store, name, method, filter.get(), state.repair);
+ auto dstPath = path.fetchToStore(*state.store, name, method, filter.get(), state.repair);
if (expectedHash && expectedStorePath != dstPath)
state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
state.allowAndSetStorePathString(dstPath, v);
diff --git a/src/libfetchers/cache.cc b/src/libfetchers/cache.cc
index 63b05bdab..e071b4717 100644
--- a/src/libfetchers/cache.cc
+++ b/src/libfetchers/cache.cc
@@ -106,7 +106,7 @@ struct CacheImpl : Cache
}
void add(
- ref store,
+ Store & store,
const Attrs & inAttrs,
const Attrs & infoAttrs,
const StorePath & storePath,
@@ -115,13 +115,13 @@ struct CacheImpl : Cache
_state.lock()->add.use()
(attrsToJSON(inAttrs).dump())
(attrsToJSON(infoAttrs).dump())
- (store->printStorePath(storePath))
+ (store.printStorePath(storePath))
(locked)
(time(0)).exec();
}
std::optional> lookup(
- ref store,
+ Store & store,
const Attrs & inAttrs) override
{
if (auto res = lookupExpired(store, inAttrs)) {
@@ -134,7 +134,7 @@ struct CacheImpl : Cache
}
std::optional lookupExpired(
- ref store,
+ Store & store,
const Attrs & inAttrs) override
{
auto state(_state.lock());
@@ -148,19 +148,19 @@ struct CacheImpl : Cache
}
auto infoJSON = stmt.getStr(0);
- auto storePath = store->parseStorePath(stmt.getStr(1));
+ auto storePath = store.parseStorePath(stmt.getStr(1));
auto locked = stmt.getInt(2) != 0;
auto timestamp = stmt.getInt(3);
- store->addTempRoot(storePath);
- if (!store->isValidPath(storePath)) {
+ store.addTempRoot(storePath);
+ if (!store.isValidPath(storePath)) {
// FIXME: we could try to substitute 'storePath'.
debug("ignoring disappeared cache entry '%s'", inAttrsJSON);
return {};
}
debug("using cache entry '%s' -> '%s', '%s'",
- inAttrsJSON, infoJSON, store->printStorePath(storePath));
+ inAttrsJSON, infoJSON, store.printStorePath(storePath));
return Result {
.expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
diff --git a/src/libfetchers/cache.hh b/src/libfetchers/cache.hh
index f70589267..791d77025 100644
--- a/src/libfetchers/cache.hh
+++ b/src/libfetchers/cache.hh
@@ -50,14 +50,14 @@ struct Cache
/* Old cache for things that have a store path. */
virtual void add(
- ref store,
+ Store & store,
const Attrs & inAttrs,
const Attrs & infoAttrs,
const StorePath & storePath,
bool locked) = 0;
virtual std::optional> lookup(
- ref store,
+ Store & store,
const Attrs & inAttrs) = 0;
struct Result
@@ -68,7 +68,7 @@ struct Cache
};
virtual std::optional lookupExpired(
- ref store,
+ Store & store,
const Attrs & inAttrs) = 0;
};
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index 7ec1f9802..f309e5993 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -374,7 +374,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
std::pair InputScheme::fetch(ref store, const Input & input)
{
auto [accessor, input2] = getAccessor(store, input);
- auto storePath = SourcePath(accessor).fetchToStore(store, input2.getName());
+ auto storePath = SourcePath(accessor).fetchToStore(*store, input2.getName());
return {storePath, input2};
}
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 5dac66930..01cd28427 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -368,14 +368,14 @@ struct GitInputScheme : InputScheme
RepoInfo getRepoInfo(const Input & input) const
{
- auto checkHashType = [&](const std::optional<Hash> & hash)
+ auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256))
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
};
if (auto rev = input.getRev())
- checkHashType(rev);
+ checkHashAlgorithm(rev);
RepoInfo repoInfo;
diff --git a/src/libfetchers/input-accessor.cc b/src/libfetchers/input-accessor.cc
index 1f793bf1d..a647f5915 100644
--- a/src/libfetchers/input-accessor.cc
+++ b/src/libfetchers/input-accessor.cc
@@ -5,10 +5,10 @@
namespace nix {
StorePath InputAccessor::fetchToStore(
- ref<Store> store,
+ Store & store,
const CanonPath & path,
std::string_view name,
- FileIngestionMethod method,
+ ContentAddressMethod method,
PathFilter * filter,
RepairFlag repair)
{
@@ -20,10 +20,24 @@ StorePath InputAccessor::fetchToStore(
if (!filter && fingerprint) {
cacheKey = fetchers::Attrs{
{"_what", "fetchToStore"},
- {"store", store->storeDir},
+ {"store", store.storeDir},
{"name", std::string(name)},
{"fingerprint", *fingerprint},
- {"method", (uint8_t) method},
+ {
+ "method",
+ std::visit(overloaded {
+ [](const TextIngestionMethod &) {
+ return "text";
+ },
+ [](const FileIngestionMethod & fim) {
+ switch (fim) {
+ case FileIngestionMethod::Flat: return "flat";
+ case FileIngestionMethod::Recursive: return "nar";
+ default: assert(false);
+ }
+ },
+ }, method.raw),
+ },
{"path", path.abs()}
};
if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) {
@@ -35,17 +49,14 @@ StorePath InputAccessor::fetchToStore(
Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", showPath(path)));
- auto source = sinkToSource([&](Sink & sink) {
- if (method == FileIngestionMethod::Recursive)
- dumpPath(path, sink, filter ? *filter : defaultPathFilter);
- else
- readFile(path, sink);
- });
+ auto filter2 = filter ? *filter : defaultPathFilter;
auto storePath =
settings.readOnlyMode
- ? store->computeStorePathFromDump(*source, name, method, HashAlgorithm::SHA256).first
- : store->addToStoreFromDump(*source, name, method, HashAlgorithm::SHA256, repair);
+ ? store.computeStorePath(
+ name, *this, path, method, HashAlgorithm::SHA256, {}, filter2).first
+ : store.addToStore(
+ name, *this, path, method, HashAlgorithm::SHA256, {}, filter2, repair);
if (cacheKey)
fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);
@@ -60,9 +71,9 @@ std::ostream & operator << (std::ostream & str, const SourcePath & path)
}
StorePath SourcePath::fetchToStore(
- ref<Store> store,
+ Store & store,
std::string_view name,
- FileIngestionMethod method,
+ ContentAddressMethod method,
PathFilter * filter,
RepairFlag repair) const
{
diff --git a/src/libfetchers/input-accessor.hh b/src/libfetchers/input-accessor.hh
index f385e6231..d2a21cb4b 100644
--- a/src/libfetchers/input-accessor.hh
+++ b/src/libfetchers/input-accessor.hh
@@ -30,10 +30,10 @@ struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<InputAccessor>
StorePath fetchToStore(
- ref<Store> store,
+ Store & store,
const CanonPath & path,
std::string_view name = "source",
- FileIngestionMethod method = FileIngestionMethod::Recursive,
+ ContentAddressMethod method = FileIngestionMethod::Recursive,
PathFilter * filter = nullptr,
RepairFlag repair = NoRepair);
};
@@ -116,9 +116,9 @@ struct SourcePath
* Copy this `SourcePath` to the Nix store.
*/
StorePath fetchToStore(
- ref<Store> store,
+ Store & store,
std::string_view name = "source",
- FileIngestionMethod method = FileIngestionMethod::Recursive,
+ ContentAddressMethod method = FileIngestionMethod::Recursive,
PathFilter * filter = nullptr,
RepairFlag repair = NoRepair) const;
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index 6056b9a3c..9982389ab 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -6,6 +6,7 @@
#include "tarfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
+#include "posix-source-accessor.hh"
#include "fetch-settings.hh"
@@ -210,7 +211,12 @@ struct MercurialInputScheme : InputScheme
return files.count(file);
};
- auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, filter);
+ PosixSourceAccessor accessor;
+ auto storePath = store->addToStore(
+ input.getName(),
+ accessor, CanonPath { actualPath },
+ FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {},
+ filter);
return {std::move(storePath), input};
}
@@ -218,7 +224,7 @@ struct MercurialInputScheme : InputScheme
if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");
- auto checkHashType = [&](const std::optional<Hash> & hash)
+ auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && hash->algo != HashAlgorithm::SHA1)
throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
@@ -227,7 +233,7 @@ struct MercurialInputScheme : InputScheme
auto getLockedAttrs = [&]()
{
- checkHashType(input.getRev());
+ checkHashAlgorithm(input.getRev());
return Attrs({
{"type", "hg"},
@@ -246,7 +252,7 @@ struct MercurialInputScheme : InputScheme
};
if (input.getRev()) {
- if (auto res = getCache()->lookup(store, getLockedAttrs()))
+ if (auto res = getCache()->lookup(*store, getLockedAttrs()))
return makeResult(res->first, std::move(res->second));
}
@@ -259,7 +265,7 @@ struct MercurialInputScheme : InputScheme
{"ref", *input.getRef()},
});
- if (auto res = getCache()->lookup(store, unlockedAttrs)) {
+ if (auto res = getCache()->lookup(*store, unlockedAttrs)) {
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1);
if (!input.getRev() || input.getRev() == rev2) {
input.attrs.insert_or_assign("rev", rev2.gitRev());
@@ -305,7 +311,7 @@ struct MercurialInputScheme : InputScheme
auto revCount = std::stoull(tokens[1]);
input.attrs.insert_or_assign("ref", tokens[2]);
- if (auto res = getCache()->lookup(store, getLockedAttrs()))
+ if (auto res = getCache()->lookup(*store, getLockedAttrs()))
return makeResult(res->first, std::move(res->second));
Path tmpDir = createTempDir();
@@ -315,7 +321,8 @@ struct MercurialInputScheme : InputScheme
deletePath(tmpDir + "/.hg_archival.txt");
- auto storePath = store->addToStore(name, tmpDir);
+ PosixSourceAccessor accessor;
+ auto storePath = store->addToStore(name, accessor, CanonPath { tmpDir });
Attrs infoAttrs({
{"rev", input.getRev()->gitRev()},
@@ -324,14 +331,14 @@ struct MercurialInputScheme : InputScheme
if (!_input.getRev())
getCache()->add(
- store,
+ *store,
unlockedAttrs,
infoAttrs,
storePath,
false);
getCache()->add(
- store,
+ *store,
getLockedAttrs(),
infoAttrs,
storePath,
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index 086366180..3b7709440 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -8,6 +8,7 @@
#include "tarfile.hh"
#include "types.hh"
#include "split.hh"
+#include "posix-source-accessor.hh"
namespace nix::fetchers {
@@ -26,7 +27,7 @@ DownloadFileResult downloadFile(
{"name", name},
});
- auto cached = getCache()->lookupExpired(store, inAttrs);
+ auto cached = getCache()->lookupExpired(*store, inAttrs);
auto useCached = [&]() -> DownloadFileResult
{
@@ -91,7 +92,7 @@ DownloadFileResult downloadFile(
}
getCache()->add(
- store,
+ *store,
inAttrs,
infoAttrs,
*storePath,
@@ -99,7 +100,7 @@ DownloadFileResult downloadFile(
if (url != res.effectiveUri)
getCache()->add(
- store,
+ *store,
{
{"type", "file"},
{"url", res.effectiveUri},
@@ -130,7 +131,7 @@ DownloadTarballResult downloadTarball(
{"name", name},
});
- auto cached = getCache()->lookupExpired(store, inAttrs);
+ auto cached = getCache()->lookupExpired(*store, inAttrs);
if (cached && !cached->expired)
return {
@@ -156,7 +157,8 @@ DownloadTarballResult downloadTarball(
throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
auto topDir = tmpDir + "/" + members.begin()->name;
lastModified = lstat(topDir).st_mtime;
- unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, defaultPathFilter, NoRepair);
+ PosixSourceAccessor accessor;
+ unpackedStorePath = store->addToStore(name, accessor, CanonPath { topDir }, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, defaultPathFilter, NoRepair);
}
Attrs infoAttrs({
@@ -168,7 +170,7 @@ DownloadTarballResult downloadTarball(
infoAttrs.emplace("immutableUrl", *res.immutableUrl);
getCache()->add(
- store,
+ *store,
inAttrs,
infoAttrs,
*unpackedStorePath,
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 2837e8934..8a3052433 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -12,6 +12,7 @@
#include "thread-pool.hh"
#include "callback.hh"
#include "signals.hh"
+#include "archive.hh"
#include <chrono>
#include <future>
@@ -300,24 +301,60 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
}});
}
-StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name,
- FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
+StorePath BinaryCacheStore::addToStoreFromDump(
+ Source & dump,
+ std::string_view name,
+ ContentAddressMethod method,
+ HashAlgorithm hashAlgo,
+ const StorePathSet & references,
+ RepairFlag repair)
{
- if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
- unsupported("addToStoreFromDump");
- return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
+ std::optional<Hash> caHash;
+ std::string nar;
+
+ if (auto * dump2p = dynamic_cast<StringSource *>(&dump)) {
+ auto & dump2 = *dump2p;
+ // Hack, this gives us a "replayable" source so we can compute
+ // multiple hashes more easily.
+ caHash = hashString(HashAlgorithm::SHA256, dump2.s);
+ switch (method.getFileIngestionMethod()) {
+ case FileIngestionMethod::Recursive:
+ // The dump is already NAR in this case, just use it.
+ nar = dump2.s;
+ break;
+ case FileIngestionMethod::Flat:
+ // The dump is Flat, so we need to convert it to NAR with a
+ // single file.
+ StringSink s;
+ dumpString(dump2.s, s);
+ nar = std::move(s.s);
+ break;
+ }
+ } else {
+ // Otherwise, we have to do the same hashing as NAR so our single
+ // hash will suffice for both purposes.
+ if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
+ unsupported("addToStoreFromDump");
+ }
+ StringSource narDump { nar };
+
+ // Use `narDump` if we wrote to `nar`.
+ Source & narDump2 = nar.size() > 0
+ ? static_cast