Merge remote-tracking branch 'origin/master' into tarball-cache

Author: Eelco Dolstra
Date:   2023-12-22 16:45:01 +01:00
Commit: cf02b3335c

125 changed files with 1045 additions and 937 deletions

.github/CODEOWNERS

@@ -10,16 +10,8 @@
 # This file
 .github/CODEOWNERS @edolstra
-# Public documentation
-/doc @fricklerhandwerk
-*.md @fricklerhandwerk
 # Documentation of built-in functions
-src/libexpr/primops.cc @fricklerhandwerk @roberth
+src/libexpr/primops.cc @roberth
-# Documentation on experimental features
-src/libutil/experimental-features.cc @fricklerhandwerk
-# Documentation on configuration settings
-src/libstore/globals.hh @fricklerhandwerk
 # Libstore layer
 /src/libstore @thufschmitt

.github/PULL_REQUEST_TEMPLATE.md

@@ -10,6 +10,8 @@
 <!-- Large change: Provide instructions to reviewers how to read the diff. -->
-# Priorities
+# Priorities and Process
 Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc).
+The Nix maintainer team uses a [GitHub project board](https://github.com/orgs/NixOS/projects/19) to [schedule and track reviews](https://github.com/NixOS/nix/tree/master/maintainers#project-board-protocol).

.github/workflows/backport.yml

@@ -21,7 +21,7 @@ jobs:
           fetch-depth: 0
       - name: Create backport PRs
         # should be kept in sync with `version`
-        uses: zeebe-io/backport-action@v2.2.0
+        uses: zeebe-io/backport-action@v2.3.0
        with:
          # Config README: https://github.com/zeebe-io/backport-action#backport-action
          github_token: ${{ secrets.GITHUB_TOKEN }}

Makefile

@@ -24,7 +24,7 @@ makefiles = \
 misc/upstart/local.mk
 endif
-ifeq ($(ENABLE_BUILD)_$(ENABLE_TESTS), yes_yes)
+ifeq ($(ENABLE_UNIT_TESTS), yes)
 makefiles += \
   tests/unit/libutil/local.mk \
   tests/unit/libutil-support/local.mk \
@@ -34,16 +34,13 @@ makefiles += \
   tests/unit/libexpr-support/local.mk
 endif
-ifeq ($(ENABLE_TESTS), yes)
+ifeq ($(ENABLE_FUNCTIONAL_TESTS), yes)
 makefiles += \
   tests/functional/local.mk \
   tests/functional/ca/local.mk \
   tests/functional/dyn-drv/local.mk \
   tests/functional/test-libstoreconsumer/local.mk \
   tests/functional/plugins/local.mk
-else
-makefiles += \
-  mk/disable-tests.mk
 endif
 OPTIMIZE = 1
@@ -57,11 +54,40 @@ endif
 include mk/lib.mk
+# Must be included after `mk/lib.mk` so it isn't the default target.
+ifneq ($(ENABLE_UNIT_TESTS), yes)
+.PHONY: check
+check:
+	@echo "Unit tests are disabled. Configure without '--disable-unit-tests', or avoid calling 'make check'."
+	@exit 1
+endif
+ifneq ($(ENABLE_FUNCTIONAL_TESTS), yes)
+.PHONY: installcheck
+installcheck:
+	@echo "Functional tests are disabled. Configure without '--disable-functional-tests', or avoid calling 'make installcheck'."
+	@exit 1
+endif
 # Must be included after `mk/lib.mk` so rules refer to variables defined
 # by the library. Rules are not "lazy" like variables, unfortunately.
-ifeq ($(ENABLE_BUILD), yes)
+ifeq ($(ENABLE_DOC_GEN), yes)
 $(eval $(call include-sub-makefile, doc/manual/local.mk))
+else
+.PHONY: manual-html manpages
+manual-html manpages:
+	@echo "Generated docs are disabled. Configure without '--disable-doc-gen', or avoid calling 'make manpages' and 'make manual-html'."
+	@exit 1
+endif
+ifeq ($(ENABLE_INTERNAL_API_DOCS), yes)
 $(eval $(call include-sub-makefile, doc/internal-api/local.mk))
+else
+.PHONY: internal-api-html
+internal-api-html:
+	@echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
+	@exit 1
+endif
 GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src

Makefile.config.in

@@ -9,8 +9,11 @@ CXXFLAGS = @CXXFLAGS@
 CXXLTO = @CXXLTO@
 EDITLINE_LIBS = @EDITLINE_LIBS@
 ENABLE_BUILD = @ENABLE_BUILD@
+ENABLE_DOC_GEN = @ENABLE_DOC_GEN@
+ENABLE_FUNCTIONAL_TESTS = @ENABLE_FUNCTIONAL_TESTS@
+ENABLE_INTERNAL_API_DOCS = @ENABLE_INTERNAL_API_DOCS@
 ENABLE_S3 = @ENABLE_S3@
-ENABLE_TESTS = @ENABLE_TESTS@
+ENABLE_UNIT_TESTS = @ENABLE_UNIT_TESTS@
 GTEST_LIBS = @GTEST_LIBS@
 HAVE_LIBCPUID = @HAVE_LIBCPUID@
 HAVE_SECCOMP = @HAVE_SECCOMP@
@@ -36,12 +39,10 @@ checkbindir = @checkbindir@
 checklibdir = @checklibdir@
 datadir = @datadir@
 datarootdir = @datarootdir@
-doc_generate = @doc_generate@
 docdir = @docdir@
 embedded_sandbox_shell = @embedded_sandbox_shell@
 exec_prefix = @exec_prefix@
 includedir = @includedir@
-internal_api_docs = @internal_api_docs@
 libdir = @libdir@
 libexecdir = @libexecdir@
 localstatedir = @localstatedir@

configure.ac

@@ -138,20 +138,38 @@ AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]),
 ENABLE_BUILD=$enableval, ENABLE_BUILD=yes)
 AC_SUBST(ENABLE_BUILD)
-# Building without tests is useful for bootstrapping with a smaller footprint
+# Building without unit tests is useful for bootstrapping with a smaller footprint
 # or running the tests in a separate derivation. Otherwise, we do compile and
 # run them.
-AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]),
-ENABLE_TESTS=$enableval, ENABLE_TESTS=yes)
-AC_SUBST(ENABLE_TESTS)
-# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
-AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
-internal_api_docs=$enableval, internal_api_docs=no)
-AC_SUBST(internal_api_docs)
+AC_ARG_ENABLE(unit-tests, AS_HELP_STRING([--disable-unit-tests],[Do not build the tests]),
+ENABLE_UNIT_TESTS=$enableval, ENABLE_UNIT_TESTS=$ENABLE_BUILD)
+AC_SUBST(ENABLE_UNIT_TESTS)
 AS_IF(
-  [test "$ENABLE_BUILD" == "yes" || test "$ENABLE_TEST" == "yes"],
+  [test "$ENABLE_BUILD" == "no" && test "$ENABLE_UNIT_TESTS" == "yes"],
+  [AC_MSG_ERROR([Cannot enable unit tests when building overall is disabled. Please do not pass '--enable-unit-tests' or do not pass '--disable-build'.])])
+AC_ARG_ENABLE(functional-tests, AS_HELP_STRING([--disable-functional-tests],[Do not build the tests]),
+ENABLE_FUNCTIONAL_TESTS=$enableval, ENABLE_FUNCTIONAL_TESTS=yes)
+AC_SUBST(ENABLE_FUNCTIONAL_TESTS)
+# documentation generation switch
+AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
+ENABLE_DOC_GEN=$enableval, ENABLE_DOC_GEN=$ENABLE_BUILD)
+AC_SUBST(ENABLE_DOC_GEN)
+AS_IF(
+  [test "$ENABLE_BUILD" == "no" && test "$ENABLE_GENERATED_DOCS" == "yes"],
+  [AC_MSG_ERROR([Cannot enable generated docs when building overall is disabled. Please do not pass '--enable-doc-gen' or do not pass '--disable-build'.])])
+# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
+AC_ARG_ENABLE(internal-api-docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
+ENABLE_INTERNAL_API_DOCS=$enableval, ENABLE_INTERNAL_API_DOCS=no)
+AC_SUBST(ENABLE_INTERNAL_API_DOCS)
+AS_IF(
+  [test "$ENABLE_FUNCTIONAL_TESTS" == "yes" || test "$ENABLE_DOC_GEN" == "yes"],
   [NEED_PROG(jq, jq)])
 AS_IF([test "$ENABLE_BUILD" == "yes"],[
@@ -317,7 +335,7 @@ if test "$gc" = yes; then
   AC_DEFINE(HAVE_BOEHMGC, 1, [Whether to use the Boehm garbage collector.])
 fi
-AS_IF([test "$ENABLE_TESTS" == "yes"],[
+AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[
 # Look for gtest.
 PKG_CHECK_MODULES([GTEST], [gtest_main])
@@ -349,11 +367,6 @@ AC_LANG_POP(C++)
 # Look for nlohmann/json.
 PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])
-# documentation generation switch
-AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
-doc_generate=$enableval, doc_generate=yes)
-AC_SUBST(doc_generate)
 # Look for lowdown library.
 PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])

doc/internal-api/local.mk

@@ -1,19 +1,7 @@
+.PHONY: internal-api-html
-ifeq ($(internal_api_docs), yes)
 $(docdir)/internal-api/html/index.html $(docdir)/internal-api/latex: $(d)/doxygen.cfg
 	mkdir -p $(docdir)/internal-api
 	{ cat $< ; echo "OUTPUT_DIRECTORY=$(docdir)/internal-api" ; } | doxygen -
 # Generate the HTML API docs for Nix's unstable internal interfaces.
-.PHONY: internal-api-html
 internal-api-html: $(docdir)/internal-api/html/index.html
-else
-# Make a nicer error message
-internal-api-html:
-	@echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
-	@exit 1
-endif

doc/manual/local.mk

@@ -1,5 +1,3 @@
-ifeq ($(doc_generate),yes)
 # The version of Nix used to generate the doc. Can also be
 # `$(nix_INSTALL_PATH)` or just `nix` (to grab ambient from the `PATH`),
 # if one prefers.
@@ -180,6 +178,8 @@ manual-html: $(docdir)/manual/index.html
 install: $(docdir)/manual/index.html
 # Generate 'nix' manpages.
+.PHONY: manpages
+manpages: $(mandir)/man1/nix3-manpages
 install: $(mandir)/man1/nix3-manpages
 man: doc/manual/generated/man1/nix3-manpages
 all: doc/manual/generated/man1/nix3-manpages
@@ -225,5 +225,3 @@ $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/
 	@rm -rf $(DESTDIR)$(docdir)/manual
 	@mv $(DESTDIR)$(docdir)/manual.tmp/html $(DESTDIR)$(docdir)/manual
 	@rm -rf $(DESTDIR)$(docdir)/manual.tmp
-endif

doc/manual/rl-next (new release note)

@@ -0,0 +1,6 @@
+---
+synopsis: "`nix profile` now allows referring to elements by human-readable name"
+prs: 8678
+---
+
+[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Indices are deprecated and will be removed in a future version.

doc/manual/src/contributing/hacking.md

@@ -64,6 +64,27 @@ $ nix build
 You can also build Nix for one of the [supported platforms](#platforms).
+## Makefile variables
+
+You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run
+`make install`.
+
+You may want to set `MAKEFLAGS="-e -j $NIX_BUILD_CORES"` to allow environment
+variables to override `Makefile` variables.
+
+- `ENABLE_BUILD=yes` to enable building the C++ code.
+- `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.).
+  The docs can take a while to build, so you may want to disable this for local development.
+- `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests.
+- `ENABLE_UNIT_TESTS=yes` to enable building the unit tests.
+- `OPTIMIZE=1` to enable optimizations.
+- `libraries=libutil programs=` to only build a specific library (this will
+  fail in the linking phase if you don't have the other libraries built, but is
+  useful for checking types).
+- `libraries= programs=nix` to only build a specific program (this will not, in
+  general, work, because the programs need the libraries).
+
 ## Building Nix
 To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:

doc/manual/src/installation/prerequisites-source.md

@@ -72,7 +72,7 @@
   This is an optional dependency and can be disabled
   by providing a `--disable-cpuid` to the `configure` script.
-- Unless `./configure --disable-tests` is specified, GoogleTest (GTest) and
+- Unless `./configure --disable-unit-tests` is specified, GoogleTest (GTest) and
   RapidCheck are required, which are available at
   <https://google.github.io/googletest/> and
   <https://github.com/emil-e/rapidcheck> respectively.

flake.nix

@@ -395,7 +395,7 @@
       stdenvs)));
     devShells = let
-      makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; }).overrideAttrs (attrs: {
+      makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; forDevShell = true; }).overrideAttrs (attrs: {
         installFlags = "sysconfdir=$(out)/etc";
         shellHook = ''
           PATH=$prefix/bin:$PATH

mk/disable-tests.mk (deleted file)

@@ -1,12 +0,0 @@
-# This file is only active for `./configure --disable-tests`.
-# Running `make check` or `make installcheck` would indicate a mistake in the
-# caller.
-installcheck:
-	@echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make installcheck'."
-	@exit 1
-# This currently has little effect.
-check:
-	@echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make check'."
-	@exit 1

package.nix

@@ -87,6 +87,9 @@
 , test-daemon ? null
 , test-client ? null
+# Avoid setting things that would interfere with a functioning devShell
+, forDevShell ? false
+
 # Not a real argument, just the only way to approximate let-binding some
 # stuff for argument defaults.
 , __forDefaults ? {
@@ -104,30 +107,6 @@ let
   inherit doBuild doCheck doInstallCheck;
 };
-filesets = {
-  baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.;
-  configureFiles = fileset.unions [
-    ./.version
-    ./configure.ac
-    ./m4
-    # TODO: do we really need README.md? It doesn't seem used in the build.
-    ./README.md
-  ];
-  topLevelBuildFiles = fileset.unions [
-    ./local.mk
-    ./Makefile
-    ./Makefile.config.in
-    ./mk
-  ];
-  functionalTestFiles = fileset.unions [
-    ./tests/functional
-    (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
-  ];
-};
 mkDerivation =
   if withCoverageChecks
   then
@@ -151,32 +130,44 @@ mkDerivation (finalAttrs: let
   # to be run later, requires the unit tests to be built.
   buildUnitTests = doCheck || installUnitTests;
-  anySortOfTesting = buildUnitTests || doInstallCheck;
 in {
   inherit pname version;
   src =
     let
+      baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.;
     in
       fileset.toSource {
         root = ./.;
-        fileset = fileset.intersect filesets.baseFiles (fileset.unions ([
-          filesets.configureFiles
-          filesets.topLevelBuildFiles
-          ./doc/internal-api
+        fileset = fileset.intersect baseFiles (fileset.unions ([
+          # For configure
+          ./.version
+          ./configure.ac
+          ./m4
+          # TODO: do we really need README.md? It doesn't seem used in the build.
+          ./README.md
+          # For make, regardless of what we are building
+          ./local.mk
+          ./Makefile
+          ./Makefile.config.in
+          ./mk
+          (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
        ] ++ lib.optionals doBuild [
          ./boehmgc-coroutine-sp-fallback.diff
          ./doc
          ./misc
          ./precompiled-headers.h
          ./src
-          ./tests/unit
          ./COPYING
          ./scripts/local.mk
-        ] ++ lib.optionals anySortOfTesting [
-          filesets.functionalTestFiles
+        ] ++ lib.optionals buildUnitTests [
+          ./doc/manual
+        ] ++ lib.optionals enableInternalAPIDocs [
+          ./doc/internal-api
+        ] ++ lib.optionals buildUnitTests [
+          ./tests/unit
+        ] ++ lib.optionals doInstallCheck [
+          ./tests/functional
        ]));
      };
@@ -275,12 +266,14 @@ in {
   );
   configureFlags = [
-    "--sysconfdir=/etc"
     (lib.enableFeature doBuild "build")
-    (lib.enableFeature anySortOfTesting "tests")
+    (lib.enableFeature buildUnitTests "unit-tests")
+    (lib.enableFeature doInstallCheck "functional-tests")
     (lib.enableFeature enableInternalAPIDocs "internal-api-docs")
     (lib.enableFeature enableManual "doc-gen")
     (lib.enableFeature installUnitTests "install-unit-tests")
+  ] ++ lib.optionals (!forDevShell) [
+    "--sysconfdir=/etc"
   ] ++ lib.optionals installUnitTests [
     "--with-check-bin-dir=${builtins.placeholder "check"}/bin"
     "--with-check-lib-dir=${builtins.placeholder "check"}/lib"
@@ -310,10 +303,7 @@ in {
   '';
   postInstall = lib.optionalString doBuild (
-    ''
-      mkdir -p $doc/nix-support
-      echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
-    '' + lib.optionalString stdenv.hostPlatform.isStatic ''
+    lib.optionalString stdenv.hostPlatform.isStatic ''
       mkdir -p $out/nix-support
       echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
    '' + lib.optionalString stdenv.isDarwin ''
@@ -322,7 +312,10 @@ in {
       $out/lib/libboost_context.dylib \
       $out/lib/libnixutil.dylib
    ''
-  ) + lib.optionalString enableInternalAPIDocs ''
+  ) + lib.optionalString enableManual ''
+    mkdir -p ''${!outputDoc}/nix-support
+    echo "doc manual ''${!outputDoc}/share/doc/nix/manual" >> ''${!outputDoc}/nix-support/hydra-build-products
+  '' + lib.optionalString enableInternalAPIDocs ''
     mkdir -p ''${!outputDoc}/nix-support
     echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> ''${!outputDoc}/nix-support/hydra-build-products
   '';

perl/lib/Nix/Store.xs

@@ -13,6 +13,7 @@
 #include "globals.hh"
 #include "store-api.hh"
 #include "crypto.hh"
+#include "posix-source-accessor.hh"
 #include <sodium.h>
 #include <nlohmann/json.hpp>
@@ -205,7 +206,10 @@ void importPaths(int fd, int dontCheckSigs)
 SV * hashPath(char * algo, int base32, char * path)
     PPCODE:
         try {
-            Hash h = hashPath(parseHashAlgo(algo), path).first;
+            PosixSourceAccessor accessor;
+            Hash h = hashPath(
+                accessor, CanonPath::fromCwd(path),
+                FileIngestionMethod::Recursive, parseHashAlgo(algo)).first;
            auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
            XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
        } catch (Error & e) {
@@ -281,7 +285,11 @@ SV * addToStore(char * srcPath, int recursive, char * algo)
     PPCODE:
         try {
            auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
-           auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashAlgo(algo));
+           PosixSourceAccessor accessor;
+           auto path = store()->addToStore(
+               std::string(baseNameOf(srcPath)),
+               accessor, CanonPath::fromCwd(srcPath),
+               method, parseHashAlgo(algo));
            XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
        } catch (Error & e) {
            croak("%s", e.what());
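
Both XS wrappers now anchor the caller's path at the current working directory with `CanonPath::fromCwd` before hashing or adding it. A rough standalone sketch of that kind of canonicalization, using std::filesystem as a stand-in for Nix's `CanonPath` (an assumption for illustration, not the real implementation):

    #include <filesystem>
    #include <iostream>

    int main()
    {
        namespace fs = std::filesystem;
        // A relative argument such as "foo/../bar" is anchored at the current
        // working directory and lexically normalized, similar in spirit to
        // what CanonPath::fromCwd does for the Perl bindings above.
        fs::path p = fs::absolute("foo/../bar").lexically_normal();
        std::cout << p << '\n';
    }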

scripts/install-darwin-multi-user.sh

@@ -3,11 +3,13 @@
 set -eu
 set -o pipefail
+# System specific settings
+export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-301}"
+export NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d"
+
 readonly NIX_DAEMON_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist
 # create by default; set 0 to DIY, use a symlink, etc.
 readonly NIX_VOLUME_CREATE=${NIX_VOLUME_CREATE:-1} # now default
-NIX_FIRST_BUILD_UID="301"
-NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d"
 # caution: may update times on / if not run as normal non-root user
 read_only_root() {

scripts/install-multi-user.sh

@@ -25,9 +25,9 @@ readonly RED='\033[31m'
 readonly NIX_USER_COUNT=${NIX_USER_COUNT:-32}
 readonly NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-30000}"
 readonly NIX_BUILD_GROUP_NAME="nixbld"
-# darwin installer needs to override these
-NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}"
-NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d"
+# each system specific installer must set these:
+# NIX_FIRST_BUILD_UID
+# NIX_BUILD_USER_NAME_TEMPLATE
 # Please don't change this. We don't support it, because the
 # default shell profile that comes with Nix doesn't support it.
 readonly NIX_ROOT="/nix"
@@ -707,6 +707,12 @@ EOF
     fi
 }
+check_required_system_specific_settings() {
+    if [ -z "${NIX_FIRST_BUILD_UID+x}" ] || [ -z "${NIX_BUILD_USER_NAME_TEMPLATE+x}" ]; then
+        failure "Internal error: System specific installer for $(uname) ($1) does not export required settings."
+    fi
+}
+
 welcome_to_nix() {
     local -r NIX_UID_RANGES="${NIX_FIRST_BUILD_UID}..$((NIX_FIRST_BUILD_UID + NIX_USER_COUNT - 1))"
     local -r RANGE_TEXT=$(echo -ne "${BLUE}(uids [${NIX_UID_RANGES}])${ESC}")
@@ -726,7 +732,9 @@ manager. This will happen in a few stages:
    if you are ready to continue.
 3. Create the system users ${RANGE_TEXT} and groups ${GROUP_TEXT}
-   that the Nix daemon uses to run builds.
+   that the Nix daemon uses to run builds. To create system users
+   in a different range, exit and run this tool again with
+   NIX_FIRST_BUILD_UID set.
 4. Perform the basic installation of the Nix files daemon.
@@ -968,13 +976,16 @@ main() {
     if is_os_darwin; then
         # shellcheck source=./install-darwin-multi-user.sh
         . "$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh"
+        check_required_system_specific_settings "install-darwin-multi-user.sh"
     elif is_os_linux; then
         # shellcheck source=./install-systemd-multi-user.sh
         . "$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh" # most of this works on non-systemd distros also
+        check_required_system_specific_settings "install-systemd-multi-user.sh"
     else
         failure "Sorry, I don't know what to do on $(uname)"
     fi
     welcome_to_nix
     if ! is_root; then

scripts/install-systemd-multi-user.sh

@@ -3,6 +3,10 @@
 set -eu
 set -o pipefail
+# System specific settings
+export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}"
+export NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d"
+
 readonly SERVICE_SRC=/lib/systemd/system/nix-daemon.service
 readonly SERVICE_DEST=/etc/systemd/system/nix-daemon.service

src/libcmd/installable-value.cc

@@ -44,7 +44,7 @@ ref<InstallableValue> InstallableValue::require(ref<Installable> installable)
 std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
 {
     if (v.type() == nPath) {
-        auto storePath = v.path().fetchToStore(state->store);
+        auto storePath = v.path().fetchToStore(*state->store);
         return {{
             .path = DerivedPath::Opaque {
                 .path = std::move(storePath),

src/libexpr/eval.cc

@@ -2317,7 +2317,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat
     auto dstPath = i != srcToStore.end()
         ? i->second
         : [&]() {
-            auto dstPath = path.fetchToStore(store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
+            auto dstPath = path.fetchToStore(*store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
             allowPath(dstPath);
             srcToStore.insert_or_assign(path, dstPath);
             printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));

src/libexpr/flake/flakeref.cc

@@ -190,7 +190,7 @@ std::optional<std::pair<FlakeRef, std::string>> parseFlakeIdRef(
     static std::regex flakeRegex(
         "((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)"
-        + "(?:#(" + queryRegex + "))?",
+        + "(?:#(" + fragmentRegex + "))?",
         std::regex::ECMAScript);
     if (std::regex_match(url, match, flakeRegex)) {

src/libexpr/flake/url-name.cc (new file)

@@ -0,0 +1,48 @@
+#include "url-name.hh"
+#include <regex>
+#include <iostream>
+
+namespace nix {
+
+static const std::string attributeNamePattern("[a-z0-9_-]+");
+static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern + ")(\\^.*)?");
+static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+");
+static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern + ")");
+static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern + ")(?:/.*)?");
+static const std::regex gitProviderRegex("github|gitlab|sourcehut");
+static const std::regex gitSchemeRegex("git($|\\+.*)");
+static const std::regex defaultOutputRegex(".*\\.default($|\\^.*)");
+
+std::optional<std::string> getNameFromURL(const ParsedURL & url)
+{
+    std::smatch match;
+
+    /* If there is a dir= argument, use its value */
+    if (url.query.count("dir") > 0)
+        return url.query.at("dir");
+
+    /* If the fragment isn't a "default" and contains two attribute elements, use the last one */
+    if (std::regex_match(url.fragment, match, lastAttributeRegex))
+        return match.str(1);
+
+    /* If this is a github/gitlab/sourcehut flake, use the repo name */
+    if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(url.path, match, secondPathSegmentRegex))
+        return match.str(1);
+
+    /* If it is a regular git flake, use the directory name */
+    if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(url.path, match, lastPathSegmentRegex))
+        return match.str(1);
+
+    /* If everything failed but there is a non-default fragment, use it in full */
+    if (!url.fragment.empty() && !std::regex_match(url.fragment, defaultOutputRegex))
+        return url.fragment;
+
+    /* If there is no fragment, take the last element of the path */
+    if (std::regex_match(url.path, match, lastPathSegmentRegex))
+        return match.str(1);
+
+    /* If even that didn't work, the URL does not contain enough info to determine a useful name */
+    return {};
+}
+
+}

src/libexpr/flake/url-name.hh (new file)

@@ -0,0 +1,20 @@
+#include "url.hh"
+#include "url-parts.hh"
+#include "util.hh"
+#include "split.hh"
+
+namespace nix {
+
+/**
+ * Try to extract a reasonably unique and meaningful, human-readable
+ * name of a flake output from a parsed URL.
+ * When nullopt is returned, the callsite should use information available
+ * to it outside of the URL to determine a useful name.
+ * This is a heuristic approach intended for user interfaces.
+ * @return nullopt if the extracted name is not useful to identify a
+ * flake output, for example because it is empty or "default".
+ * Otherwise returns the extracted name.
+ */
+std::optional<std::string> getNameFromURL(const ParsedURL & url);
+
+}
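
A hedged usage sketch of the new helper, assuming Nix's `parseURL` from `url.hh`; the sample URLs and expected names simply trace the regex heuristics in `url-name.cc` above:

    #include "url.hh"
    #include "url-name.hh"
    #include <cassert>

    int main()
    {
        using namespace nix;
        // github flake with no fragment: the repo name is used
        assert(getNameFromURL(parseURL("github:NixOS/nixpkgs")) == "nixpkgs");
        // non-"default" attribute fragment: the last attribute wins
        assert(getNameFromURL(parseURL("github:NixOS/nixpkgs#hello")) == "hello");
        // plain git scheme: the last path segment is used
        assert(getNameFromURL(parseURL("git+https://example.org/repo")) == "repo");
    }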

src/libexpr/primops.cc

@@ -438,9 +438,7 @@ static RegisterPrimOp primop_isNull({
     .doc = R"(
       Return `true` if *e* evaluates to `null`, and `false` otherwise.
-      > **Warning**
-      >
-      > This function is *deprecated*; just write `e == null` instead.
+      This is equivalent to `e == null`.
     )",
     .fun = prim_isNull,
 });
@@ -2072,8 +2070,14 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
     }
     auto storePath = settings.readOnlyMode
-        ? state.store->computeStorePathForText(name, contents, refs)
-        : state.store->addTextToStore(name, contents, refs, state.repair);
+        ? state.store->makeFixedOutputPathFromCA(name, TextInfo {
+            .hash = hashString(HashAlgorithm::SHA256, contents),
+            .references = std::move(refs),
+        })
+        : ({
+            StringSource s { contents };
+            state.store->addToStoreFromDump(s, name, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair);
+        });
     /* Note: we don't need to add `context' to the context of the
        result, since `storePath' itself has references to the paths
@@ -2229,7 +2233,7 @@ static void addPath(
         });
         if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
-            auto dstPath = path.fetchToStore(state.store, name, method, filter.get(), state.repair);
+            auto dstPath = path.fetchToStore(*state.store, name, method, filter.get(), state.repair);
             if (expectedHash && expectedStorePath != dstPath)
                 state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
             state.allowAndSetStorePathString(dstPath, v);
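
The rewritten read-write branch above relies on a GNU/Clang statement expression, `({ ... })`, so the temporary `StringSource` lives only for that one expression. A minimal standalone sketch of the construct (non-standard C++; it needs GCC or Clang):

    #include <string>

    int main()
    {
        // The parenthesized block evaluates to its last expression, and the
        // local `s` is destroyed at the closing brace, much like the
        // StringSource in the addToStoreFromDump call above.
        int x = ({
            std::string s = "abc";
            (int) s.size();
        });
        return x == 3 ? 0 : 1;
    }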

src/libfetchers/cache.cc

@@ -106,7 +106,7 @@ struct CacheImpl : Cache
     }
     void add(
-        ref<Store> store,
+        Store & store,
         const Attrs & inAttrs,
         const Attrs & infoAttrs,
         const StorePath & storePath,
@@ -115,13 +115,13 @@ struct CacheImpl : Cache
         _state.lock()->add.use()
             (attrsToJSON(inAttrs).dump())
             (attrsToJSON(infoAttrs).dump())
-            (store->printStorePath(storePath))
+            (store.printStorePath(storePath))
             (locked)
             (time(0)).exec();
     }
     std::optional<std::pair<Attrs, StorePath>> lookup(
-        ref<Store> store,
+        Store & store,
         const Attrs & inAttrs) override
     {
         if (auto res = lookupExpired(store, inAttrs)) {
@@ -134,7 +134,7 @@ struct CacheImpl : Cache
     }
     std::optional<Result> lookupExpired(
-        ref<Store> store,
+        Store & store,
         const Attrs & inAttrs) override
     {
         auto state(_state.lock());
@@ -148,19 +148,19 @@ struct CacheImpl : Cache
         }
         auto infoJSON = stmt.getStr(0);
-        auto storePath = store->parseStorePath(stmt.getStr(1));
+        auto storePath = store.parseStorePath(stmt.getStr(1));
         auto locked = stmt.getInt(2) != 0;
         auto timestamp = stmt.getInt(3);
-        store->addTempRoot(storePath);
-        if (!store->isValidPath(storePath)) {
+        store.addTempRoot(storePath);
+        if (!store.isValidPath(storePath)) {
             // FIXME: we could try to substitute 'storePath'.
             debug("ignoring disappeared cache entry '%s'", inAttrsJSON);
             return {};
         }
         debug("using cache entry '%s' -> '%s', '%s'",
-            inAttrsJSON, infoJSON, store->printStorePath(storePath));
+            inAttrsJSON, infoJSON, store.printStorePath(storePath));
         return Result {
             .expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),

src/libfetchers/cache.hh

@@ -50,14 +50,14 @@ struct Cache
     /* Old cache for things that have a store path. */
     virtual void add(
-        ref<Store> store,
+        Store & store,
         const Attrs & inAttrs,
         const Attrs & infoAttrs,
         const StorePath & storePath,
         bool locked) = 0;
     virtual std::optional<std::pair<Attrs, StorePath>> lookup(
-        ref<Store> store,
+        Store & store,
         const Attrs & inAttrs) = 0;
     struct Result
@@ -68,7 +68,7 @@ struct Cache
     };
     virtual std::optional<Result> lookupExpired(
-        ref<Store> store,
+        Store & store,
         const Attrs & inAttrs) = 0;
 };
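
A toy sketch of what this signature change expresses (the types below are stand-ins, not Nix's real classes): `ref<Store>` behaves like a non-null shared pointer, so the old signatures shared ownership on every call, whereas `Store &` only borrows the store, which is why call sites elsewhere in this diff change `store` to `*store`:

    #include <memory>
    #include <string>

    struct Store { std::string storeDir = "/nix/store"; };  // stand-in

    // Old style: the callee takes part in ownership it never needed.
    void lookupOld(std::shared_ptr<Store> store) { (void) store->storeDir; }

    // New style: the callee merely borrows the store for the call.
    void lookupNew(Store & store) { (void) store.storeDir; }

    int main()
    {
        auto store = std::make_shared<Store>();
        lookupOld(store);   // reference-count churn on every call
        lookupNew(*store);  // same access, no ownership traffic
    }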

src/libfetchers/fetchers.cc

@@ -374,7 +374,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
 std::pair<StorePath, Input> InputScheme::fetch(ref<Store> store, const Input & input)
 {
     auto [accessor, input2] = getAccessor(store, input);
-    auto storePath = SourcePath(accessor).fetchToStore(store, input2.getName());
+    auto storePath = SourcePath(accessor).fetchToStore(*store, input2.getName());
     return {storePath, input2};
 }

src/libfetchers/git.cc

@@ -368,14 +368,14 @@ struct GitInputScheme : InputScheme
     RepoInfo getRepoInfo(const Input & input) const
     {
-        auto checkHashType = [&](const std::optional<Hash> & hash)
+        auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
         {
             if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256))
                 throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
         };
         if (auto rev = input.getRev())
-            checkHashType(rev);
+            checkHashAlgorithm(rev);
         RepoInfo repoInfo;

src/libfetchers/input-accessor.cc

@@ -5,10 +5,10 @@
 namespace nix {
 StorePath InputAccessor::fetchToStore(
-    ref<Store> store,
+    Store & store,
     const CanonPath & path,
     std::string_view name,
-    FileIngestionMethod method,
+    ContentAddressMethod method,
     PathFilter * filter,
     RepairFlag repair)
 {
@@ -20,10 +20,24 @@ StorePath InputAccessor::fetchToStore(
     if (!filter && fingerprint) {
         cacheKey = fetchers::Attrs{
             {"_what", "fetchToStore"},
-            {"store", store->storeDir},
+            {"store", store.storeDir},
             {"name", std::string(name)},
             {"fingerprint", *fingerprint},
-            {"method", (uint8_t) method},
+            {
+                "method",
+                std::visit(overloaded {
+                    [](const TextIngestionMethod &) {
+                        return "text";
+                    },
+                    [](const FileIngestionMethod & fim) {
+                        switch (fim) {
+                        case FileIngestionMethod::Flat: return "flat";
+                        case FileIngestionMethod::Recursive: return "nar";
+                        default: assert(false);
+                        }
+                    },
+                }, method.raw),
+            },
             {"path", path.abs()}
         };
         if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) {
@@ -35,17 +49,14 @@ StorePath InputAccessor::fetchToStore(
     Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", showPath(path)));
-    auto source = sinkToSource([&](Sink & sink) {
-        if (method == FileIngestionMethod::Recursive)
-            dumpPath(path, sink, filter ? *filter : defaultPathFilter);
-        else
-            readFile(path, sink);
-    });
+    auto filter2 = filter ? *filter : defaultPathFilter;
     auto storePath =
         settings.readOnlyMode
-        ? store->computeStorePathFromDump(*source, name, method, HashAlgorithm::SHA256).first
-        : store->addToStoreFromDump(*source, name, method, HashAlgorithm::SHA256, repair);
+        ? store.computeStorePath(
+            name, *this, path, method, HashAlgorithm::SHA256, {}, filter2).first
+        : store.addToStore(
+            name, *this, path, method, HashAlgorithm::SHA256, {}, filter2, repair);
     if (cacheKey)
         fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);
@@ -60,9 +71,9 @@ std::ostream & operator << (std::ostream & str, const SourcePath & path)
 }
 StorePath SourcePath::fetchToStore(
-    ref<Store> store,
+    Store & store,
     std::string_view name,
-    FileIngestionMethod method,
+    ContentAddressMethod method,
     PathFilter * filter,
     RepairFlag repair) const
 {
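
The cache-key code above serializes the new `ContentAddressMethod` variant with the `overloaded` visitor helper. A self-contained sketch of that idiom, with stand-in types (the real `TextIngestionMethod` and `FileIngestionMethod` live in Nix's content-address headers):

    #include <cassert>
    #include <string>
    #include <variant>

    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

    struct TextIngestionMethod {};                       // stand-in
    enum class FileIngestionMethod { Flat, Recursive };  // stand-in
    using Method = std::variant<TextIngestionMethod, FileIngestionMethod>;

    // Maps each variant alternative to the string stored in the cache key.
    std::string methodName(const Method & m)
    {
        return std::visit(overloaded {
            [](const TextIngestionMethod &) -> std::string { return "text"; },
            [](const FileIngestionMethod & fim) -> std::string {
                return fim == FileIngestionMethod::Flat ? "flat" : "nar";
            },
        }, m);
    }

    int main()
    {
        assert(methodName(FileIngestionMethod::Recursive) == "nar");
        assert(methodName(TextIngestionMethod{}) == "text");
    }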

src/libfetchers/input-accessor.hh

@@ -30,10 +30,10 @@ struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<Inpu
     }
     StorePath fetchToStore(
-        ref<Store> store,
+        Store & store,
         const CanonPath & path,
         std::string_view name = "source",
-        FileIngestionMethod method = FileIngestionMethod::Recursive,
+        ContentAddressMethod method = FileIngestionMethod::Recursive,
         PathFilter * filter = nullptr,
         RepairFlag repair = NoRepair);
 };
@@ -116,9 +116,9 @@ struct SourcePath
      * Copy this `SourcePath` to the Nix store.
      */
     StorePath fetchToStore(
-        ref<Store> store,
+        Store & store,
         std::string_view name = "source",
-        FileIngestionMethod method = FileIngestionMethod::Recursive,
+        ContentAddressMethod method = FileIngestionMethod::Recursive,
         PathFilter * filter = nullptr,
         RepairFlag repair = NoRepair) const;

src/libfetchers/mercurial.cc

@@ -6,6 +6,7 @@
 #include "tarfile.hh"
 #include "store-api.hh"
 #include "url-parts.hh"
+#include "posix-source-accessor.hh"
 #include "fetch-settings.hh"
@@ -210,7 +211,12 @@ struct MercurialInputScheme : InputScheme
                 return files.count(file);
             };
-            auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, filter);
+            PosixSourceAccessor accessor;
+            auto storePath = store->addToStore(
+                input.getName(),
+                accessor, CanonPath { actualPath },
+                FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {},
+                filter);
             return {std::move(storePath), input};
         }
@@ -218,7 +224,7 @@ struct MercurialInputScheme : InputScheme
         if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");
-        auto checkHashType = [&](const std::optional<Hash> & hash)
+        auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
         {
             if (hash.has_value() && hash->algo != HashAlgorithm::SHA1)
                 throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
@@ -227,7 +233,7 @@ struct MercurialInputScheme : InputScheme
         auto getLockedAttrs = [&]()
         {
-            checkHashType(input.getRev());
+            checkHashAlgorithm(input.getRev());
             return Attrs({
                 {"type", "hg"},
@@ -246,7 +252,7 @@ struct MercurialInputScheme : InputScheme
         };
         if (input.getRev()) {
-            if (auto res = getCache()->lookup(store, getLockedAttrs()))
+            if (auto res = getCache()->lookup(*store, getLockedAttrs()))
                 return makeResult(res->first, std::move(res->second));
         }
@@ -259,7 +265,7 @@ struct MercurialInputScheme : InputScheme
             {"ref", *input.getRef()},
         });
-        if (auto res = getCache()->lookup(store, unlockedAttrs)) {
+        if (auto res = getCache()->lookup(*store, unlockedAttrs)) {
             auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1);
             if (!input.getRev() || input.getRev() == rev2) {
                 input.attrs.insert_or_assign("rev", rev2.gitRev());
@@ -305,7 +311,7 @@ struct MercurialInputScheme : InputScheme
         auto revCount = std::stoull(tokens[1]);
         input.attrs.insert_or_assign("ref", tokens[2]);
-        if (auto res = getCache()->lookup(store, getLockedAttrs()))
+        if (auto res = getCache()->lookup(*store, getLockedAttrs()))
             return makeResult(res->first, std::move(res->second));
         Path tmpDir = createTempDir();
@@ -315,7 +321,8 @@ struct MercurialInputScheme : InputScheme
         deletePath(tmpDir + "/.hg_archival.txt");
-        auto storePath = store->addToStore(name, tmpDir);
+        PosixSourceAccessor accessor;
+        auto storePath = store->addToStore(name, accessor, CanonPath { tmpDir });
         Attrs infoAttrs({
             {"rev", input.getRev()->gitRev()},
@@ -324,14 +331,14 @@ struct MercurialInputScheme : InputScheme
         if (!_input.getRev())
             getCache()->add(
-                store,
+                *store,
                 unlockedAttrs,
                 infoAttrs,
                 storePath,
                 false);
         getCache()->add(
-            store,
+            *store,
             getLockedAttrs(),
             infoAttrs,
             storePath,

src/libfetchers/tarball.cc

@@ -8,6 +8,7 @@
 #include "tarfile.hh"
 #include "types.hh"
 #include "split.hh"
+#include "posix-source-accessor.hh"
 namespace nix::fetchers {
@@ -26,7 +27,7 @@ DownloadFileResult downloadFile(
         {"name", name},
     });
-    auto cached = getCache()->lookupExpired(store, inAttrs);
+    auto cached = getCache()->lookupExpired(*store, inAttrs);
     auto useCached = [&]() -> DownloadFileResult
     {
@@ -91,7 +92,7 @@ DownloadFileResult downloadFile(
     }
     getCache()->add(
-        store,
+        *store,
         inAttrs,
         infoAttrs,
         *storePath,
@@ -99,7 +100,7 @@ DownloadFileResult downloadFile(
     if (url != res.effectiveUri)
         getCache()->add(
-            store,
+            *store,
             {
                 {"type", "file"},
                 {"url", res.effectiveUri},
@@ -130,7 +131,7 @@ DownloadTarballResult downloadTarball(
         {"name", name},
     });
-    auto cached = getCache()->lookupExpired(store, inAttrs);
+    auto cached = getCache()->lookupExpired(*store, inAttrs);
     if (cached && !cached->expired)
         return {
@@ -156,7 +157,8 @@ DownloadTarballResult downloadTarball(
             throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
         auto topDir = tmpDir + "/" + members.begin()->name;
         lastModified = lstat(topDir).st_mtime;
-        unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, defaultPathFilter, NoRepair);
+        PosixSourceAccessor accessor;
+        unpackedStorePath = store->addToStore(name, accessor, CanonPath { topDir }, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, defaultPathFilter, NoRepair);
     }
     Attrs infoAttrs({
@@ -168,7 +170,7 @@ DownloadTarballResult downloadTarball(
         infoAttrs.emplace("immutableUrl", *res.immutableUrl);
     getCache()->add(
-        store,
+        *store,
         inAttrs,
         infoAttrs,
         *unpackedStorePath,

src/libstore/binary-cache-store.cc

@@ -12,6 +12,7 @@
 #include "thread-pool.hh"
 #include "callback.hh"
 #include "signals.hh"
+#include "archive.hh"
 #include <chrono>
 #include <future>
@@ -300,24 +301,60 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
     }});
 }
-StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name,
-    FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
+StorePath BinaryCacheStore::addToStoreFromDump(
+    Source & dump,
+    std::string_view name,
+    ContentAddressMethod method,
+    HashAlgorithm hashAlgo,
+    const StorePathSet & references,
+    RepairFlag repair)
 {
+    std::optional<Hash> caHash;
+    std::string nar;
+
+    if (auto * dump2p = dynamic_cast<StringSource *>(&dump)) {
+        auto & dump2 = *dump2p;
+        // Hack, this gives us a "replayable" source so we can compute
+        // multiple hashes more easily.
+        caHash = hashString(HashAlgorithm::SHA256, dump2.s);
+        switch (method.getFileIngestionMethod()) {
+        case FileIngestionMethod::Recursive:
+            // The dump is already NAR in this case, just use it.
+            nar = dump2.s;
+            break;
+        case FileIngestionMethod::Flat:
+            // The dump is Flat, so we need to convert it to NAR with a
+            // single file.
+            StringSink s;
+            dumpString(dump2.s, s);
+            nar = std::move(s.s);
+            break;
+        }
+    } else {
+        // Otherwise, we have to do the same hashing as NAR so our single
+        // hash will suffice for both purposes.
         if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
             unsupported("addToStoreFromDump");
-    return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
+    }
+    StringSource narDump { nar };
+
+    // Use `narDump` if we wrote to `nar`.
+    Source & narDump2 = nar.size() > 0
+        ? static_cast<Source &>(narDump)
+        : dump;
+
+    return addToStoreCommon(narDump2, repair, CheckSigs, [&](HashResult nar) {
         ValidPathInfo info {
             *this,
             name,
-            FixedOutputInfo {
-                .method = method,
-                .hash = nar.first,
-                .references = {
+            ContentAddressWithReferences::fromParts(
+                method,
+                caHash ? *caHash : nar.first,
+                {
                     .others = references,
                     // caller is not capable of creating a self-reference, because this is content-addressed without modulus
                     .self = false,
-                },
-            },
+                }),
             nar.first,
         };
         info.narSize = nar.second;
@@ -400,71 +437,35 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
 StorePath BinaryCacheStore::addToStore(
     std::string_view name,
-    const Path & srcPath,
-    FileIngestionMethod method,
+    SourceAccessor & accessor,
+    const CanonPath & path,
+    ContentAddressMethod method,
     HashAlgorithm hashAlgo,
+    const StorePathSet & references,
     PathFilter & filter,
-    RepairFlag repair,
-    const StorePathSet & references)
+    RepairFlag repair)
 {
     /* FIXME: Make BinaryCacheStore::addToStoreCommon support
        non-recursive+sha256 so we can just use the default
       implementation of this method in terms of addToStoreFromDump. */
-    HashSink sink { hashAlgo };
-    if (method == FileIngestionMethod::Recursive) {
-        dumpPath(srcPath, sink, filter);
-    } else {
-        readFile(srcPath, sink);
-    }
-    auto h = sink.finish().first;
+    auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first;
     auto source = sinkToSource([&](Sink & sink) {
-        dumpPath(srcPath, sink, filter);
+        accessor.dumpPath(path, sink, filter);
     });
     return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
         ValidPathInfo info {
             *this,
             name,
-            FixedOutputInfo {
-                .method = method,
-                .hash = h,
-                .references = {
+            ContentAddressWithReferences::fromParts(
+                method,
+                h,
+                {
                     .others = references,
                     // caller is not capable of creating a self-reference, because this is content-addressed without modulus
                     .self = false,
-                },
-            },
-            nar.first,
-        };
-        info.narSize = nar.second;
-        return info;
-    })->path;
-}
-
-StorePath BinaryCacheStore::addTextToStore(
-    std::string_view name,
-    std::string_view s,
-    const StorePathSet & references,
-    RepairFlag repair)
-{
-    auto textHash = hashString(HashAlgorithm::SHA256, s);
-    auto path = makeTextPath(name, TextInfo { { textHash }, references });
-
-    if (!repair && isValidPath(path))
-        return path;
-
-    StringSink sink;
-    dumpString(s, sink);
-    StringSource source(sink.s);
-    return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
-        ValidPathInfo info {
-            *this,
-            std::string { name },
-            TextInfo {
-                .hash = textHash,
-                .references = references,
-            },
+                }),
             nar.first,
         };
         info.narSize = nar.second;
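
The `dynamic_cast<StringSource *>` test above is what makes the dump "replayable": a string-backed source can be consumed twice (once per hash), while a streaming source gets only one pass. A toy sketch of that dispatch, with stand-in types rather than Nix's real `Source` hierarchy:

    #include <iostream>
    #include <string>

    struct Source { virtual ~Source() = default; };   // stand-in
    struct StringSource : Source { std::string s; };  // stand-in

    void addFromDump(Source & dump)
    {
        if (auto * str = dynamic_cast<StringSource *>(&dump))
            // String-backed: the bytes can be hashed several times over.
            std::cout << "replayable, " << str->s.size() << " bytes\n";
        else
            // Streaming: a single pass, so one hash must serve all purposes.
            std::cout << "single pass only\n";
    }

    int main()
    {
        StringSource s;
        s.s = "nar bytes";
        addFromDump(s);
    }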

src/libstore/binary-cache-store.hh

@@ -123,22 +123,22 @@ public:
     void addToStore(const ValidPathInfo & info, Source & narSource,
         RepairFlag repair, CheckSigsFlag checkSigs) override;
-    StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;
+    StorePath addToStoreFromDump(
+        Source & dump,
+        std::string_view name,
+        ContentAddressMethod method,
+        HashAlgorithm hashAlgo,
+        const StorePathSet & references,
+        RepairFlag repair) override;
     StorePath addToStore(
         std::string_view name,
-        const Path & srcPath,
-        FileIngestionMethod method,
+        SourceAccessor & accessor,
+        const CanonPath & srcPath,
+        ContentAddressMethod method,
         HashAlgorithm hashAlgo,
-        PathFilter & filter,
-        RepairFlag repair,
-        const StorePathSet & references) override;
-
-    StorePath addTextToStore(
-        std::string_view name,
-        std::string_view s,
         const StorePathSet & references,
+        PathFilter & filter,
         RepairFlag repair) override;
     void registerDrvOutput(const Realisation & info) override;

View file

@@ -20,6 +20,7 @@
 #include "child.hh"
 #include "unix-domain-socket.hh"
 #include "posix-fs-canonicalise.hh"
+#include "posix-source-accessor.hh"

 #include <regex>
 #include <queue>

@@ -1291,12 +1292,13 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
     StorePath addToStore(
         std::string_view name,
-        const Path & srcPath,
-        FileIngestionMethod method,
+        SourceAccessor & accessor,
+        const CanonPath & srcPath,
+        ContentAddressMethod method,
         HashAlgorithm hashAlgo,
+        const StorePathSet & references,
         PathFilter & filter,
-        RepairFlag repair,
-        const StorePathSet & references) override
+        RepairFlag repair) override
     { throw Error("addToStore"); }

     void addToStore(const ValidPathInfo & info, Source & narSource,

@@ -1306,26 +1308,15 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
         goal.addDependency(info.path);
     }

-    StorePath addTextToStore(
-        std::string_view name,
-        std::string_view s,
-        const StorePathSet & references,
-        RepairFlag repair = NoRepair) override
-    {
-        auto path = next->addTextToStore(name, s, references, repair);
-        goal.addDependency(path);
-        return path;
-    }
-
     StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        FileIngestionMethod method,
+        ContentAddressMethod method,
         HashAlgorithm hashAlgo,
-        RepairFlag repair,
-        const StorePathSet & references) override
+        const StorePathSet & references,
+        RepairFlag repair) override
     {
-        auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair, references);
+        auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, references, repair);
         goal.addDependency(path);
         return path;
     }

@@ -2453,8 +2444,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                     throw BuildError(
                         "output path %1% without valid stats info",
                         actualPath);
-                if (outputHash.method == ContentAddressMethod { FileIngestionMethod::Flat } ||
-                    outputHash.method == ContentAddressMethod { TextIngestionMethod {} })
+                if (outputHash.method.getFileIngestionMethod() == FileIngestionMethod::Flat)
                 {
                     /* The output path should be a regular file without execute permission. */
                     if (!S_ISREG(st->st_mode) || (st->st_mode & S_IXUSR) != 0)

@@ -2466,38 +2456,23 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
             rewriteOutput(outputRewrites);
             /* FIXME optimize and deduplicate with addToStore */
             std::string oldHashPart { scratchPath->hashPart() };
+            auto got = ({
                 HashModuloSink caSink { outputHash.hashAlgo, oldHashPart };
-            std::visit(overloaded {
-                [&](const TextIngestionMethod &) {
-                    readFile(actualPath, caSink);
-                },
-                [&](const FileIngestionMethod & m2) {
-                    switch (m2) {
-                    case FileIngestionMethod::Recursive:
-                        dumpPath(actualPath, caSink);
-                        break;
-                    case FileIngestionMethod::Flat:
-                        readFile(actualPath, caSink);
-                        break;
-                    }
-                },
-            }, outputHash.method.raw);
-            auto got = caSink.finish().first;
-            auto optCA = ContentAddressWithReferences::fromPartsOpt(
-                outputHash.method,
-                std::move(got),
-                rewriteRefs());
-            if (!optCA) {
-                // TODO track distinct failure modes separately (at the time of
-                // writing there is just one but `nullopt` is unclear) so this
-                // message can't get out of sync.
-                throw BuildError("output path '%s' has illegal content address, probably a spurious self-reference with text hashing");
-            }
+                PosixSourceAccessor accessor;
+                dumpPath(
+                    accessor, CanonPath { actualPath },
+                    caSink,
+                    outputHash.method.getFileIngestionMethod());
+                caSink.finish().first;
+            });

             ValidPathInfo newInfo0 {
                 worker.store,
                 outputPathName(drv->name, outputName),
-                std::move(*optCA),
+                ContentAddressWithReferences::fromParts(
+                    outputHash.method,
+                    std::move(got),
+                    rewriteRefs()),
                 Hash::dummy,
             };
             if (*scratchPath != newInfo0.path) {

@@ -2511,9 +2486,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                         std::string(newInfo0.path.hashPart())}});
             }

-            HashResult narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
+            {
+                PosixSourceAccessor accessor;
+                HashResult narHashAndSize = hashPath(
+                    accessor, CanonPath { actualPath },
+                    FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
                 newInfo0.narHash = narHashAndSize.first;
                 newInfo0.narSize = narHashAndSize.second;
+            }

             assert(newInfo0.ca);
             return newInfo0;

@@ -2531,7 +2511,10 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                 std::string { scratchPath->hashPart() },
                 std::string { requiredFinalPath.hashPart() });
             rewriteOutput(outputRewrites);
-            auto narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
+            PosixSourceAccessor accessor;
+            HashResult narHashAndSize = hashPath(
+                accessor, CanonPath { actualPath },
+                FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
             ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
             newInfo0.narSize = narHashAndSize.second;
             auto refs = rewriteRefs();
@@ -68,6 +68,7 @@ R""(
 (allow file*
        (literal "/dev/null")
        (literal "/dev/random")
+       (literal "/dev/stderr")
        (literal "/dev/stdin")
        (literal "/dev/stdout")
        (literal "/dev/tty")
@@ -519,7 +519,9 @@ bool Worker::pathContentsGood(const StorePath & path)
     if (!pathExists(store.printStorePath(path)))
         res = false;
     else {
-        HashResult current = hashPath(info->narHash.algo, store.printStorePath(path));
+        HashResult current = hashPath(
+            *store.getFSAccessor(), CanonPath { store.printStorePath(path) },
+            FileIngestionMethod::Recursive, info->narHash.algo);
         Hash nullHash(HashAlgorithm::SHA256);
         res = info->narHash == nullHash || info->narHash == current.first;
     }
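The worker now re-hashes through the store's filesystem accessor rather than a raw path. Isolated, the check reads roughly as follows (a sketch under the same assumptions as the hunk above: a valid `info` with a recorded `narHash`):

```cpp
// Re-hash the store path's NAR serialization via the store's accessor and
// compare it with the recorded narHash (sketch of Worker::pathContentsGood).
bool contentsGood(Store & store, const ValidPathInfo & info)
{
    HashResult current = hashPath(
        *store.getFSAccessor(), CanonPath { store.printStorePath(info.path) },
        FileIngestionMethod::Recursive, info.narHash.algo);
    return info.narHash == current.first;
}
```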
@@ -50,6 +50,18 @@ std::string ContentAddressMethod::render(HashAlgorithm ha) const
     }, raw);
 }

+FileIngestionMethod ContentAddressMethod::getFileIngestionMethod() const
+{
+    return std::visit(overloaded {
+        [&](const TextIngestionMethod & th) {
+            return FileIngestionMethod::Flat;
+        },
+        [&](const FileIngestionMethod & fim) {
+            return fim;
+        }
+    }, raw);
+}
+
 std::string ContentAddress::render() const
 {
     return std::visit(overloaded {

@@ -79,7 +91,7 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
         prefix = *optPrefix;
     }

-    auto parseHashType_ = [&](){
+    auto parseHashAlgorithm_ = [&](){
         auto hashTypeRaw = splitPrefixTo(rest, ':');
         if (!hashTypeRaw)
             throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", wholeInput);

@@ -90,7 +102,7 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
     // Switch on prefix
     if (prefix == "text") {
         // No parsing of the ingestion method, "text" only support flat.
-        HashAlgorithm hashAlgo = parseHashType_();
+        HashAlgorithm hashAlgo = parseHashAlgorithm_();
         return {
             TextIngestionMethod {},
             std::move(hashAlgo),

@@ -100,7 +112,7 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
         auto method = FileIngestionMethod::Flat;
         if (splitPrefix(rest, "r:"))
             method = FileIngestionMethod::Recursive;
-        HashAlgorithm hashAlgo = parseHashType_();
+        HashAlgorithm hashAlgo = parseHashAlgorithm_();
         return {
             std::move(method),
             std::move(hashAlgo),

@@ -176,13 +188,13 @@ ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const Con
     }, ca.method.raw);
 }

-std::optional<ContentAddressWithReferences> ContentAddressWithReferences::fromPartsOpt(
-    ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept
+ContentAddressWithReferences ContentAddressWithReferences::fromParts(
+    ContentAddressMethod method, Hash hash, StoreReferences refs)
 {
     return std::visit(overloaded {
-        [&](TextIngestionMethod _) -> std::optional<ContentAddressWithReferences> {
+        [&](TextIngestionMethod _) -> ContentAddressWithReferences {
             if (refs.self)
-                return std::nullopt;
+                throw Error("self-reference not allowed with text hashing");
             return ContentAddressWithReferences {
                 TextInfo {
                     .hash = std::move(hash),

@@ -190,7 +202,7 @@ std::optional<ContentAddressWithReferences> ContentAddressWithReferences::fromPa
                 }
             };
         },
-        [&](FileIngestionMethod m2) -> std::optional<ContentAddressWithReferences> {
+        [&](FileIngestionMethod m2) -> ContentAddressWithReferences {
             return ContentAddressWithReferences {
                 FixedOutputInfo {
                     .method = m2,
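Two semantic points fall out of this hunk. First, `getFileIngestionMethod()` collapses `TextIngestionMethod` to `FileIngestionMethod::Flat`: text hashing and flat hashing serialize a single file identically and differ only in how the store path is computed. Second, `fromParts` is now a partial function that throws instead of returning `std::nullopt`. A hedged sketch of both invariants:

```cpp
#include "content-address.hh"
#include <cassert>

using namespace nix;

void contentAddressInvariants()
{
    // Text ingestion reuses the flat (single-file) serialization.
    ContentAddressMethod text = TextIngestionMethod {};
    assert(text.getFileIngestionMethod() == FileIngestionMethod::Flat);

    // But a text hash cannot encode a self-reference: fromParts now throws
    // where fromPartsOpt used to return std::nullopt.
    try {
        ContentAddressWithReferences::fromParts(
            text, Hash::dummy, StoreReferences { .self = true });
        assert(false); // unreachable: the call above must throw
    } catch (Error &) {
        // "self-reference not allowed with text hashing"
    }
}
```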
@@ -4,6 +4,7 @@
 #include <variant>
 #include "hash.hh"
 #include "path.hh"
+#include "file-content-address.hh"
 #include "comparator.hh"
 #include "variant-wrapper.hh"

@@ -31,22 +32,6 @@ namespace nix {
  */
 struct TextIngestionMethod : std::monostate { };

-/**
- * An enumeration of the main ways we can serialize file system
- * objects.
- */
-enum struct FileIngestionMethod : uint8_t {
-    /**
-     * Flat-file hashing. Directly ingest the contents of a single file
-     */
-    Flat = 0,
-    /**
-     * Recursive (or NAR) hashing. Serializes the file-system object in Nix
-     * Archive format and ingest that
-     */
-    Recursive = 1
-};
-
 /**
  * Compute the prefix to the hash algorithm which indicates how the
  * files were ingested.

@@ -54,7 +39,7 @@ enum struct FileIngestionMethod : uint8_t {
 std::string makeFileIngestionPrefix(FileIngestionMethod m);

 /**
- * An enumeration of all the ways we can serialize file system objects.
+ * An enumeration of all the ways we can content-address store objects.
  *
  * Just the type of a content address. Combine with the hash itself, and
  * we have a `ContentAddress` as defined below. Combine that, in turn,

@@ -102,7 +87,15 @@ struct ContentAddressMethod
      *
      * The rough inverse of `parse()`.
      */
-    std::string render(HashAlgorithm ha) const;
+    std::string render(HashAlgorithm ht) const;
+
+    /**
+     * Get the underlying way to content-address file system objects.
+     *
+     * Different ways of hashing store objects may use the same method
+     * for hashing file systeme objects.
+     */
+    FileIngestionMethod getFileIngestionMethod() const;
 };

@@ -116,11 +109,11 @@ struct ContentAddressMethod
  * serialisation methods (flat file vs NAR). Thus, ca has one of the
  * following forms:
  *
- * - text:sha256:<sha256 hash of file contents>: For paths
- *   computed by Store::makeTextPath() / Store::addTextToStore().
+ * - `TextIngestionMethod`:
+ *   text:sha256:<sha256 hash of file contents>
  *
- * - fixed:<r?>:<ht>:<h>: For paths computed by
- *   Store::makeFixedOutputPath() / Store::addToStore().
+ * - `FixedIngestionMethod`:
+ *   fixed:<r?>:<hash type>:<hash of file contents>
  */
 struct ContentAddress
 {

@@ -266,11 +259,12 @@ struct ContentAddressWithReferences
      *
      * @param refs References to other store objects or oneself.
      *
-     * Do note that not all combinations are supported; `nullopt` is
-     * returns for invalid combinations.
+     * @note note that all combinations are supported. This is a
+     * *partial function* and exceptions will be thrown for invalid
+     * combinations.
      */
-    static std::optional<ContentAddressWithReferences> fromPartsOpt(
-        ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept;
+    static ContentAddressWithReferences fromParts(
+        ContentAddressMethod method, Hash hash, StoreReferences refs);

     ContentAddressMethod getMethod() const;
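For reference, the two documented forms correspond to rendered strings like `text:sha256:<hash>` and `fixed:r:sha256:<hash>`; the method-plus-algorithm prefix is what `ContentAddressMethod::parse` / `render` handle. A small sketch, assuming the prefix grammar shown in the parser hunk above:

```cpp
#include "content-address.hh"
#include <cassert>

using namespace nix;

void prefixRoundTrip()
{
    // "text" implies flat serialization; "fixed:r" is recursive (NAR).
    auto [m1, a1] = ContentAddressMethod::parse("text:sha256");
    auto [m2, a2] = ContentAddressMethod::parse("fixed:r:sha256");

    assert(m1.render(a1) == "text:sha256");
    assert(m2.render(a2) == "fixed:r:sha256");
}
```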
@@ -403,22 +403,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
                 auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr);
                 auto hashAlgo = hashAlgo_; // work around clang bug
                 FramedSource source(from);
-                // TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
-                return std::visit(overloaded {
-                    [&](const TextIngestionMethod &) {
-                        if (hashAlgo != HashAlgorithm::SHA256)
-                            throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
-                                name, printHashAlgo(hashAlgo));
-                        // We could stream this by changing Store
-                        std::string contents = source.drain();
-                        auto path = store->addTextToStore(name, contents, refs, repair);
-                        return store->queryPathInfo(path);
-                    },
-                    [&](const FileIngestionMethod & fim) {
-                        auto path = store->addToStoreFromDump(source, name, fim, hashAlgo, repair, refs);
-                        return store->queryPathInfo(path);
-                    },
-                }, contentAddressMethod.raw);
+                // TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store.
+                auto path = store->addToStoreFromDump(source, name, contentAddressMethod, hashAlgo, refs, repair);
+                return store->queryPathInfo(path);
             }();
             logger->stopWork();

@@ -496,7 +483,10 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         std::string s = readString(from);
         auto refs = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
         logger->startWork();
-        auto path = store->addTextToStore(suffix, s, refs, NoRepair);
+        auto path = ({
+            StringSource source { s };
+            store->addToStoreFromDump(source, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, NoRepair);
+        });
         logger->stopWork();
         to << store->printStorePath(path);
         break;
@@ -143,8 +143,14 @@ StorePath writeDerivation(Store & store,
     auto suffix = std::string(drv.name) + drvExtension;
     auto contents = drv.unparse(store, false);
     return readOnly || settings.readOnlyMode
-        ? store.computeStorePathForText(suffix, contents, references)
-        : store.addTextToStore(suffix, contents, references, repair);
+        ? store.makeFixedOutputPathFromCA(suffix, TextInfo {
+            .hash = hashString(HashAlgorithm::SHA256, contents),
+            .references = std::move(references),
+        })
+        : ({
+            StringSource s { contents };
+            store.addToStoreFromDump(s, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair);
+        });
 }
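The two branches must agree on the resulting path: the read-only branch computes the text path from a `TextInfo`, while the write branch actually ingests the same bytes. A sketch of that equivalence (a hypothetical check, not in the patch):

```cpp
#include "store-api.hh"
#include <cassert>

using namespace nix;

// Sketch: writeDerivation's read-only path computation and its actual
// ingestion must produce the same store path for the same contents.
void checkWriteDerivationAgreement(Store & store,
    std::string_view suffix, std::string contents, StorePathSet references)
{
    auto computed = store.makeFixedOutputPathFromCA(suffix, TextInfo {
        .hash = hashString(HashAlgorithm::SHA256, contents),
        .references = references,
    });

    StringSource s { contents };
    auto added = store.addToStoreFromDump(
        s, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, references);

    assert(computed == added);
}
```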
@@ -58,13 +58,6 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
         RepairFlag repair, CheckSigsFlag checkSigs) override
     { unsupported("addToStore"); }

-    StorePath addTextToStore(
-        std::string_view name,
-        std::string_view s,
-        const StorePathSet & references,
-        RepairFlag repair) override
-    { unsupported("addTextToStore"); }
-
     void narFromPath(const StorePath & path, Sink & sink) override
     { unsupported("narFromPath"); }
@@ -631,7 +631,7 @@ public:
           At least one of the following condition must be met
           for Nix to accept copying a store object from another
-          Nix store (such as a substituter):
+          Nix store (such as a [substituter](#conf-substituters)):

           - the store object has been signed using a key in the trusted keys list
           - the [`require-sigs`](#conf-require-sigs) option has been set to `false`
@@ -60,20 +60,14 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
     StorePath addToStore(
         std::string_view name,
-        const Path & srcPath,
-        FileIngestionMethod method,
+        SourceAccessor & accessor,
+        const CanonPath & srcPath,
+        ContentAddressMethod method,
         HashAlgorithm hashAlgo,
-        PathFilter & filter,
-        RepairFlag repair,
-        const StorePathSet & references) override
-    { unsupported("addToStore"); }
-
-    StorePath addTextToStore(
-        std::string_view name,
-        std::string_view s,
         const StorePathSet & references,
+        PathFilter & filter,
         RepairFlag repair) override
-    { unsupported("addTextToStore"); }
+    { unsupported("addToStore"); }

 private:
@@ -13,6 +13,7 @@
 #include "compression.hh"
 #include "signals.hh"
 #include "posix-fs-canonicalise.hh"
+#include "posix-source-accessor.hh"

 #include <iostream>
 #include <algorithm>

@@ -1088,11 +1089,22 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
         if (info.ca) {
             auto & specified = *info.ca;
-            auto actualHash = hashCAPath(
-                specified.method,
-                specified.hash.algo,
-                info.path
-            );
+            auto actualHash = ({
+                HashModuloSink caSink {
+                    specified.hash.algo,
+                    std::string { info.path.hashPart() },
+                };
+                PosixSourceAccessor accessor;
+                dumpPath(
+                    *getFSAccessor(false),
+                    CanonPath { printStorePath(info.path) },
+                    caSink,
+                    specified.method.getFileIngestionMethod());
+                ContentAddress {
+                    .method = specified.method,
+                    .hash = caSink.finish().first,
+                };
+            });
             if (specified.hash != actualHash.hash) {
                 throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
                     printStorePath(info.path),

@@ -1115,8 +1127,13 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 }

-StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name,
-    FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
+StorePath LocalStore::addToStoreFromDump(
+    Source & source0,
+    std::string_view name,
+    ContentAddressMethod method,
+    HashAlgorithm hashAlgo,
+    const StorePathSet & references,
+    RepairFlag repair)
 {
     /* For computing the store path. */
     auto hashSink = std::make_unique<HashSink>(hashAlgo);

@@ -1166,25 +1183,21 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
         delTempDir = std::make_unique<AutoDelete>(tempDir);
         tempPath = tempDir + "/x";

-        if (method == FileIngestionMethod::Recursive)
-            restorePath(tempPath, bothSource);
-        else
-            writeFile(tempPath, bothSource);
+        restorePath(tempPath, bothSource, method.getFileIngestionMethod());

         dump.clear();
     }

     auto [hash, size] = hashSink->finish();

-    ContentAddressWithReferences desc = FixedOutputInfo {
-        .method = method,
-        .hash = hash,
-        .references = {
+    auto desc = ContentAddressWithReferences::fromParts(
+        method,
+        hash,
+        {
             .others = references,
             // caller is not capable of creating a self-reference, because this is content-addressed without modulus
             .self = false,
-        },
-    };
+        });

     auto dstPath = makeFixedOutputPathFromCA(name, desc);

@@ -1207,11 +1220,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
         if (inMemory) {
             StringSource dumpSource { dump };
-            /* Restore from the NAR in memory. */
-            if (method == FileIngestionMethod::Recursive)
-                restorePath(realPath, dumpSource);
-            else
-                writeFile(realPath, dumpSource);
+            /* Restore from the buffer in memory. */
+            restorePath(realPath, dumpSource, method.getFileIngestionMethod());
         } else {
             /* Move the temporary path we restored above. */
             moveFile(tempPath, realPath);

@@ -1247,58 +1257,6 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
 }

-StorePath LocalStore::addTextToStore(
-    std::string_view name,
-    std::string_view s,
-    const StorePathSet & references, RepairFlag repair)
-{
-    auto hash = hashString(HashAlgorithm::SHA256, s);
-    auto dstPath = makeTextPath(name, TextInfo {
-        .hash = hash,
-        .references = references,
-    });
-
-    addTempRoot(dstPath);
-
-    if (repair || !isValidPath(dstPath)) {
-
-        auto realPath = Store::toRealPath(dstPath);
-
-        PathLocks outputLock({realPath});
-
-        if (repair || !isValidPath(dstPath)) {
-
-            deletePath(realPath);
-
-            autoGC();
-
-            writeFile(realPath, s);
-
-            canonicalisePathMetaData(realPath, {});
-
-            StringSink sink;
-            dumpString(s, sink);
-            auto narHash = hashString(HashAlgorithm::SHA256, sink.s);
-
-            optimisePath(realPath, repair);
-
-            ValidPathInfo info { dstPath, narHash };
-            info.narSize = sink.s.size();
-            info.references = references;
-            info.ca = {
-                .method = TextIngestionMethod {},
-                .hash = hash,
-            };
-            registerValidPath(info);
-        }
-
-        outputLock.setDeletion(true);
-    }
-
-    return dstPath;
-}
-
 /* Create a temporary directory in the store that won't be
    garbage-collected until the returned FD is closed. */
 std::pair<Path, AutoCloseFD> LocalStore::createTempDirInStore()

@@ -1389,7 +1347,10 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
         for (auto & link : readDirectory(linksDir)) {
             printMsg(lvlTalkative, "checking contents of '%s'", link.name);
             Path linkPath = linksDir + "/" + link.name;
-            std::string hash = hashPath(HashAlgorithm::SHA256, linkPath).first.to_string(HashFormat::Nix32, false);
+            PosixSourceAccessor accessor;
+            std::string hash = hashPath(
+                accessor, CanonPath { linkPath },
+                FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false);
             if (hash != link.name) {
                 printError("link '%s' was modified! expected hash '%s', got '%s'",
                     linkPath, link.name, hash);

@@ -1696,42 +1657,6 @@ void LocalStore::queryRealisationUncached(const DrvOutput & id,
     }
 }

-ContentAddress LocalStore::hashCAPath(
-    const ContentAddressMethod & method, const HashAlgorithm & hashAlgo,
-    const StorePath & path)
-{
-    return hashCAPath(method, hashAlgo, Store::toRealPath(path), path.hashPart());
-}
-
-ContentAddress LocalStore::hashCAPath(
-    const ContentAddressMethod & method,
-    const HashAlgorithm & hashAlgo,
-    const Path & path,
-    const std::string_view pathHash
-)
-{
-    HashModuloSink caSink ( hashAlgo, std::string(pathHash) );
-    std::visit(overloaded {
-        [&](const TextIngestionMethod &) {
-            readFile(path, caSink);
-        },
-        [&](const FileIngestionMethod & m2) {
-            switch (m2) {
-            case FileIngestionMethod::Recursive:
-                dumpPath(path, caSink);
-                break;
-            case FileIngestionMethod::Flat:
-                readFile(path, caSink);
-                break;
-            }
-        },
-    }, method.raw);
-    return ContentAddress {
-        .method = method,
-        .hash = caSink.finish().first,
-    };
-}
-
 void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log)
 {
     assert(drvPath.isDerivation());
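Worth spelling out: `HashModuloSink` hashes the serialization with the path's own hash part masked, so legitimate self-references do not disturb the content-address check. Pulled out of the hunk above, the verification reads roughly as follows (a sketch; the `LocalStore` internals are assumed as in the patch):

```cpp
// Sketch of the import-time check: re-serialize the path with its own
// hash part masked (HashModuloSink), then compare against the content
// address recorded in the incoming ValidPathInfo.
bool checkContentAddress(LocalStore & store, const ValidPathInfo & info)
{
    auto & specified = *info.ca;
    HashModuloSink caSink {
        specified.hash.algo,
        std::string { info.path.hashPart() },
    };
    dumpPath(
        *store.getFSAccessor(false),
        CanonPath { store.printStorePath(info.path) },
        caSink,
        specified.method.getFileIngestionMethod());
    return specified.hash == caSink.finish().first;
}
```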
@@ -177,12 +177,11 @@ public:
     void addToStore(const ValidPathInfo & info, Source & source,
         RepairFlag repair, CheckSigsFlag checkSigs) override;

-    StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;
-
-    StorePath addTextToStore(
+    StorePath addToStoreFromDump(
+        Source & dump,
         std::string_view name,
-        std::string_view s,
+        ContentAddressMethod method,
+        HashAlgorithm hashAlgo,
         const StorePathSet & references,
         RepairFlag repair) override;

@@ -350,19 +349,6 @@ private:
     void signPathInfo(ValidPathInfo & info);
     void signRealisation(Realisation &);

-    // XXX: Make a generic `Store` method
-    ContentAddress hashCAPath(
-        const ContentAddressMethod & method,
-        const HashAlgorithm & hashAlgo,
-        const StorePath & path);
-
-    ContentAddress hashCAPath(
-        const ContentAddressMethod & method,
-        const HashAlgorithm & hashAlgo,
-        const Path & path,
-        const std::string_view pathHash
-    );
-
     void addBuildLog(const StorePath & drvPath, std::string_view log) override;

     friend struct LocalDerivationGoal;
@@ -2,6 +2,7 @@
 #include "globals.hh"
 #include "signals.hh"
 #include "posix-fs-canonicalise.hh"
+#include "posix-source-accessor.hh"

 #include <cstdlib>
 #include <cstring>

@@ -146,7 +147,12 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
        Also note that if `path' is a symlink, then we're hashing the
        contents of the symlink (i.e. the result of readlink()), not
        the contents of the target (which may not even exist). */
-    Hash hash = hashPath(HashAlgorithm::SHA256, path).first;
+    Hash hash = ({
+        PosixSourceAccessor accessor;
+        hashPath(
+            accessor, CanonPath { path },
+            FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first;
+    });
     debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));

     /* Check if this is a known hash. */

@@ -156,7 +162,12 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
         if (pathExists(linkPath)) {
             auto stLink = lstat(linkPath);
             if (st.st_size != stLink.st_size
-                || (repair && hash != hashPath(HashAlgorithm::SHA256, linkPath).first))
+                || (repair && hash != ({
+                    PosixSourceAccessor accessor;
+                    hashPath(
+                        accessor, CanonPath { linkPath },
+                        FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first;
+                })))
             {
                 // XXX: Consider overwriting linkPath with our valid version.
                 warn("removing corrupted link '%s'", linkPath);
@@ -502,8 +502,13 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
 }

-StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name,
-    FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
+StorePath RemoteStore::addToStoreFromDump(
+    Source & dump,
+    std::string_view name,
+    ContentAddressMethod method,
+    HashAlgorithm hashAlgo,
+    const StorePathSet & references,
+    RepairFlag repair)
 {
     return addCAToStore(dump, name, method, hashAlgo, references, repair)->path;
 }

@@ -603,16 +608,6 @@ void RemoteStore::addMultipleToStore(
 }

-StorePath RemoteStore::addTextToStore(
-    std::string_view name,
-    std::string_view s,
-    const StorePathSet & references,
-    RepairFlag repair)
-{
-    StringSource source(s);
-    return addCAToStore(source, name, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair)->path;
-}
-
 void RemoteStore::registerDrvOutput(const Realisation & info)
 {
     auto conn(getConnection());
@@ -82,10 +82,15 @@ public:
         RepairFlag repair);

     /**
-     * Add a content-addressable store path. Does not support references. `dump` will be drained.
+     * Add a content-addressable store path. `dump` will be drained.
      */
-    StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
+    StorePath addToStoreFromDump(
+        Source & dump,
+        std::string_view name,
+        ContentAddressMethod method = FileIngestionMethod::Recursive,
+        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
+        const StorePathSet & references = StorePathSet(),
+        RepairFlag repair = NoRepair) override;

     void addToStore(const ValidPathInfo & info, Source & nar,
         RepairFlag repair, CheckSigsFlag checkSigs) override;

@@ -101,12 +106,6 @@ public:
         RepairFlag repair,
         CheckSigsFlag checkSigs) override;

-    StorePath addTextToStore(
-        std::string_view name,
-        std::string_view s,
-        const StorePathSet & references,
-        RepairFlag repair) override;
-
     void registerDrvOutput(const Realisation & info) override;

     void queryRealisationUncached(const DrvOutput &,
@@ -205,25 +205,19 @@ StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const Fixed
 }

-StorePath StoreDirConfig::makeTextPath(std::string_view name, const TextInfo & info) const
-{
-    assert(info.hash.algo == HashAlgorithm::SHA256);
-    return makeStorePath(
-        makeType(*this, "text", StoreReferences {
-            .others = info.references,
-            .self = false,
-        }),
-        info.hash,
-        name);
-}
-
 StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const
 {
     // New template
     return std::visit(overloaded {
         [&](const TextInfo & ti) {
-            return makeTextPath(name, ti);
+            assert(ti.hash.algo == HashAlgorithm::SHA256);
+            return makeStorePath(
+                makeType(*this, "text", StoreReferences {
+                    .others = ti.references,
+                    .self = false,
+                }),
+                ti.hash,
+                name);
         },
         [&](const FixedOutputInfo & foi) {
             return makeFixedOutputPath(name, foi);

@@ -232,54 +226,45 @@ StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const
 }

-std::pair<StorePath, Hash> StoreDirConfig::computeStorePathFromDump(
-    Source & dump,
+std::pair<StorePath, Hash> StoreDirConfig::computeStorePath(
     std::string_view name,
-    FileIngestionMethod method,
+    SourceAccessor & accessor,
+    const CanonPath & path,
+    ContentAddressMethod method,
     HashAlgorithm hashAlgo,
-    const StorePathSet & references) const
+    const StorePathSet & references,
+    PathFilter & filter) const
 {
-    HashSink sink(hashAlgo);
-    dump.drainInto(sink);
-    auto h = sink.finish().first;
-    FixedOutputInfo caInfo {
-        .method = method,
-        .hash = h,
-        .references = {},
+    auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first;
+    return {
+        makeFixedOutputPathFromCA(
+            name,
+            ContentAddressWithReferences::fromParts(
+                method,
+                h,
+                {
+                    .others = references,
+                    .self = false,
+                })),
+        h,
     };
-    return std::make_pair(makeFixedOutputPath(name, caInfo), h);
-}
-
-StorePath StoreDirConfig::computeStorePathForText(
-    std::string_view name,
-    std::string_view s,
-    const StorePathSet & references) const
-{
-    return makeTextPath(name, TextInfo {
-        .hash = hashString(HashAlgorithm::SHA256, s),
-        .references = references,
-    });
 }

 StorePath Store::addToStore(
     std::string_view name,
-    const Path & _srcPath,
-    FileIngestionMethod method,
+    SourceAccessor & accessor,
+    const CanonPath & path,
+    ContentAddressMethod method,
     HashAlgorithm hashAlgo,
+    const StorePathSet & references,
     PathFilter & filter,
-    RepairFlag repair,
-    const StorePathSet & references)
+    RepairFlag repair)
 {
-    Path srcPath(absPath(_srcPath));
     auto source = sinkToSource([&](Sink & sink) {
-        if (method == FileIngestionMethod::Recursive)
-            dumpPath(srcPath, sink, filter);
-        else
-            readFile(srcPath, sink);
+        dumpPath(accessor, path, sink, method.getFileIngestionMethod(), filter);
     });
-    return addToStoreFromDump(*source, name, method, hashAlgo, repair, references);
+    return addToStoreFromDump(*source, name, method, hashAlgo, references, repair);
 }

 void Store::addMultipleToStore(

@@ -404,8 +389,12 @@ digraph graphname {
    fileSink -> caHashSink
 }
 */
-ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
-    FileIngestionMethod method, HashAlgorithm hashAlgo,
+ValidPathInfo Store::addToStoreSlow(
+    std::string_view name,
+    SourceAccessor & accessor,
+    const CanonPath & srcPath,
+    ContentAddressMethod method, HashAlgorithm hashAlgo,
+    const StorePathSet & references,
     std::optional<Hash> expectedCAHash)
 {
     HashSink narHashSink { HashAlgorithm::SHA256 };

@@ -425,7 +414,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
        srcPath. The fact that we use scratchpadSink as a temporary buffer here
        is an implementation detail. */
     auto fileSource = sinkToSource([&](Sink & scratchpadSink) {
-        dumpPath(srcPath, scratchpadSink);
+        accessor.dumpPath(srcPath, scratchpadSink);
     });

     /* tapped provides the same data as fileSource, but we also write all the

@@ -433,9 +422,11 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
     TeeSource tapped { *fileSource, narSink };

     NullParseSink blank;
-    auto & parseSink = method == FileIngestionMethod::Flat
+    auto & parseSink = method.getFileIngestionMethod() == FileIngestionMethod::Flat
         ? (ParseSink &) fileSink
-        : (ParseSink &) blank;
+        : method.getFileIngestionMethod() == FileIngestionMethod::Recursive
+        ? (ParseSink &) blank
+        : (abort(), (ParseSink &)*(ParseSink *)nullptr); // handled both cases

     /* The information that flows from tapped (besides being replicated in
        narSink), is now put in parseSink. */

@@ -452,21 +443,24 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
     if (expectedCAHash && expectedCAHash != hash)
         throw Error("hash mismatch for '%s'", srcPath);

     ValidPathInfo info {
         *this,
         name,
-        FixedOutputInfo {
-            .method = method,
-            .hash = hash,
-            .references = {},
-        },
+        ContentAddressWithReferences::fromParts(
+            method,
+            hash,
+            {
+                .others = references,
+                .self = false,
+            }),
         narHash,
     };
     info.narSize = narSize;

     if (!isValidPath(info.path)) {
         auto source = sinkToSource([&](Sink & scratchpadSink) {
-            dumpPath(srcPath, scratchpadSink);
+            accessor.dumpPath(srcPath, scratchpadSink);
         });
         addToStore(info, *source);
     }
@@ -428,20 +428,26 @@ public:
      */
     virtual StorePath addToStore(
         std::string_view name,
-        const Path & srcPath,
-        FileIngestionMethod method = FileIngestionMethod::Recursive,
+        SourceAccessor & accessor,
+        const CanonPath & path,
+        ContentAddressMethod method = FileIngestionMethod::Recursive,
         HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
+        const StorePathSet & references = StorePathSet(),
         PathFilter & filter = defaultPathFilter,
-        RepairFlag repair = NoRepair,
-        const StorePathSet & references = StorePathSet());
+        RepairFlag repair = NoRepair);

     /**
      * Copy the contents of a path to the store and register the
      * validity the resulting path, using a constant amount of
      * memory.
      */
-    ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath,
-        FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
+    ValidPathInfo addToStoreSlow(
+        std::string_view name,
+        SourceAccessor & accessor,
+        const CanonPath & path,
+        ContentAddressMethod method = FileIngestionMethod::Recursive,
+        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
+        const StorePathSet & references = StorePathSet(),
         std::optional<Hash> expectedCAHash = {});

     /**

@@ -453,20 +459,14 @@ public:
      *
      * \todo remove?
      */
-    virtual StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair,
-        const StorePathSet & references = StorePathSet())
-    { unsupported("addToStoreFromDump"); }
-
-    /**
-     * Like addToStore, but the contents written to the output path is a
-     * regular file containing the given string.
-     */
-    virtual StorePath addTextToStore(
+    virtual StorePath addToStoreFromDump(
+        Source & dump,
         std::string_view name,
-        std::string_view s,
-        const StorePathSet & references,
-        RepairFlag repair = NoRepair) = 0;
+        ContentAddressMethod method = FileIngestionMethod::Recursive,
+        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
+        const StorePathSet & references = StorePathSet(),
+        RepairFlag repair = NoRepair)
+    { unsupported("addToStoreFromDump"); }

     /**
      * Add a mapping indicating that `deriver!outputName` maps to the output path
@@ -86,41 +86,20 @@ struct StoreDirConfig : public Config
     StorePath makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const;

-    StorePath makeTextPath(std::string_view name, const TextInfo & info) const;
-
     StorePath makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const;

     /**
-     * Read-only variant of addToStoreFromDump(). It returns the store
-     * path to which a NAR or flat file would be written.
+     * Read-only variant of addToStore(). It returns the store
+     * path for the given file sytem object.
      */
-    std::pair<StorePath, Hash> computeStorePathFromDump(
-        Source & dump,
+    std::pair<StorePath, Hash> computeStorePath(
         std::string_view name,
-        FileIngestionMethod method = FileIngestionMethod::Recursive,
+        SourceAccessor & accessor,
+        const CanonPath & path,
+        ContentAddressMethod method = FileIngestionMethod::Recursive,
         HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
-        const StorePathSet & references = {}) const;
-
-    /**
-     * Preparatory part of addTextToStore().
-     *
-     * !!! Computation of the path should take the references given to
-     * addTextToStore() into account, otherwise we have a (relatively
-     * minor) security hole: a caller can register a source file with
-     * bogus references. If there are too many references, the path may
-     * not be garbage collected when it has to be (not really a problem,
-     * the caller could create a root anyway), or it may be garbage
-     * collected when it shouldn't be (more serious).
-     *
-     * Hashing the references would solve this (bogus references would
-     * simply yield a different store path, so other users wouldn't be
-     * affected), but it has some backwards compatibility issues (the
-     * hashing scheme changes), so I'm not doing that for now.
-     */
-    StorePath computeStorePathForText(
-        std::string_view name,
-        std::string_view s,
-        const StorePathSet & references) const;
+        const StorePathSet & references = {},
+        PathFilter & filter = defaultPathFilter) const;
 };

 }
@@ -173,10 +173,9 @@ static bool printUnknownLocations = getEnv("_NIX_EVAL_SHOW_UNKNOWN_LOCATIONS").h
 static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr<AbstractPos> & pos) {
     bool hasPos = pos && *pos;
     if (hasPos) {
-        oss << "\n" << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":";
+        oss << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":";

         if (auto loc = pos->getCodeLines()) {
-            oss << "\n";
             printCodeLines(oss, "", *pos, *loc);
             oss << "\n";
         }
@@ -0,0 +1,49 @@
+#include "file-content-address.hh"
+#include "archive.hh"
+
+namespace nix {
+
+void dumpPath(
+    SourceAccessor & accessor, const CanonPath & path,
+    Sink & sink,
+    FileIngestionMethod method,
+    PathFilter & filter)
+{
+    switch (method) {
+    case FileIngestionMethod::Flat:
+        accessor.readFile(path, sink);
+        break;
+    case FileIngestionMethod::Recursive:
+        accessor.dumpPath(path, sink, filter);
+        break;
+    }
+}
+
+void restorePath(
+    const Path & path,
+    Source & source,
+    FileIngestionMethod method)
+{
+    switch (method) {
+    case FileIngestionMethod::Flat:
+        writeFile(path, source);
+        break;
+    case FileIngestionMethod::Recursive:
+        restorePath(path, source);
+        break;
+    }
+}
+
+HashResult hashPath(
+    SourceAccessor & accessor, const CanonPath & path,
+    FileIngestionMethod method, HashAlgorithm ht,
+    PathFilter & filter)
+{
+    HashSink sink { ht };
+    dumpPath(accessor, path, sink, method, filter);
+    return sink.finish();
+}
+
+}
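These three helpers subsume the method-dispatch that was previously duplicated at every call site. A usage sketch (the paths and the flat/NAR contrast are illustrative):

```cpp
#include "file-content-address.hh"
#include "posix-source-accessor.hh"
#include "serialise.hh"

using namespace nix;

void fileContentAddressDemo(const Path & src, const Path & dst)
{
    PosixSourceAccessor accessor;
    auto canonSrc = CanonPath::fromCwd(src);

    // Flat hashes the raw bytes; Recursive hashes the NAR serialization.
    // For a regular file the two generally differ.
    auto flat = hashPath(accessor, canonSrc,
        FileIngestionMethod::Flat, HashAlgorithm::SHA256);
    auto nar = hashPath(accessor, canonSrc,
        FileIngestionMethod::Recursive, HashAlgorithm::SHA256);

    // Dumping and restoring with the same method round-trips the object.
    StringSink sink;
    dumpPath(accessor, canonSrc, sink, FileIngestionMethod::Recursive);
    StringSource source { sink.s };
    restorePath(dst, source, FileIngestionMethod::Recursive);
}
```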
@@ -0,0 +1,56 @@
+#pragma once
+///@file
+
+#include "source-accessor.hh"
+#include "fs-sink.hh"
+#include "util.hh"
+
+namespace nix {
+
+/**
+ * An enumeration of the main ways we can serialize file system
+ * objects.
+ */
+enum struct FileIngestionMethod : uint8_t {
+    /**
+     * Flat-file hashing. Directly ingest the contents of a single file
+     */
+    Flat = 0,
+    /**
+     * Recursive (or NAR) hashing. Serializes the file-system object in
+     * Nix Archive format and ingest that.
+     */
+    Recursive = 1,
+};
+
+/**
+ * Dump a serialization of the given file system object.
+ */
+void dumpPath(
+    SourceAccessor & accessor, const CanonPath & path,
+    Sink & sink,
+    FileIngestionMethod method,
+    PathFilter & filter = defaultPathFilter);
+
+/**
+ * Restore a serialization of the given file system object.
+ *
+ * @TODO use an arbitrary `ParseSink`.
+ */
+void restorePath(
+    const Path & path,
+    Source & source,
+    FileIngestionMethod method);
+
+/**
+ * Compute the hash of the given file system object according to the
+ * given method.
+ *
+ * The hash is defined as (essentially) hashString(ht, dumpPath(path)).
+ */
+HashResult hashPath(
+    SourceAccessor & accessor, const CanonPath & path,
+    FileIngestionMethod method, HashAlgorithm ht,
+    PathFilter & filter = defaultPathFilter);
+
+}
@@ -367,15 +367,6 @@ HashResult HashSink::currentHash()
 }

-HashResult hashPath(
-    HashAlgorithm ha, const Path & path, PathFilter & filter)
-{
-    HashSink sink(ha);
-    dumpPath(path, sink, filter);
-    return sink.finish();
-}
-
 Hash compressHash(const Hash & hash, unsigned int newSize)
 {
     Hash h(hash.algo);
@@ -168,14 +168,11 @@ Hash hashString(HashAlgorithm ha, std::string_view s);
 Hash hashFile(HashAlgorithm ha, const Path & path);

 /**
- * Compute the hash of the given path, serializing as a Nix Archive and
- * then hashing that.
+ * The final hash and the number of bytes digested.
  *
- * The hash is defined as (essentially) hashString(ht, dumpPath(path)).
+ * @todo Convert to proper struct
  */
 typedef std::pair<Hash, uint64_t> HashResult;
-HashResult hashPath(HashAlgorithm ha, const Path & path,
-    PathFilter & filter = defaultPathFilter);

 /**
  * Compress a hash to the specified number of bytes by cyclically
@@ -19,13 +19,15 @@ const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncod
 const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?";
 const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])";
 const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*";
+const static std::string fragmentRegex = "(?:" + pcharRegex + "|[/? \"^])*";
 const static std::string segmentRegex = "(?:" + pcharRegex + "*)";
 const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)";
 const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)";

 /// A Git ref (i.e. branch or tag name).
 /// \todo check that this is correct.
-const static std::string refRegexS = "[a-zA-Z0-9@][a-zA-Z0-9_.\\/@-]*";
+/// This regex incomplete. See https://git-scm.com/docs/git-check-ref-format
+const static std::string refRegexS = "[a-zA-Z0-9@][a-zA-Z0-9_.\\/@+-]*";
 extern std::regex refRegex;

 /// Instead of defining what a good Git Ref is, we define what a bad Git Ref is
@@ -16,7 +16,7 @@ ParsedURL parseURL(const std::string & url)
         "((" + schemeNameRegex + "):"
         + "(?:(?://(" + authorityRegex + ")(" + absPathRegex + "))|(/?" + pathRegex + ")))"
         + "(?:\\?(" + queryRegex + "))?"
-        + "(?:#(" + queryRegex + "))?",
+        + "(?:#(" + fragmentRegex + "))?",
         std::regex::ECMAScript);

     std::smatch match;
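The practical motivation, as far as the regexes show: URL fragments may now contain `^` (the output-selection syntax used by installables such as `nixpkgs#hello^dev`), and Git refs may additionally contain `+`. A quick sketch of the newly accepted input (assuming `ParsedURL` exposes the parsed fragment as a field, as its name suggests):

```cpp
#include "url.hh"
#include <cassert>

using namespace nix;

void fragmentExample()
{
    // '^' in the fragment no longer fails to parse; installables use it
    // to select derivation outputs.
    auto url = parseURL("flake:nixpkgs#hello^dev");
    assert(url.fragment == "hello^dev");
}
```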
@@ -104,10 +104,15 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
     /* Also write a copy of the list of user environment elements to
        the store; we need it for future modifications of the
        environment. */
+    auto manifestFile = ({
         std::ostringstream str;
         manifest.print(state.symbols, str, true);
-    auto manifestFile = state.store->addTextToStore("env-manifest.nix",
-        str.str(), references);
+        // TODO with C++20 we can use str.view() instead and avoid copy.
+        std::string str2 = str.str();
+        StringSource source { str2 };
+        state.store->addToStoreFromDump(
+            source, "env-manifest.nix", TextIngestionMethod {}, HashAlgorithm::SHA256, references);
+    });

     /* Get the environment builder expression. */
     Value envBuilder;
@@ -13,6 +13,7 @@
 #include "shared.hh"
 #include "graphml.hh"
 #include "legacy.hh"
+#include "posix-source-accessor.hh"
 #include "path-with-outputs.hh"
 #include "posix-fs-canonicalise.hh"

@@ -175,8 +176,12 @@ static void opAdd(Strings opFlags, Strings opArgs)
 {
     if (!opFlags.empty()) throw UsageError("unknown flag");

+    PosixSourceAccessor accessor;
     for (auto & i : opArgs)
-        cout << fmt("%s\n", store->printStorePath(store->addToStore(std::string(baseNameOf(i)), i)));
+        cout << fmt("%s\n", store->printStorePath(store->addToStore(
+            std::string(baseNameOf(i)),
+            accessor,
+            CanonPath::fromCwd(i))));
 }

@@ -196,8 +201,14 @@ static void opAddFixed(Strings opFlags, Strings opArgs)
     HashAlgorithm hashAlgo = parseHashAlgo(opArgs.front());
     opArgs.pop_front();

+    PosixSourceAccessor accessor;
     for (auto & i : opArgs)
-        std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow(baseNameOf(i), i, method, hashAlgo).path));
+        std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow(
+            baseNameOf(i),
+            accessor,
+            CanonPath::fromCwd(i),
+            method,
+            hashAlgo).path));
 }

@@ -541,7 +552,10 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
         if (canonicalise)
             canonicalisePathMetaData(store->printStorePath(info->path), {});
         if (!hashGiven) {
-            HashResult hash = hashPath(HashAlgorithm::SHA256, store->printStorePath(info->path));
+            HashResult hash = hashPath(
+                *store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) },
+                FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
             info->narHash = hash.first;
             info->narSize = hash.second;
         }
@@ -2,6 +2,7 @@
 #include "common-args.hh"
 #include "store-api.hh"
 #include "archive.hh"
+#include "posix-source-accessor.hh"

 using namespace nix;

@@ -20,7 +21,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
 {
     Path path;
     std::optional<std::string> namePart;
-    FileIngestionMethod ingestionMethod = FileIngestionMethod::Recursive;
+    ContentAddressMethod caMethod = FileIngestionMethod::Recursive;

     CmdAddToStore()
     {

@@ -48,7 +49,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
             )",
             .labels = {"hash-mode"},
             .handler = {[this](std::string s) {
-                this->ingestionMethod = parseIngestionMethod(s);
+                this->caMethod = parseIngestionMethod(s);
             }},
         });
     }

@@ -57,36 +58,17 @@ struct CmdAddToStore : MixDryRun, StoreCommand
     {
         if (!namePart) namePart = baseNameOf(path);

-        StringSink sink;
-        dumpPath(path, sink);
-
-        auto narHash = hashString(HashAlgorithm::SHA256, sink.s);
-
-        Hash hash = narHash;
-        if (ingestionMethod == FileIngestionMethod::Flat) {
-            HashSink hsink(HashAlgorithm::SHA256);
-            readFile(path, hsink);
-            hash = hsink.finish().first;
-        }
-
-        ValidPathInfo info {
-            *store,
-            std::move(*namePart),
-            FixedOutputInfo {
-                .method = std::move(ingestionMethod),
-                .hash = std::move(hash),
-                .references = {},
-            },
-            narHash,
-        };
-        info.narSize = sink.s.size();
-
-        if (!dryRun) {
-            auto source = StringSource(sink.s);
-            store->addToStore(info, source);
-        }
-
-        logger->cout("%s", store->printStorePath(info.path));
+        PosixSourceAccessor accessor;
+
+        auto path2 = CanonPath::fromCwd(path);
+
+        auto storePath = dryRun
+            ? store->computeStorePath(
+                *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).first
+            : store->addToStoreSlow(
+                *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).path;
+
+        logger->cout("%s", store->printStorePath(storePath));
     }
 };

@@ -110,7 +92,7 @@ struct CmdAddFile : CmdAddToStore
 {
     CmdAddFile()
     {
-        ingestionMethod = FileIngestionMethod::Flat;
+        caMethod = FileIngestionMethod::Flat;
     }

     std::string description() override
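Note the dry-run/real split: `computeStorePath` is the pure, read-only variant, while `addToStoreSlow` actually ingests; for the same inputs the two must agree. A sketch of that property (a hypothetical check, using the same arguments as above):

```cpp
#include "store-api.hh"
#include <cassert>

using namespace nix;

// Sketch: for identical inputs, the dry-run computation and the real
// ingestion agree on the store path.
void checkDryRunAgreement(Store & store, SourceAccessor & accessor,
    std::string_view name, const CanonPath & path, ContentAddressMethod method)
{
    auto dry = store.computeStorePath(
        name, accessor, path, method, HashAlgorithm::SHA256, {}).first;
    auto wet = store.addToStoreSlow(
        name, accessor, path, method, HashAlgorithm::SHA256, {}).path;
    assert(dry == wet);
}
```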
@@ -223,7 +223,11 @@ static StorePath getDerivationEnvironment(ref<Store> store, ref<Store> evalStore
     if (builder != "bash")
         throw Error("'nix develop' only works on derivations that use 'bash' as their builder");

-    auto getEnvShPath = evalStore->addTextToStore("get-env.sh", getEnvSh, {});
+    auto getEnvShPath = ({
+        StringSource source { getEnvSh };
+        evalStore->addToStoreFromDump(
+            source, "get-env.sh", TextIngestionMethod {}, HashAlgorithm::SHA256, {});
+    });

     drv.args = {store->printStorePath(getEnvShPath)};
@@ -5,6 +5,7 @@
 #include "shared.hh"
 #include "references.hh"
 #include "archive.hh"
+#include "posix-source-accessor.hh"

 using namespace nix;

@@ -88,14 +89,8 @@ struct CmdHashBase : Command
             else
                 hashSink = std::make_unique<HashSink>(ha);

-            switch (mode) {
-            case FileIngestionMethod::Flat:
-                readFile(path, *hashSink);
-                break;
-            case FileIngestionMethod::Recursive:
-                dumpPath(path, *hashSink);
-                break;
-            }
+            PosixSourceAccessor accessor;
+            dumpPath(accessor, CanonPath::fromCwd(path), *hashSink, mode);

             Hash h = hashSink->finish().first;
             if (truncate && h.hashSize > 20) h = compressHash(h, 20);
@@ -9,6 +9,7 @@
 #include "attr-path.hh"
 #include "eval-inline.hh"
 #include "legacy.hh"
+#include "posix-source-accessor.hh"

 #include <nlohmann/json.hpp>

@@ -122,7 +123,11 @@ std::tuple<StorePath, Hash> prefetchFile(
         Activity act(*logger, lvlChatty, actUnknown,
             fmt("adding '%s' to the store", url));

-        auto info = store->addToStoreSlow(*name, tmpFile, ingestionMethod, hashAlgo, expectedHash);
+        PosixSourceAccessor accessor;
+        auto info = store->addToStoreSlow(
+            *name,
+            accessor, CanonPath::fromCwd(tmpFile),
+            ingestionMethod, hashAlgo, {}, expectedHash);
         storePath = info.path;
         assert(info.ca);
         hash = info.ca->hash;
@@ -6,12 +6,14 @@ R""(
   ```console
   # nix profile list
+  Name: gdb
   Index: 0
   Flake attribute: legacyPackages.x86_64-linux.gdb
   Original flake URL: flake:nixpkgs
   Locked flake URL: github:NixOS/nixpkgs/7b38b03d76ab71bdc8dc325e3f6338d984cc35ca
   Store paths: /nix/store/indzcw5wvlhx6vwk7k4iq29q15chvr3d-gdb-11.1
+
+  Name: blender-bin
   Index: 1
   Flake attribute: packages.x86_64-linux.default
   Original flake URL: flake:blender-bin

@@ -26,7 +28,7 @@ R""(
   # nix build github:edolstra/nix-warez/91f2ffee657bf834e4475865ae336e2379282d34?dir=blender#packages.x86_64-linux.default
   ```

-will build the package with index 1 shown above.
+will build the package `blender-bin` shown above.

 # Description

@@ -34,10 +36,14 @@ This command shows what packages are currently installed in a
 profile. For each installed package, it shows the following
 information:

-* `Index`: An integer that can be used to unambiguously identify the
+* `Name`: A unique name used to unambiguously identify the
   package in invocations of `nix profile remove` and `nix profile
   upgrade`.

+* `Index`: An integer that can be used to unambiguously identify the
+  package in invocations of `nix profile remove` and `nix profile upgrade`.
+  (*Deprecated, will be removed in a future version in favor of `Name`.*)
+
 * `Flake attribute`: The flake output attribute path that provides the
   package (e.g. `packages.x86_64-linux.hello`).

@@ -2,18 +2,19 @@ R""(
 # Examples
 
-* Remove a package by position:
+* Remove a package by name:
+
+  ```console
+  # nix profile remove hello
+  ```
+
+* Remove a package by index
+  *(deprecated, will be removed in a future version)*:
 
   ```console
   # nix profile remove 3
   ```
 
-* Remove a package by attribute path:
-
-  ```console
-  # nix profile remove packages.x86_64-linux.hello
-  ```
-
 * Remove all packages:
 
   ```console


@@ -9,18 +9,16 @@ R""(
 # nix profile upgrade '.*'
 ```
 
-* Upgrade a specific package:
+* Upgrade a specific package by name:
 
   ```console
-  # nix profile upgrade packages.x86_64-linux.hello
+  # nix profile upgrade hello
   ```
 
-* Upgrade a specific profile element by number:
+* Upgrade a specific package by index
+  *(deprecated, will be removed in a future version)*:
 
   ```console
-  # nix profile list
-  0 flake:nixpkgs#legacyPackages.x86_64-linux.spotify …
   # nix profile upgrade 0
   ```


@@ -10,6 +10,8 @@
 #include "../nix-env/user-env.hh"
 #include "profiles.hh"
 #include "names.hh"
+#include "url.hh"
+#include "flake/url-name.hh"
 
 #include <nlohmann/json.hpp>
 #include <regex>
@@ -43,6 +45,7 @@ const int defaultPriority = 5;
 struct ProfileElement
 {
     StorePathSet storePaths;
+    std::string name;
     std::optional<ProfileElementSource> source;
     bool active = true;
     int priority = defaultPriority;
@@ -116,6 +119,8 @@ struct ProfileManifest
         if (pathExists(manifestPath)) {
             auto json = nlohmann::json::parse(readFile(manifestPath));
+            /* Keep track of already found names to allow preventing duplicates. */
+            std::set<std::string> foundNames;
 
             auto version = json.value("version", 0);
             std::string sUrl;
@@ -149,6 +154,25 @@ struct ProfileManifest
                         e["outputs"].get<ExtendedOutputsSpec>()
                     };
                 }
 
+                std::string nameCandidate = element.identifier();
+                if (e.contains("name")) {
+                    nameCandidate = e["name"];
+                }
+                else if (element.source) {
+                    auto url = parseURL(element.source->to_string());
+                    auto name = getNameFromURL(url);
+                    if (name)
+                        nameCandidate = *name;
+                }
+
+                auto finalName = nameCandidate;
+                for (int i = 1; foundNames.contains(finalName); ++i) {
+                    finalName = nameCandidate + std::to_string(i);
+                }
+                element.name = finalName;
+                foundNames.insert(element.name);
+
                 elements.emplace_back(std::move(element));
             }
         }
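The new naming logic prefers an explicit `name` field from the manifest, then a name derived from the element's flake URL, then the element's identifier, and suffixes duplicates with a counter. A standalone sketch of just the deduplication step (plain C++20, independent of the Nix sources; `deriveUniqueName` is a hypothetical name):

```cpp
#include <iostream>
#include <set>
#include <string>

// Mirrors the suffixing loop above: the first "hello" stays "hello",
// later candidates become "hello1", "hello2", ...
static std::string deriveUniqueName(std::set<std::string> & foundNames, const std::string & candidate)
{
    auto finalName = candidate;
    for (int i = 1; foundNames.contains(finalName); ++i)
        finalName = candidate + std::to_string(i);
    foundNames.insert(finalName);
    return finalName;
}

int main()
{
    std::set<std::string> foundNames;
    std::cout << deriveUniqueName(foundNames, "hello") << "\n"; // hello
    std::cout << deriveUniqueName(foundNames, "hello") << "\n"; // hello1
    std::cout << deriveUniqueName(foundNames, "hello") << "\n"; // hello2
}
```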
@@ -163,6 +187,7 @@ struct ProfileManifest
         for (auto & drvInfo : drvInfos) {
             ProfileElement element;
             element.storePaths = {drvInfo.queryOutPath()};
+            element.name = element.identifier();
             elements.emplace_back(std::move(element));
         }
     }
@@ -451,15 +476,25 @@ public:
     {
         std::vector<Matcher> res;
 
+        auto anyIndexMatchers = false;
+
         for (auto & s : _matchers) {
-            if (auto n = string2Int<size_t>(s))
+            if (auto n = string2Int<size_t>(s)) {
                 res.push_back(*n);
+                anyIndexMatchers = true;
+            }
             else if (store->isStorePath(s))
                 res.push_back(s);
             else
                 res.push_back(RegexPattern{s,std::regex(s, std::regex::extended | std::regex::icase)});
         }
 
+        if (anyIndexMatchers) {
+            warn("Indices are deprecated and will be removed in a future version!\n"
+                " Refer to packages by their `Name` as printed by `nix profile list`.\n"
+                " See https://github.com/NixOS/nix/issues/9171 for more information.");
+        }
+
         return res;
     }
@@ -471,8 +506,7 @@ public:
             } else if (auto path = std::get_if<Path>(&matcher)) {
                 if (element.storePaths.count(store.parseStorePath(*path))) return true;
             } else if (auto regex = std::get_if<RegexPattern>(&matcher)) {
-                if (element.source
-                    && std::regex_match(element.source->attrPath, regex->reg))
+                if (std::regex_match(element.name, regex->reg))
                     return true;
             }
         }
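With this change, regex matchers are tested against the element's `Name` rather than its flake attribute path. A self-contained illustration of the matching semantics used above (POSIX extended syntax, case-insensitive, full match):

```cpp
#include <cassert>
#include <regex>
#include <string>

// Same flags and matching call as the hunk above, applied to a name.
static bool nameMatches(const std::string & name, const std::string & pattern)
{
    std::regex reg(pattern, std::regex::extended | std::regex::icase);
    return std::regex_match(name, reg);
}

int main()
{
    assert(nameMatches("blender-bin", "blender.*"));
    assert(!nameMatches("blender-bin", "blender")); // regex_match requires a full match
}
```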
@@ -556,14 +590,32 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
         Installables installables;
         std::vector<size_t> indices;
 
+        auto matchedCount = 0;
         auto upgradedCount = 0;
 
         for (size_t i = 0; i < manifest.elements.size(); ++i) {
             auto & element(manifest.elements[i]);
-            if (element.source
-                && !element.source->originalRef.input.isLocked()
-                && matches(*store, element, i, matchers))
-            {
+            if (!matches(*store, element, i, matchers)) {
+                continue;
+            }
+
+            matchedCount++;
+
+            if (!element.source) {
+                warn(
+                    "Found package '%s', but it was not installed from a flake, so it can't be checked for upgrades!",
+                    element.identifier()
+                );
+                continue;
+            }
+            if (element.source->originalRef.input.isLocked()) {
+                warn(
+                    "Found package '%s', but it was installed from a locked flake reference so it can't be upgraded!",
+                    element.identifier()
+                );
+                continue;
+            }
+
             upgradedCount++;
 
             Activity act(*logger, lvlChatty, actUnknown,
@@ -601,9 +653,9 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
                 installables.push_back(installable);
                 indices.push_back(i);
             }
-        }
 
         if (upgradedCount == 0) {
+            if (matchedCount == 0) {
             for (auto & matcher : matchers) {
                 if (const size_t * index = std::get_if<size_t>(&matcher)){
                     warn("'%d' is not a valid index", *index);
@@ -613,6 +665,9 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
                     warn("'%s' does not match any packages", regex->pattern);
                 }
             }
+            } else {
+                warn("Found some packages but none of them could be upgraded.");
+            }
 
         warn ("Use 'nix profile list' to see the current profile.");
     }
@@ -657,9 +712,10 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro
         for (size_t i = 0; i < manifest.elements.size(); ++i) {
             auto & element(manifest.elements[i]);
             if (i) logger->cout("");
-            logger->cout("Index: " ANSI_BOLD "%s" ANSI_NORMAL "%s",
-                i,
+            logger->cout("Name: " ANSI_BOLD "%s" ANSI_NORMAL "%s",
+                element.name,
                 element.active ? "" : " " ANSI_RED "(inactive)" ANSI_NORMAL);
+            logger->cout("Index: %s", i);
             if (element.source) {
                 logger->cout("Flake attribute: %s%s", element.source->attrPath, element.source->outputs.to_string());
                 logger->cout("Original flake URL: %s", element.source->originalRef.to_string());


@@ -1,8 +1,6 @@
 error:
        … while calling the 'abort' builtin
-
          at /pwd/lang/eval-fail-abort.nix:1:14:
-
            1| if true then abort "this should fail" else 1
             |              ^
            2|


@@ -1,8 +1,6 @@
 error:
        … while calling the 'addDrvOutputDependencies' builtin
-
          at /pwd/lang/eval-fail-addDrvOutputDependencies-empty-context.nix:1:1:
-
            1| builtins.addDrvOutputDependencies ""
             | ^
            2|


@@ -1,8 +1,6 @@
 error:
        … while calling the 'addDrvOutputDependencies' builtin
-
          at /pwd/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.nix:18:4:
-
           17|
           18| in builtins.addDrvOutputDependencies combo-path
             |    ^


@@ -1,8 +1,6 @@
 error:
        … while calling the 'addDrvOutputDependencies' builtin
-
          at /pwd/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.nix:9:4:
-
            8|
            9| in builtins.addDrvOutputDependencies drv.outPath
             |    ^


@@ -1,35 +1,27 @@
 error:
        … while evaluating the attribute 'body'
-
          at /pwd/lang/eval-fail-assert.nix:4:3:
-
            3|
            4|   body = x "x";
             |   ^
            5| }
 
        … from call site
-
          at /pwd/lang/eval-fail-assert.nix:4:10:
-
            3|
            4|   body = x "x";
             |          ^
            5| }
 
        … while calling 'x'
-
          at /pwd/lang/eval-fail-assert.nix:2:7:
-
            1| let {
            2|   x = arg: assert arg == "y"; 123;
             |       ^
            3|
 
 error: assertion '(arg == "y")' failed
-
          at /pwd/lang/eval-fail-assert.nix:2:12:
-
            1| let {
            2|   x = arg: assert arg == "y"; 123;
             |            ^


@@ -1,17 +1,13 @@
 error:
        … while evaluating the attribute 'puppy."${key}"'
-
          at /pwd/lang/eval-fail-attr-name-type.nix:3:5:
-
            2|   attrs = {
            3|     puppy.doggy = {};
             |     ^
            4|   };
 
        … while evaluating an attribute name
-
          at /pwd/lang/eval-fail-attr-name-type.nix:7:17:
-
            6| in
            7|   attrs.puppy.${key}
             |                 ^


@@ -1,8 +1,6 @@
 error:
        … while evaluating a path segment
-
          at /pwd/lang/eval-fail-bad-string-interpolation-1.nix:1:2:
-
            1| "${x: x}"
             |  ^
            2|


@@ -1,8 +1,6 @@
 error:
        … while evaluating a path segment
-
          at /pwd/lang/eval-fail-bad-string-interpolation-3.nix:1:3:
-
            1| ''${x: x}''
             |   ^
            2|


@@ -1,8 +1,6 @@
 error:
        … while evaluating a path segment
-
          at /pwd/lang/eval-fail-bad-string-interpolation-4.nix:9:3:
-
            8| # The error message should not be too long.
            9| ''${pkgs}''
             |   ^


@@ -1,17 +1,13 @@
 error:
        … while evaluating the attribute 'body'
-
          at /pwd/lang/eval-fail-blackhole.nix:2:3:
-
            1| let {
            2|   body = x;
             |   ^
            3|   x = y;
 
 error: infinite recursion encountered
-
          at /pwd/lang/eval-fail-blackhole.nix:3:7:
-
            2|   body = x;
            3|   x = y;
             |       ^


@@ -1,8 +1,6 @@
 error:
        … while calling the 'length' builtin
-
          at /pwd/lang/eval-fail-call-primop.nix:1:1:
-
            1| builtins.length 1
             | ^
            2|


@@ -1,24 +1,18 @@
 error:
        … while calling the 'deepSeq' builtin
-
          at /pwd/lang/eval-fail-deepseq.nix:1:1:
-
            1| builtins.deepSeq { x = abort "foo"; } 456
             | ^
            2|
 
        … while evaluating the attribute 'x'
-
          at /pwd/lang/eval-fail-deepseq.nix:1:20:
-
            1| builtins.deepSeq { x = abort "foo"; } 456
             |                    ^
            2|
 
        … while calling the 'abort' builtin
-
          at /pwd/lang/eval-fail-deepseq.nix:1:24:
-
            1| builtins.deepSeq { x = abort "foo"; } 456
             |                        ^
            2|


@@ -1,17 +1,13 @@
 error:
        … while evaluating the attribute 'set'
-
          at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:2:3:
-
            1| {
            2|   set = { "${"" + "b"}" = 1; };
             |   ^
            3|   set = { "${"b" + ""}" = 2; };
 
 error: dynamic attribute 'b' already defined at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:2:11
-
          at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:3:11:
-
            2|   set = { "${"" + "b"}" = 1; };
            3|   set = { "${"b" + ""}" = 2; };
             |           ^


@@ -1,35 +1,27 @@
 error:
        … while calling the 'foldl'' builtin
-
          at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:2:1:
-
            1| # Tests that the result of applying op is forced even if the value is never used
            2| builtins.foldl'
             | ^
            3|   (_: f: f null)
 
        … while calling anonymous lambda
-
          at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:3:7:
-
            2| builtins.foldl'
            3|   (_: f: f null)
             |       ^
            4|   null
 
        … from call site
-
          at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:3:10:
-
            2| builtins.foldl'
            3|   (_: f: f null)
             |          ^
            4|   null
 
        … while calling anonymous lambda
-
          at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:5:6:
-
            4|   null
            5|   [ (_: throw "Not the final value, but is still forced!") (_: 23) ]
             |      ^


@@ -1,8 +1,6 @@
 error:
        … while calling the 'fromTOML' builtin
-
          at /pwd/lang/eval-fail-fromTOML-timestamps.nix:1:1:
-
            1| builtins.fromTOML ''
             | ^
            2|   key = "value"


@@ -1,8 +1,6 @@
 error:
        … while calling the 'toString' builtin
-
          at /pwd/lang/eval-fail-hashfile-missing.nix:4:3:
-
            3| in
            4|   toString (builtins.concatLists (map (hash: map (builtins.hashFile hash) paths) ["md5" "sha1" "sha256" "sha512"]))
             |   ^


@@ -1,8 +1,6 @@
 error:
        … while evaluating one of the elements to concatenate
-
          at /pwd/lang/eval-fail-list.nix:1:2:
-
            1| 8++1
             |  ^
            2|


@@ -1,16 +1,12 @@
 error:
        … from call site
-
          at /pwd/lang/eval-fail-missing-arg.nix:1:1:
-
            1| ({x, y, z}: x + y + z) {x = "foo"; z = "bar";}
             | ^
            2|
 
 error: function 'anonymous lambda' called without required argument 'y'
-
          at /pwd/lang/eval-fail-missing-arg.nix:1:2:
-
            1| ({x, y, z}: x + y + z) {x = "foo"; z = "bar";}
             |  ^
            2|


@@ -1,16 +1,12 @@
 error:
        … in the argument of the not operator
-
          at /pwd/lang/eval-fail-not-throws.nix:1:4:
-
            1| ! (throw "uh oh!")
             |    ^
            2|
 
        … while calling the 'throw' builtin
-
          at /pwd/lang/eval-fail-not-throws.nix:1:4:
-
            1| ! (throw "uh oh!")
             |    ^
            2|


@@ -1,7 +1,5 @@
 error: path has a trailing slash
-
          at /pwd/lang/eval-fail-path-slash.nix:6:12:
-
            5| # and https://nixos.org/nix-dev/2016-June/020829.html
            6| /nix/store/
             |            ^


@@ -1,16 +1,12 @@
 error:
        … in the right operand of the update (//) operator
-
          at /pwd/lang/eval-fail-recursion.nix:1:12:
-
            1| let a = {} // a; in a.foo
             |            ^
            2|
 
 error: infinite recursion encountered
-
          at /pwd/lang/eval-fail-recursion.nix:1:15:
-
            1| let a = {} // a; in a.foo
             |               ^
            2|


@@ -1,17 +1,13 @@
 error:
        … while evaluating the attribute 'body'
-
          at /pwd/lang/eval-fail-remove.nix:4:3:
-
            3|
            4|   body = (removeAttrs attrs ["x"]).x;
             |   ^
            5| }
 
 error: attribute 'x' missing
-
          at /pwd/lang/eval-fail-remove.nix:4:10:
-
            3|
            4|   body = (removeAttrs attrs ["x"]).x;
             |          ^


@@ -1,35 +1,27 @@
 error:
        … while evaluating the attribute 'body'
-
          at /pwd/lang/eval-fail-scope-5.nix:8:3:
-
            7|
            8|   body = f {};
             |   ^
            9|
 
        … from call site
-
          at /pwd/lang/eval-fail-scope-5.nix:8:10:
-
            7|
            8|   body = f {};
             |          ^
            9|
 
        … while calling 'f'
-
          at /pwd/lang/eval-fail-scope-5.nix:6:7:
-
            5|
            6|   f = {x ? y, y ? x}: x + y;
             |       ^
            7|
 
 error: infinite recursion encountered
-
          at /pwd/lang/eval-fail-scope-5.nix:6:12:
-
            5|
            6|   f = {x ? y, y ? x}: x + y;
             |            ^


@@ -1,16 +1,12 @@
 error:
        … while calling the 'seq' builtin
-
          at /pwd/lang/eval-fail-seq.nix:1:1:
-
            1| builtins.seq (abort "foo") 2
             | ^
            2|
 
        … while calling the 'abort' builtin
-
          at /pwd/lang/eval-fail-seq.nix:1:15:
-
            1| builtins.seq (abort "foo") 2
             |               ^
            2|


@@ -1,7 +1,5 @@
 error: undefined variable 'x'
-
          at /pwd/lang/eval-fail-set.nix:1:3:
-
            1| 8.x
             |   ^
            2|


@@ -1,8 +1,6 @@
 error:
        … while calling the 'substring' builtin
-
          at /pwd/lang/eval-fail-substring.nix:1:1:
-
            1| builtins.substring (builtins.sub 0 1) 1 "x"
             | ^
            2|


@@ -1,8 +1,6 @@
 error:
        … while calling the 'toPath' builtin
-
          at /pwd/lang/eval-fail-to-path.nix:1:1:
-
            1| builtins.toPath "foo/bar"
             | ^
            2|


@@ -1,25 +1,19 @@
 error:
        … while calling the 'toJSON' builtin
-
          at /pwd/lang/eval-fail-toJSON.nix:1:1:
-
            1| builtins.toJSON {
             | ^
            2|   a.b = [
 
        … while evaluating attribute 'a'
-
          at /pwd/lang/eval-fail-toJSON.nix:2:3:
-
            1| builtins.toJSON {
            2|   a.b = [
             |   ^
            3|     true
 
        … while evaluating attribute 'b'
-
          at /pwd/lang/eval-fail-toJSON.nix:2:3:
-
            1| builtins.toJSON {
            2|   a.b = [
             |   ^
@@ -28,27 +22,21 @@ error:
        … while evaluating list element at index 3
 
        … while evaluating attribute 'c'
-
          at /pwd/lang/eval-fail-toJSON.nix:7:7:
-
            6|     {
            7|       c.d = throw "hah no";
             |       ^
            8|     }
 
        … while evaluating attribute 'd'
-
          at /pwd/lang/eval-fail-toJSON.nix:7:7:
-
            6|     {
            7|       c.d = throw "hah no";
             |       ^
            8|     }
 
        … while calling the 'throw' builtin
-
          at /pwd/lang/eval-fail-toJSON.nix:7:13:
-
            6|     {
            7|       c.d = throw "hah no";
             |             ^


@@ -1,16 +1,12 @@
 error:
        … from call site
-
          at /pwd/lang/eval-fail-undeclared-arg.nix:1:1:
-
            1| ({x, z}: x + z) {x = "foo"; y = "bla"; z = "bar";}
             | ^
            2|
 
 error: function 'anonymous lambda' called with unexpected argument 'y'
-
          at /pwd/lang/eval-fail-undeclared-arg.nix:1:2:
-
            1| ({x, z}: x + z) {x = "foo"; y = "bla"; z = "bar";}
             |  ^
            2|


@@ -1,8 +1,6 @@
 error:
        … while evaluating an attribute name
-
          at /pwd/lang/eval-fail-using-set-as-attr-name.nix:5:10:
-
            4| in
            5|   attr.${key}
             |          ^


@@ -1,7 +1,5 @@
 error: attribute 'x' already defined at «stdin»:1:3
-
          at «stdin»:3:3:
-
            2|   y = 456;
            3|   x = 789;
             |   ^

Some files were not shown because too many files have changed in this diff.