diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 12c60c649..312c211dd 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -21,7 +21,7 @@ jobs: fetch-depth: 0 - name: Create backport PRs # should be kept in sync with `version` - uses: zeebe-io/backport-action@v1.4.0 + uses: zeebe-io/backport-action@v2.0.0 with: # Config README: https://github.com/zeebe-io/backport-action#backport-action github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/configure.ac b/configure.ac index 225baf6b5..75ce7d01d 100644 --- a/configure.ac +++ b/configure.ac @@ -68,6 +68,9 @@ case "$host_os" in esac +ENSURE_NO_GCC_BUG_80431 + + # Check for pubsetbuf. AC_MSG_CHECKING([for pubsetbuf]) AC_LANG_PUSH(C++) diff --git a/doc/manual/local.mk b/doc/manual/local.mk index 8bf16e9dd..db3daf252 100644 --- a/doc/manual/local.mk +++ b/doc/manual/local.mk @@ -103,7 +103,7 @@ $(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage $(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/generate-settings.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(bindir)/nix @cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp - $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-settings.nix { prefix = "opt-"; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp; + $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-settings.nix { prefix = "conf"; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp; @mv $@.tmp $@ $(d)/nix.json: $(bindir)/nix diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index 2fe77d2c6..794f78a07 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -18,6 +18,8 @@ - [Uninstalling Nix](installation/uninstall.md) - [Nix Store](store/index.md) - [File System Object](store/file-system-object.md) + - [Store Object](store/store-object.md) + - [Store Path](store/store-path.md) - [Nix Language](language/index.md) - [Data Types](language/values.md) - [Language Constructs](language/constructs.md) diff --git a/doc/manual/src/architecture/architecture.md b/doc/manual/src/architecture/architecture.md index 6e832e1f9..79429508f 100644 --- a/doc/manual/src/architecture/architecture.md +++ b/doc/manual/src/architecture/architecture.md @@ -63,7 +63,7 @@ The command line interface and Nix expressions are what users deal with most. > The Nix language itself does not have a notion of *packages* or *configurations*. > As far as we are concerned here, the inputs and results of a build plan are just data. -Underlying the command line interface and the Nix language evaluator is the [Nix store](../glossary.md#gloss-store), a mechanism to keep track of build plans, data, and references between them. +Underlying the command line interface and the Nix language evaluator is the [Nix store](../store/index.md), a mechanism to keep track of build plans, data, and references between them. It can also execute build plans to produce new data, which are made available to the operating system as files. A build plan itself is a series of *build tasks*, together with their build inputs. diff --git a/doc/manual/src/contributing/documentation.md b/doc/manual/src/contributing/documentation.md index 190d367db..75226cd1a 100644 --- a/doc/manual/src/contributing/documentation.md +++ b/doc/manual/src/contributing/documentation.md @@ -162,6 +162,24 @@ Please observe these guidelines to ease reviews: > This is a note. 
``` + Highlight examples as such: + + ```` + > **Example** + > + > ```console + > $ nix --version + > ``` + ```` + + Highlight syntax definitions as such, using [EBNF](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form) notation: + + ```` + > **Syntax** + > + > *attribute-set* = `{` [ *attribute-name* `=` *expression* `;` ... ] `}` + ```` + ### The `@docroot@` variable `@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own. diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 38c144fcc..fe08ceb94 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -210,7 +210,7 @@ See [supported compilation environments](#compilation-environments) and instruct To use the LSP with your editor, you first need to [set up `clangd`](https://clangd.llvm.org/installation#project-setup) by running: ```console -make clean && bear -- make -j$NIX_BUILD_CORES install +make clean && bear -- make -j$NIX_BUILD_CORES default check install ``` Configure your editor to use the `clangd` from the shell, either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration). diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index ad3cc147b..07891175a 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -59,7 +59,7 @@ - [store]{#gloss-store} A collection of store objects, with operations to manipulate that collection. - See [Nix Store] for details. + See [Nix store](./store/index.md) for details. There are many types of stores. See [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md) for a complete list. @@ -86,10 +86,13 @@ - [store path]{#gloss-store-path} - The location of a [store object] in the file system, i.e., an - immediate child of the Nix store directory. + The location of a [store object](@docroot@/store/index.md#store-object) in the file system, i.e., an immediate child of the Nix store directory. - Example: `/nix/store/a040m110amc4h71lds2jmr8qrkj2jhxd-git-2.38.1` + > **Example** + > + > `/nix/store/a040m110amc4h71lds2jmr8qrkj2jhxd-git-2.38.1` + + See [Store Path](@docroot@/store/store-path.md) for details. [store path]: #gloss-store-path diff --git a/doc/manual/src/release-notes/rl-next.md b/doc/manual/src/release-notes/rl-next.md index 276252c37..3cfb53998 100644 --- a/doc/manual/src/release-notes/rl-next.md +++ b/doc/manual/src/release-notes/rl-next.md @@ -15,3 +15,17 @@ - `nix-shell` shebang lines now support single-quoted arguments. - `builtins.fetchTree` is now marked as stable. + + +- The interface for creating and updating lock files has been overhauled: + + - [`nix flake lock`](@docroot@/command-ref/new-cli/nix3-flake-lock.md) only creates lock files and adds missing inputs now. + It will *never* update existing inputs. + + - [`nix flake update`](@docroot@/command-ref/new-cli/nix3-flake-update.md) does the same, but *will* update inputs. + - Passing no arguments will update all inputs of the current flake, just like it already did. + - Passing input names as arguments will ensure only those are updated. This replaces the functionality of `nix flake lock --update-input`. + - To operate on a flake outside the current directory, you must now pass `--flake path/to/flake`; see the example below.
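+
+    For example (illustrative invocations based on the description above; the input name `nixpkgs` is only an example):
+
+    ```console
+    # update all inputs of the flake in the current directory
+    $ nix flake update
+
+    # update only the `nixpkgs` input (previously `nix flake lock --update-input nixpkgs`)
+    $ nix flake update nixpkgs
+
+    # update inputs of a flake in another directory
+    $ nix flake update nixpkgs --flake path/to/flake
+    ```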
+ + - The flake-specific flags `--recreate-lock-file` and `--update-input` have been removed from all commands operating on installables. + They are superseded by `nix flake update`. diff --git a/doc/manual/src/store/index.md b/doc/manual/src/store/index.md index 316e04179..8a5305062 100644 --- a/doc/manual/src/store/index.md +++ b/doc/manual/src/store/index.md @@ -1,4 +1,5 @@ # Nix Store -The *Nix store* is an abstraction used by Nix to store immutable filesystem artifacts (such as software packages) that can have dependencies (*references*) between them. -There are multiple implementations of the Nix store, such as the actual filesystem (`/nix/store`) and binary caches. +The *Nix store* is an abstraction to store immutable file system data (such as software packages) that can have dependencies on other such data. + +There are multiple implementations of Nix stores with different capabilities, such as the actual filesystem (`/nix/store`) or binary caches. diff --git a/doc/manual/src/store/store-object.md b/doc/manual/src/store/store-object.md new file mode 100644 index 000000000..caf5657d1 --- /dev/null +++ b/doc/manual/src/store/store-object.md @@ -0,0 +1,10 @@ +## Store Object + +A Nix store is a collection of *store objects* with *references* between them. +A store object consists of + + - A [file system object](./file-system-object.md) as data + - A set of [store paths](./store-path.md) as references to other store objects + +Store objects are [immutable](https://en.wikipedia.org/wiki/Immutable_object): +Once created, they do not change until they are deleted. diff --git a/doc/manual/src/store/store-path.md b/doc/manual/src/store/store-path.md new file mode 100644 index 000000000..b5ad0c654 --- /dev/null +++ b/doc/manual/src/store/store-path.md @@ -0,0 +1,69 @@ +# Store Path +
+Nix implements references to [store objects](./index.md#store-object) as *store paths*. + +Think of a store path as an [opaque], [unique identifier]: +The only way to obtain a store path is by adding or building store objects. +A store path will always reference exactly one store object. + +[opaque]: https://en.m.wikipedia.org/wiki/Opaque_data_type +[unique identifier]: https://en.m.wikipedia.org/wiki/Unique_identifier + +Store paths are pairs of + +- A 20-byte digest for identification +- A symbolic name for people to read + +> **Example** +> +> - Digest: `b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z` +> - Name: `firefox-33.1` + +To make store objects accessible to operating system processes, stores have to expose store objects through the file system. + +A store path is rendered to a file system path as the concatenation of + +- [Store directory](#store-directory) (typically `/nix/store`) +- Path separator (`/`) +- Digest rendered in a custom variant of [Base32](https://en.wikipedia.org/wiki/Base32) (20 arbitrary bytes become 32 ASCII characters) +- Hyphen (`-`) +- Name + +> **Example** +> +> ``` +> /nix/store/b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z-firefox-33.1 +> |--------| |------------------------------| |----------| +> store directory digest name +> ``` + +## Store Directory + +Every [Nix store](./index.md) has a store directory. + +Not every store can be accessed through the file system. +But if the store has a file system representation, the store directory contains the store’s [file system objects], which can be addressed by [store paths](#store-path).
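+
+> **Example**
+>
+> A minimal shell sketch of how the store directory and the digest and name from above combine into a rendered store path (illustration only; Nix performs this rendering internally):
+>
+> ```console
+> $ storeDir=/nix/store
+> $ digest=b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z
+> $ name=firefox-33.1
+> $ echo "$storeDir/$digest-$name"
+> /nix/store/b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z-firefox-33.1
+> ```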
+ +[file system objects]: ./file-system-object.md + +This means a store path is not just derived from the referenced store object itself, but depends on the store the store object is in. + +> **Note** +> +> The store directory defaults to `/nix/store`, but is in principle arbitrary. + +It is important which store a given store object belongs to: +Files in the store object can contain store paths, and processes may read these paths. +Nix can only guarantee referential integrity if store paths do not cross store boundaries. + +Therefore one can only copy store objects to a different store if + +- The source and target stores' directories match + + or + +- The store object in question has no references, that is, contains no store paths + +One cannot copy a store object to a store with a different store directory. +Instead, it has to be rebuilt, together with all its dependencies. +It is in general not enough to replace the store directory string in file contents, as this may render executables unusable by invalidating their internal offsets or checksums. diff --git a/flake.lock b/flake.lock index 56df9c3fb..991cef1ee 100644 --- a/flake.lock +++ b/flake.lock @@ -34,16 +34,16 @@ }, "nixpkgs": { "locked": { - "lastModified": 1695283060, - "narHash": "sha256-CJz71xhCLlRkdFUSQEL0pIAAfcnWFXMzd9vXhPrnrEg=", + "lastModified": 1698876495, + "narHash": "sha256-nsQo2/mkDUFeAjuu92p0dEqhRvHHiENhkKVIV1y0/Oo=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "31ed632c692e6a36cfc18083b88ece892f863ed4", + "rev": "9eb24edd6a0027fed010ccfe300a9734d029983c", "type": "github" }, "original": { "owner": "NixOS", - "ref": "nixos-23.05-small", + "ref": "release-23.05", "repo": "nixpkgs", "type": "github" } diff --git a/flake.nix b/flake.nix index 3472bf7a8..db2901ff8 100644 --- a/flake.nix +++ b/flake.nix @@ -1,7 +1,9 @@ { description = "The purely functional package manager"; - inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small"; + # FIXME go back to nixos-23.05-small once + # https://github.com/NixOS/nixpkgs/pull/264875 is included. + inputs.nixpkgs.url = "github:NixOS/nixpkgs/release-23.05"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; }; inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; diff --git a/m4/gcc_bug_80431.m4 b/m4/gcc_bug_80431.m4 new file mode 100644 index 000000000..e42f01956 --- /dev/null +++ b/m4/gcc_bug_80431.m4 @@ -0,0 +1,64 @@ +# Ensure that this bug is not present in the C++ toolchain we are using. +# +# URL for bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80431 +# +# The test program is from that issue, with only a slight modification +# to set an exit status instead of printing strings. 
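+#
+# The program exits with status `(a << 1) | b`: 0 when both the constructor
+# and the destructor of the `Option` member ran (the bug is absent), and 2
+# when the constructor body did not run but the destructor did, which the
+# AS_CASE below treats as the bug being present. Any other exit status is
+# reported as unexpected and ignored.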
+AC_DEFUN([ENSURE_NO_GCC_BUG_80431], +[ + AC_MSG_CHECKING([that GCC bug 80431 is fixed]) + AC_LANG_PUSH(C++) + AC_RUN_IFELSE( + [AC_LANG_PROGRAM( + [[ + #include + + static bool a = true; + static bool b = true; + + struct Options { }; + + struct Option + { + Option(Options * options) + { + a = false; + } + + ~Option() + { + b = false; + } + }; + + struct MyOptions : Options { }; + + struct MyOptions2 : virtual MyOptions + { + Option foo{this}; + }; + ]], + [[ + { + MyOptions2 opts; + } + return (a << 1) | b; + ]])], + [status_80431=0], + [status_80431=$?], + [ + # Assume we're bug-free when cross-compiling + ]) + AC_LANG_POP(C++) + AS_CASE([$status_80431], + [0],[ + AC_MSG_RESULT(yes) + ], + [2],[ + AC_MSG_RESULT(no) + AC_MSG_ERROR(Cannot build Nix with C++ compiler with this bug) + ], + [ + AC_MSG_RESULT(unexpected result $status_80431: not expected failure with bug, ignoring) + ]) +]) diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh index dafc0db3b..120c832ac 100644 --- a/src/libcmd/command.hh +++ b/src/libcmd/command.hh @@ -326,6 +326,12 @@ struct MixEnvironment : virtual Args { void setEnviron(); }; +void completeFlakeInputPath( + AddCompletions & completions, + ref evalState, + const std::vector & flakeRefs, + std::string_view prefix); + void completeFlakeRef(AddCompletions & completions, ref store, std::string_view prefix); void completeFlakeRefWithFragment( diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index eff18bbf6..3aff601e0 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -28,7 +28,7 @@ namespace nix { -static void completeFlakeInputPath( +void completeFlakeInputPath( AddCompletions & completions, ref evalState, const std::vector & flakeRefs, @@ -46,13 +46,6 @@ MixFlakeOptions::MixFlakeOptions() { auto category = "Common flake-related options"; - addFlag({ - .longName = "recreate-lock-file", - .description = "Recreate the flake's lock file from scratch.", - .category = category, - .handler = {&lockFlags.recreateLockFile, true} - }); - addFlag({ .longName = "no-update-lock-file", .description = "Do not allow any updates to the flake's lock file.", @@ -85,19 +78,6 @@ MixFlakeOptions::MixFlakeOptions() .handler = {&lockFlags.commitLockFile, true} }); - addFlag({ - .longName = "update-input", - .description = "Update a specific flake input (ignoring its previous entry in the lock file).", - .category = category, - .labels = {"input-path"}, - .handler = {[&](std::string s) { - lockFlags.inputUpdates.insert(flake::parseInputPath(s)); - }}, - .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { - completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix); - }} - }); - addFlag({ .longName = "override-input", .description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`). This implies `--no-write-lock-file`.", diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index 45c9ec8f3..70ae7b584 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -447,8 +447,8 @@ LockedFlake lockFlake( assert(input.ref); - /* Do we have an entry in the existing lock file? And we - don't have a --update-input flag for this input? */ + /* Do we have an entry in the existing lock file? + And the input is not in updateInputs? 
*/ std::shared_ptr oldLock; updatesUsed.insert(inputPath); @@ -472,9 +472,8 @@ LockedFlake lockFlake( node->inputs.insert_or_assign(id, childNode); - /* If we have an --update-input flag for an input - of this input, then we must fetch the flake to - update it. */ + /* If we have this input in updateInputs, then we + must fetch the flake to update it. */ auto lb = lockFlags.inputUpdates.lower_bound(inputPath); auto mustRefetch = @@ -616,7 +615,7 @@ LockedFlake lockFlake( for (auto & i : lockFlags.inputUpdates) if (!updatesUsed.count(i)) - warn("the flag '--update-input %s' does not match any input", printInputPath(i)); + warn("'%s' does not match any input of this flake", printInputPath(i)); /* Check 'follows' inputs. */ newLockFile.check(); @@ -651,14 +650,14 @@ LockedFlake lockFlake( bool lockFileExists = pathExists(outputLockFilePath); + auto s = chomp(diff); if (lockFileExists) { - auto s = chomp(diff); if (s.empty()) warn("updating lock file '%s'", outputLockFilePath); else warn("updating lock file '%s':\n%s", outputLockFilePath, s); } else - warn("creating lock file '%s'", outputLockFilePath); + warn("creating lock file '%s': \n%s", outputLockFilePath, s); std::optional commitMessage = std::nullopt; diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 704e7007b..e3c775d90 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1548,10 +1548,8 @@ static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, try { auto checked = state.checkSourcePath(path); - auto exists = checked.pathExists(); - if (exists && mustBeDir) { - exists = checked.lstat().type == InputAccessor::tDirectory; - } + auto st = checked.maybeLstat(); + auto exists = st && (!mustBeDir || st->type == SourceAccessor::tDirectory); v.mkBool(exists); } catch (SysError & e) { /* Don't give away info from errors while canonicalising diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index 3e654dd53..fd0757ac3 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -6,12 +6,31 @@ namespace nix::fetchers { -std::unique_ptr>> inputSchemes = nullptr; +using InputSchemeMap = std::map>; + +std::unique_ptr inputSchemes = nullptr; void registerInputScheme(std::shared_ptr && inputScheme) { - if (!inputSchemes) inputSchemes = std::make_unique>>(); - inputSchemes->push_back(std::move(inputScheme)); + if (!inputSchemes) + inputSchemes = std::make_unique(); + auto schemeName = inputScheme->schemeName(); + if (inputSchemes->count(schemeName) > 0) + throw Error("Input scheme with name %s already registered", schemeName); + inputSchemes->insert_or_assign(schemeName, std::move(inputScheme)); +} + +nlohmann::json dumpRegisterInputSchemeInfo() { + using nlohmann::json; + + auto res = json::object(); + + for (auto & [name, scheme] : *inputSchemes) { + auto & r = res[name] = json::object(); + r["allowedAttrs"] = scheme->allowedAttrs(); + } + + return res; } Input Input::fromURL(const std::string & url, bool requireTree) @@ -34,7 +53,7 @@ static void fixupInput(Input & input) Input Input::fromURL(const ParsedURL & url, bool requireTree) { - for (auto & inputScheme : *inputSchemes) { + for (auto & [_, inputScheme] : *inputSchemes) { auto res = inputScheme->inputFromURL(url, requireTree); if (res) { experimentalFeatureSettings.require(inputScheme->experimentalFeature()); @@ -49,20 +68,44 @@ Input Input::fromURL(const ParsedURL & url, bool requireTree) Input Input::fromAttrs(Attrs && attrs) { - for (auto & inputScheme : *inputSchemes) { - auto res = 
inputScheme->inputFromAttrs(attrs); - if (res) { - experimentalFeatureSettings.require(inputScheme->experimentalFeature()); - res->scheme = inputScheme; - fixupInput(*res); - return std::move(*res); - } - } + auto schemeName = ({ + auto schemeNameOpt = maybeGetStrAttr(attrs, "type"); + if (!schemeNameOpt) + throw Error("'type' attribute to specify input scheme is required but not provided"); + *std::move(schemeNameOpt); + }); - Input input; - input.attrs = attrs; - fixupInput(input); - return input; + auto raw = [&]() { + // Return an input without a scheme; most operations will fail, + // but not all of them. Doing this is to support those other + // operations which are supposed to be robust on + // unknown/uninterpretable inputs. + Input input; + input.attrs = attrs; + fixupInput(input); + return input; + }; + + std::shared_ptr inputScheme = ({ + auto i = inputSchemes->find(schemeName); + i == inputSchemes->end() ? nullptr : i->second; + }); + + if (!inputScheme) return raw(); + + experimentalFeatureSettings.require(inputScheme->experimentalFeature()); + + auto allowedAttrs = inputScheme->allowedAttrs(); + + for (auto & [name, _] : attrs) + if (name != "type" && allowedAttrs.count(name) == 0) + throw Error("input attribute '%s' not supported by scheme '%s'", name, schemeName); + + auto res = inputScheme->inputFromAttrs(attrs); + if (!res) return raw(); + res->scheme = inputScheme; + fixupInput(*res); + return std::move(*res); } ParsedURL Input::toURL() const @@ -325,7 +368,7 @@ std::pair, Input> InputScheme::getAccessor(ref store, throw UnimplementedError("InputScheme must implement fetch() or getAccessor()"); } -std::optional InputScheme::experimentalFeature() +std::optional InputScheme::experimentalFeature() const { return {}; } diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh index 7b70ab6e2..74789d5fa 100644 --- a/src/libfetchers/fetchers.hh +++ b/src/libfetchers/fetchers.hh @@ -8,6 +8,7 @@ #include "url.hh" #include +#include namespace nix { class Store; class StorePath; struct InputAccessor; } @@ -131,6 +132,24 @@ struct InputScheme virtual std::optional inputFromAttrs(const Attrs & attrs) const = 0; + /** + * What is the name of the scheme? + * + * The `type` attribute is used to select which input scheme is + * used, and then the other fields are forwarded to that input + * scheme. + */ + virtual std::string_view schemeName() const = 0; + + /** + * Allowed attributes in an attribute set that is converted to an + * input. + * + * `type` is not included from this set, because the `type` field is + parsed first to choose which scheme; `type` is always required. + */ + virtual StringSet allowedAttrs() const = 0; + virtual ParsedURL toURL(const Input & input) const; virtual Input applyOverrides( @@ -155,7 +174,7 @@ struct InputScheme /** * Is this `InputScheme` part of an experimental feature? 
*/ - virtual std::optional experimentalFeature(); + virtual std::optional experimentalFeature() const; virtual bool isDirect(const Input & input) const { return true; } @@ -163,4 +182,6 @@ struct InputScheme void registerInputScheme(std::shared_ptr && fetcher); +nlohmann::json dumpRegisterInputSchemeInfo(); + } diff --git a/src/libfetchers/fs-input-accessor.cc b/src/libfetchers/fs-input-accessor.cc index 7638d2d82..81be64482 100644 --- a/src/libfetchers/fs-input-accessor.cc +++ b/src/libfetchers/fs-input-accessor.cc @@ -36,11 +36,11 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor return isAllowed(absPath) && PosixSourceAccessor::pathExists(absPath); } - Stat lstat(const CanonPath & path) override + std::optional maybeLstat(const CanonPath & path) override { auto absPath = makeAbsPath(path); checkAllowed(absPath); - return PosixSourceAccessor::lstat(absPath); + return PosixSourceAccessor::maybeLstat(absPath); } DirEntries readDirectory(const CanonPath & path) override diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 5b14cfdb1..1cd300d75 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -402,12 +402,14 @@ struct GitInputAccessor : InputAccessor return path.isRoot() ? true : (bool) lookup(path); } - Stat lstat(const CanonPath & path) override + std::optional maybeLstat(const CanonPath & path) override { if (path.isRoot()) return Stat { .type = tDirectory }; - auto entry = need(path); + auto entry = lookup(path); + if (!entry) + return std::nullopt; auto mode = git_tree_entry_filemode(entry); diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 5471eb260..9d4f4addc 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -168,14 +168,32 @@ struct GitInputScheme : InputScheme return inputFromAttrs(attrs); } + + std::string_view schemeName() const override + { + return "git"; + } + + StringSet allowedAttrs() const override + { + return { + "url", + "ref", + "rev", + "shallow", + "submodules", + "lastModified", + "revCount", + "narHash", + "allRefs", + "name", + "dirtyRev", + "dirtyShortRev", + }; + } + std::optional inputFromAttrs(const Attrs & attrs) const override { - if (maybeGetStrAttr(attrs, "type") != "git") return {}; - - for (auto & [name, value] : attrs) - if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs" && name != "name" && name != "dirtyRev" && name != "dirtyShortRev") - throw Error("unsupported Git input attribute '%s'", name); - maybeGetBoolAttr(attrs, "shallow"); maybeGetBoolAttr(attrs, "submodules"); maybeGetBoolAttr(attrs, "allRefs"); diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 617fc7468..6c9b29721 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -27,13 +27,11 @@ std::regex hostRegex(hostRegexS, std::regex::ECMAScript); struct GitArchiveInputScheme : InputScheme { - virtual std::string type() const = 0; - virtual std::optional> accessHeaderFromToken(const std::string & token) const = 0; std::optional inputFromURL(const ParsedURL & url, bool requireTree) const override { - if (url.scheme != type()) return {}; + if (url.scheme != schemeName()) return {}; auto path = tokenizeString>(url.path, "/"); @@ -91,7 +89,7 @@ struct GitArchiveInputScheme : InputScheme throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url.url, *ref, rev->gitRev()); 
Input input; - input.attrs.insert_or_assign("type", type()); + input.attrs.insert_or_assign("type", std::string { schemeName() }); input.attrs.insert_or_assign("owner", path[0]); input.attrs.insert_or_assign("repo", path[1]); if (rev) input.attrs.insert_or_assign("rev", rev->gitRev()); @@ -101,14 +99,21 @@ struct GitArchiveInputScheme : InputScheme return input; } + StringSet allowedAttrs() const override + { + return { + "owner", + "repo", + "ref", + "rev", + "narHash", + "lastModified", + "host", + }; + } + std::optional inputFromAttrs(const Attrs & attrs) const override { - if (maybeGetStrAttr(attrs, "type") != type()) return {}; - - for (auto & [name, value] : attrs) - if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified" && name != "host") - throw Error("unsupported input attribute '%s'", name); - getStrAttr(attrs, "owner"); getStrAttr(attrs, "repo"); @@ -128,7 +133,7 @@ struct GitArchiveInputScheme : InputScheme if (ref) path += "/" + *ref; if (rev) path += "/" + rev->to_string(HashFormat::Base16, false); return ParsedURL { - .scheme = type(), + .scheme = std::string { schemeName() }, .path = path, }; } @@ -220,7 +225,7 @@ struct GitArchiveInputScheme : InputScheme return {result.storePath, input}; } - std::optional experimentalFeature() override + std::optional experimentalFeature() const override { return Xp::Flakes; } @@ -228,7 +233,7 @@ struct GitArchiveInputScheme : InputScheme struct GitHubInputScheme : GitArchiveInputScheme { - std::string type() const override { return "github"; } + std::string_view schemeName() const override { return "github"; } std::optional> accessHeaderFromToken(const std::string & token) const override { @@ -309,7 +314,7 @@ struct GitHubInputScheme : GitArchiveInputScheme struct GitLabInputScheme : GitArchiveInputScheme { - std::string type() const override { return "gitlab"; } + std::string_view schemeName() const override { return "gitlab"; } std::optional> accessHeaderFromToken(const std::string & token) const override { @@ -377,7 +382,7 @@ struct GitLabInputScheme : GitArchiveInputScheme struct SourceHutInputScheme : GitArchiveInputScheme { - std::string type() const override { return "sourcehut"; } + std::string_view schemeName() const override { return "sourcehut"; } std::optional> accessHeaderFromToken(const std::string & token) const override { diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc index b18411bdc..8e30284c6 100644 --- a/src/libfetchers/indirect.cc +++ b/src/libfetchers/indirect.cc @@ -50,14 +50,23 @@ struct IndirectInputScheme : InputScheme return input; } + std::string_view schemeName() const override + { + return "indirect"; + } + + StringSet allowedAttrs() const override + { + return { + "id", + "ref", + "rev", + "narHash", + }; + } + std::optional inputFromAttrs(const Attrs & attrs) const override { - if (maybeGetStrAttr(attrs, "type") != "indirect") return {}; - - for (auto & [name, value] : attrs) - if (name != "type" && name != "id" && name != "ref" && name != "rev" && name != "narHash") - throw Error("unsupported indirect input attribute '%s'", name); - auto id = getStrAttr(attrs, "id"); if (!std::regex_match(id, flakeRegex)) throw BadURL("'%s' is not a valid flake ID", id); @@ -93,7 +102,7 @@ struct IndirectInputScheme : InputScheme throw Error("indirect input '%s' cannot be fetched directly", input.to_string()); } - std::optional experimentalFeature() override + std::optional experimentalFeature() const override { return 
Xp::Flakes; } diff --git a/src/libfetchers/memory-input-accessor.cc b/src/libfetchers/memory-input-accessor.cc index 817d063ba..6468ece41 100644 --- a/src/libfetchers/memory-input-accessor.cc +++ b/src/libfetchers/memory-input-accessor.cc @@ -20,12 +20,12 @@ struct MemoryInputAccessorImpl : MemoryInputAccessor return i != files.end(); } - Stat lstat(const CanonPath & path) override + std::optional maybeLstat(const CanonPath & path) override { auto i = files.find(path); if (i != files.end()) return Stat { .type = tRegular, .isExecutable = false }; - throw Error("file '%s' does not exist", path); + return std::nullopt; } DirEntries readDirectory(const CanonPath & path) override diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 97c48afc9..eda33dfe7 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -69,14 +69,25 @@ struct MercurialInputScheme : InputScheme return inputFromAttrs(attrs); } + std::string_view schemeName() const override + { + return "hg"; + } + + StringSet allowedAttrs() const override + { + return { + "url", + "ref", + "rev", + "revCount", + "narHash", + "name", + }; + } + std::optional inputFromAttrs(const Attrs & attrs) const override { - if (maybeGetStrAttr(attrs, "type") != "hg") return {}; - - for (auto & [name, value] : attrs) - if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "revCount" && name != "narHash" && name != "name") - throw Error("unsupported Mercurial input attribute '%s'", name); - parseURL(getStrAttr(attrs, "url")); if (auto ref = maybeGetStrAttr(attrs, "ref")) { diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index 22be0f1fe..f9b973320 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -32,23 +32,30 @@ struct PathInputScheme : InputScheme return input; } + std::string_view schemeName() const override + { + return "path"; + } + + StringSet allowedAttrs() const override + { + return { + "path", + /* Allow the user to pass in "fake" tree info + attributes. This is useful for making a pinned tree work + the same as the repository from which is exported (e.g. + path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...). + */ + "rev", + "revCount", + "lastModified", + "narHash", + }; + } std::optional inputFromAttrs(const Attrs & attrs) const override { - if (maybeGetStrAttr(attrs, "type") != "path") return {}; - getStrAttr(attrs, "path"); - for (auto & [name, value] : attrs) - /* Allow the user to pass in "fake" tree info - attributes. This is useful for making a pinned tree - work the same as the repository from which is exported - (e.g. path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...). */ - if (name == "type" || name == "rev" || name == "revCount" || name == "lastModified" || name == "narHash" || name == "path") - // checked in Input::fromAttrs - ; - else - throw Error("unsupported path input attribute '%s'", name); - Input input; input.attrs = attrs; return input; @@ -135,7 +142,7 @@ struct PathInputScheme : InputScheme return {std::move(*storePath), input}; } - std::optional experimentalFeature() override + std::optional experimentalFeature() const override { return Xp::Flakes; } diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index e1ea9b58b..0062878a9 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -184,7 +184,6 @@ DownloadTarballResult downloadTarball( // An input scheme corresponding to a curl-downloadable resource. 
struct CurlInputScheme : InputScheme { - virtual const std::string inputType() const = 0; const std::set transportUrlSchemes = {"file", "http", "https"}; const bool hasTarballExtension(std::string_view path) const @@ -222,22 +221,27 @@ struct CurlInputScheme : InputScheme url.query.erase("rev"); url.query.erase("revCount"); - input.attrs.insert_or_assign("type", inputType()); + input.attrs.insert_or_assign("type", std::string { schemeName() }); input.attrs.insert_or_assign("url", url.to_string()); return input; } + StringSet allowedAttrs() const override + { + return { + "type", + "url", + "narHash", + "name", + "unpack", + "rev", + "revCount", + "lastModified", + }; + } + std::optional inputFromAttrs(const Attrs & attrs) const override { - auto type = maybeGetStrAttr(attrs, "type"); - if (type != inputType()) return {}; - - // FIXME: some of these only apply to TarballInputScheme. - std::set allowedNames = {"type", "url", "narHash", "name", "unpack", "rev", "revCount", "lastModified"}; - for (auto & [name, value] : attrs) - if (!allowedNames.count(name)) - throw Error("unsupported %s input attribute '%s'", *type, name); - Input input; input.attrs = attrs; @@ -258,14 +262,14 @@ struct CurlInputScheme : InputScheme struct FileInputScheme : CurlInputScheme { - const std::string inputType() const override { return "file"; } + std::string_view schemeName() const override { return "file"; } bool isValidURL(const ParsedURL & url, bool requireTree) const override { auto parsedUrlScheme = parseUrlScheme(url.scheme); return transportUrlSchemes.count(std::string(parsedUrlScheme.transport)) && (parsedUrlScheme.application - ? parsedUrlScheme.application.value() == inputType() + ? parsedUrlScheme.application.value() == schemeName() : (!requireTree && !hasTarballExtension(url.path))); } @@ -278,7 +282,7 @@ struct FileInputScheme : CurlInputScheme struct TarballInputScheme : CurlInputScheme { - const std::string inputType() const override { return "tarball"; } + std::string_view schemeName() const override { return "tarball"; } bool isValidURL(const ParsedURL & url, bool requireTree) const override { @@ -286,7 +290,7 @@ struct TarballInputScheme : CurlInputScheme return transportUrlSchemes.count(std::string(parsedUrlScheme.transport)) && (parsedUrlScheme.application - ? parsedUrlScheme.application.value() == inputType() + ? 
parsedUrlScheme.application.value() == schemeName() : (requireTree || hasTarballExtension(url.path))); } diff --git a/src/libfetchers/union-input-accessor.cc b/src/libfetchers/union-input-accessor.cc index 940c0e06c..ae942cb41 100644 --- a/src/libfetchers/union-input-accessor.cc +++ b/src/libfetchers/union-input-accessor.cc @@ -28,10 +28,10 @@ struct UnionInputAccessor : InputAccessor return accessor->pathExists(subpath); } - Stat lstat(const CanonPath & path) override + std::optional maybeLstat(const CanonPath & path) override { auto [accessor, subpath] = resolve(path); - return accessor->lstat(subpath); + return accessor->maybeLstat(subpath); } DirEntries readDirectory(const CanonPath & path) override diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 2a91233ec..6a52c4c51 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -2,7 +2,7 @@ #include "binary-cache-store.hh" #include "compression.hh" #include "derivations.hh" -#include "fs-accessor.hh" +#include "source-accessor.hh" #include "globals.hh" #include "nar-info.hh" #include "sync.hh" @@ -143,7 +143,7 @@ ref BinaryCacheStore::addToStoreCommon( write the compressed NAR to disk), into a HashSink (to get the NAR hash), and into a NarAccessor (to get the NAR listing). */ HashSink fileHashSink { htSHA256 }; - std::shared_ptr narAccessor; + std::shared_ptr narAccessor; HashSink narHashSink { htSHA256 }; { FdSink fileSink(fdTemp.get()); @@ -195,7 +195,7 @@ ref BinaryCacheStore::addToStoreCommon( if (writeNARListing) { nlohmann::json j = { {"version", 1}, - {"root", listNar(ref(narAccessor), "", true)}, + {"root", listNar(ref(narAccessor), CanonPath::root, true)}, }; upsertFile(std::string(info.path.hashPart()) + ".ls", j.dump(), "application/json"); @@ -206,9 +206,9 @@ ref BinaryCacheStore::addToStoreCommon( specify the NAR file and member containing the debug info. 
*/ if (writeDebugInfo) { - std::string buildIdDir = "/lib/debug/.build-id"; + CanonPath buildIdDir("lib/debug/.build-id"); - if (narAccessor->stat(buildIdDir).type == FSAccessor::tDirectory) { + if (auto st = narAccessor->maybeLstat(buildIdDir); st && st->type == SourceAccessor::tDirectory) { ThreadPool threadPool(25); @@ -231,17 +231,17 @@ ref BinaryCacheStore::addToStoreCommon( std::regex regex1("^[0-9a-f]{2}$"); std::regex regex2("^[0-9a-f]{38}\\.debug$"); - for (auto & s1 : narAccessor->readDirectory(buildIdDir)) { - auto dir = buildIdDir + "/" + s1; + for (auto & [s1, _type] : narAccessor->readDirectory(buildIdDir)) { + auto dir = buildIdDir + s1; - if (narAccessor->stat(dir).type != FSAccessor::tDirectory + if (narAccessor->lstat(dir).type != SourceAccessor::tDirectory || !std::regex_match(s1, regex1)) continue; - for (auto & s2 : narAccessor->readDirectory(dir)) { - auto debugPath = dir + "/" + s2; + for (auto & [s2, _type] : narAccessor->readDirectory(dir)) { + auto debugPath = dir + s2; - if (narAccessor->stat(debugPath).type != FSAccessor::tRegular + if (narAccessor->lstat(debugPath).type != SourceAccessor::tRegular || !std::regex_match(s2, regex2)) continue; @@ -250,7 +250,7 @@ ref BinaryCacheStore::addToStoreCommon( std::string key = "debuginfo/" + buildId; std::string target = "../" + narInfo->url; - threadPool.enqueue(std::bind(doFile, std::string(debugPath, 1), key, target)); + threadPool.enqueue(std::bind(doFile, std::string(debugPath.rel()), key, target)); } } @@ -503,9 +503,9 @@ void BinaryCacheStore::registerDrvOutput(const Realisation& info) { upsertFile(filePath, info.toJSON().dump(), "application/json"); } -ref BinaryCacheStore::getFSAccessor() +ref BinaryCacheStore::getFSAccessor(bool requireValidPath) { - return make_ref(ref(shared_from_this()), localNarCache); + return make_ref(ref(shared_from_this()), requireValidPath, localNarCache); } void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSet & sigs) diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 49f271d24..cea2a571f 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -17,28 +17,28 @@ struct BinaryCacheStoreConfig : virtual StoreConfig { using StoreConfig::StoreConfig; - const Setting compression{(StoreConfig*) this, "xz", "compression", + const Setting compression{this, "xz", "compression", "NAR compression method (`xz`, `bzip2`, `gzip`, `zstd`, or `none`)."}; - const Setting writeNARListing{(StoreConfig*) this, false, "write-nar-listing", + const Setting writeNARListing{this, false, "write-nar-listing", "Whether to write a JSON file that lists the files in each NAR."}; - const Setting writeDebugInfo{(StoreConfig*) this, false, "index-debug-info", + const Setting writeDebugInfo{this, false, "index-debug-info", R"( Whether to index DWARF debug info files by build ID. 
This allows [`dwarffs`](https://github.com/edolstra/dwarffs) to fetch debug info on demand )"}; - const Setting secretKeyFile{(StoreConfig*) this, "", "secret-key", + const Setting secretKeyFile{this, "", "secret-key", "Path to the secret key used to sign the binary cache."}; - const Setting localNarCache{(StoreConfig*) this, "", "local-nar-cache", + const Setting localNarCache{this, "", "local-nar-cache", "Path to a local cache of NARs fetched from this binary cache, used by commands such as `nix store cat`."}; - const Setting parallelCompression{(StoreConfig*) this, false, "parallel-compression", + const Setting parallelCompression{this, false, "parallel-compression", "Enable multi-threaded compression of NARs. This is currently only available for `xz` and `zstd`."}; - const Setting compressionLevel{(StoreConfig*) this, -1, "compression-level", + const Setting compressionLevel{this, -1, "compression-level", R"( The *preset level* to be used when compressing NARs. The meaning and accepted values depend on the compression method selected. @@ -148,7 +148,7 @@ public: void narFromPath(const StorePath & path, Sink & sink) override; - ref getFSAccessor() override; + ref getFSAccessor(bool requireValidPath) override; void addSignatures(const StorePath & storePath, const StringSet & sigs) override; diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 738e7051e..dcb7dc6bc 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -1563,10 +1563,11 @@ void LocalDerivationGoal::addDependency(const StorePath & path) Path source = worker.store.Store::toRealPath(path); Path target = chrootRootDir + worker.store.printStorePath(path); - if (pathExists(target)) + if (pathExists(target)) { // There is a similar debug message in doBind, so only run it in this block to not have double messages. debug("bind-mounting %s -> %s", target, source); throw Error("store path '%s' already exists in the sandbox", worker.store.printStorePath(path)); + } /* Bind-mount the path into the sandbox. This requires entering its mount namespace, which is not possible diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 007ffc05a..105d92f25 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -454,13 +454,13 @@ static void performOp(TunnelLogger * logger, ref store, eagerly consume the entire stream it's given, past the length of the Nar. */ TeeSource savedNARSource(from, saved); - ParseSink sink; /* null sink; just parse the NAR */ + NullParseSink sink; /* just parse the NAR */ parseDump(sink, savedNARSource); } else { /* Incrementally parse the NAR file, stripping the metadata, and streaming the sole file we expect into `saved`. 
*/ - RetrieveRegularNARSink savedRegular { saved }; + RegularFileSink savedRegular { saved }; parseDump(savedRegular, from); if (!savedRegular.regular) throw Error("regular file expected"); } @@ -899,7 +899,7 @@ static void performOp(TunnelLogger * logger, ref store, source = std::make_unique(from, to); else { TeeSource tee { from, saved }; - ParseSink ether; + NullParseSink ether; parseDump(ether, tee); source = std::make_unique(saved.s); } diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index a5ceb29dc..efdad18e1 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -6,7 +6,6 @@ #include "split.hh" #include "common-protocol.hh" #include "common-protocol-impl.hh" -#include "fs-accessor.hh" #include #include diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index 74d6ed3b5..821cda399 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -72,7 +72,7 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store Callback> callback) noexcept override { callback(nullptr); } - virtual ref getFSAccessor() override + virtual ref getFSAccessor(bool requireValidPath) override { unsupported("getFSAccessor"); } }; diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 91b7e30db..52130f8f6 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -65,7 +65,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) /* Extract the NAR from the source. */ StringSink saved; TeeSource tee { source, saved }; - ParseSink ether; + NullParseSink ether; parseDump(ether, tee); uint32_t magic = readInt(source); diff --git a/src/libstore/fs-accessor.hh b/src/libstore/fs-accessor.hh deleted file mode 100644 index 1df19e647..000000000 --- a/src/libstore/fs-accessor.hh +++ /dev/null @@ -1,52 +0,0 @@ -#pragma once -///@file - -#include "types.hh" - -namespace nix { - -/** - * An abstract class for accessing a filesystem-like structure, such - * as a (possibly remote) Nix store or the contents of a NAR file. - */ -class FSAccessor -{ -public: - enum Type { tMissing, tRegular, tSymlink, tDirectory }; - - struct Stat - { - Type type = tMissing; - /** - * regular files only - */ - uint64_t fileSize = 0; - /** - * regular files only - */ - bool isExecutable = false; // regular files only - /** - * regular files only - */ - uint64_t narOffset = 0; // regular files only - }; - - virtual ~FSAccessor() { } - - virtual Stat stat(const Path & path) = 0; - - virtual StringSet readDirectory(const Path & path) = 0; - - /** - * Read a file inside the store. 
- * - * If `requireValidPath` is set to `true` (the default), the path must be - * inside a valid store path, otherwise it just needs to be physically - * present (but not necessarily properly registered) - */ - virtual std::string readFile(const Path & path, bool requireValidPath = true) = 0; - - virtual std::string readLink(const Path & path) = 0; -}; - -} diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index c712f7eb1..731457354 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -17,10 +17,10 @@ struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig { using CommonSSHStoreConfig::CommonSSHStoreConfig; - const Setting remoteProgram{(StoreConfig*) this, "nix-store", "remote-program", + const Setting remoteProgram{this, "nix-store", "remote-program", "Path to the `nix-store` executable on the remote machine."}; - const Setting maxConnections{(StoreConfig*) this, 1, "max-connections", + const Setting maxConnections{this, 1, "max-connections", "Maximum number of concurrent SSH connections."}; const std::string name() override { return "SSH Store"; } @@ -38,7 +38,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor // Hack for getting remote build log output. // Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in // the documentation - const Setting logFD{(StoreConfig*) this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"}; + const Setting logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"}; struct Connection { @@ -363,7 +363,7 @@ public: void ensurePath(const StorePath & path) override { unsupported("ensurePath"); } - virtual ref getFSAccessor() override + virtual ref getFSAccessor(bool requireValidPath) override { unsupported("getFSAccessor"); } /** diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index b224fc3e9..953f3a264 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -1,5 +1,5 @@ #include "archive.hh" -#include "fs-accessor.hh" +#include "posix-source-accessor.hh" #include "store-api.hh" #include "local-fs-store.hh" #include "globals.hh" @@ -13,69 +13,53 @@ LocalFSStore::LocalFSStore(const Params & params) { } -struct LocalStoreAccessor : public FSAccessor +struct LocalStoreAccessor : PosixSourceAccessor { ref store; + bool requireValidPath; - LocalStoreAccessor(ref store) : store(store) { } + LocalStoreAccessor(ref store, bool requireValidPath) + : store(store) + , requireValidPath(requireValidPath) + { } - Path toRealPath(const Path & path, bool requireValidPath = true) + CanonPath toRealPath(const CanonPath & path) { - auto storePath = store->toStorePath(path).first; + auto [storePath, rest] = store->toStorePath(path.abs()); if (requireValidPath && !store->isValidPath(storePath)) throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath)); - return store->getRealStoreDir() + std::string(path, store->storeDir.size()); + return CanonPath(store->getRealStoreDir()) + storePath.to_string() + CanonPath(rest); } - FSAccessor::Stat stat(const Path & path) override + std::optional maybeLstat(const CanonPath & path) override { - auto realPath = toRealPath(path); - - struct stat st; - if (lstat(realPath.c_str(), &st)) { - if (errno == ENOENT || errno == ENOTDIR) return {Type::tMissing, 0, false}; - throw SysError("getting status of '%1%'", path); - } - - if (!S_ISREG(st.st_mode) && !S_ISDIR(st.st_mode) && !S_ISLNK(st.st_mode)) - 
throw Error("file '%1%' has unsupported type", path); - - return { - S_ISREG(st.st_mode) ? Type::tRegular : - S_ISLNK(st.st_mode) ? Type::tSymlink : - Type::tDirectory, - S_ISREG(st.st_mode) ? (uint64_t) st.st_size : 0, - S_ISREG(st.st_mode) && st.st_mode & S_IXUSR}; + return PosixSourceAccessor::maybeLstat(toRealPath(path)); } - StringSet readDirectory(const Path & path) override + DirEntries readDirectory(const CanonPath & path) override { - auto realPath = toRealPath(path); - - auto entries = nix::readDirectory(realPath); - - StringSet res; - for (auto & entry : entries) - res.insert(entry.name); - - return res; + return PosixSourceAccessor::readDirectory(toRealPath(path)); } - std::string readFile(const Path & path, bool requireValidPath = true) override + void readFile( + const CanonPath & path, + Sink & sink, + std::function sizeCallback) override { - return nix::readFile(toRealPath(path, requireValidPath)); + return PosixSourceAccessor::readFile(toRealPath(path), sink, sizeCallback); } - std::string readLink(const Path & path) override + std::string readLink(const CanonPath & path) override { - return nix::readLink(toRealPath(path)); + return PosixSourceAccessor::readLink(toRealPath(path)); } }; -ref LocalFSStore::getFSAccessor() +ref LocalFSStore::getFSAccessor(bool requireValidPath) { return make_ref(ref( - std::dynamic_pointer_cast(shared_from_this()))); + std::dynamic_pointer_cast(shared_from_this())), + requireValidPath); } void LocalFSStore::narFromPath(const StorePath & path, Sink & sink) diff --git a/src/libstore/local-fs-store.hh b/src/libstore/local-fs-store.hh index 488109501..bf855b67e 100644 --- a/src/libstore/local-fs-store.hh +++ b/src/libstore/local-fs-store.hh @@ -11,25 +11,21 @@ struct LocalFSStoreConfig : virtual StoreConfig { using StoreConfig::StoreConfig; - // FIXME: the (StoreConfig*) cast works around a bug in gcc that causes - // it to omit the call to the Setting constructor. Clang works fine - // either way. - - const OptionalPathSetting rootDir{(StoreConfig*) this, std::nullopt, + const OptionalPathSetting rootDir{this, std::nullopt, "root", "Directory prefixed to all other paths."}; - const PathSetting stateDir{(StoreConfig*) this, + const PathSetting stateDir{this, rootDir.get() ? *rootDir.get() + "/nix/var/nix" : settings.nixStateDir, "state", "Directory where Nix will store state."}; - const PathSetting logDir{(StoreConfig*) this, + const PathSetting logDir{this, rootDir.get() ? *rootDir.get() + "/nix/var/log/nix" : settings.nixLogDir, "log", "directory where Nix will store log files."}; - const PathSetting realStoreDir{(StoreConfig*) this, + const PathSetting realStoreDir{this, rootDir.get() ? 
*rootDir.get() + "/nix/store" : storeDir, "real", "Physical path of the Nix store."}; }; @@ -47,7 +43,7 @@ public: LocalFSStore(const Params & params); void narFromPath(const StorePath & path, Sink & sink) override; - ref getFSAccessor() override; + ref getFSAccessor(bool requireValidPath) override; /** * Creates symlink from the `gcRoot` to the `storePath` and diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 1c2f6023a..a5e9426f8 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1200,7 +1200,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, bool narRead = false; Finally cleanup = [&]() { if (!narRead) { - ParseSink sink; + NullParseSink sink; parseDump(sink, source); } }; diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index e97195f5b..fe26a0f27 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -40,12 +40,12 @@ struct LocalStoreConfig : virtual LocalFSStoreConfig { using LocalFSStoreConfig::LocalFSStoreConfig; - Setting requireSigs{(StoreConfig*) this, + Setting requireSigs{this, settings.requireSigs, "require-sigs", "Whether store paths copied into this store should have a trusted signature."}; - Setting readOnly{(StoreConfig*) this, + Setting readOnly{this, false, "read-only", R"( diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index f0dfcb19b..02993680f 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -11,13 +11,7 @@ namespace nix { struct NarMember { - FSAccessor::Type type = FSAccessor::Type::tMissing; - - bool isExecutable = false; - - /* If this is a regular file, position of the contents of this - file in the NAR. */ - uint64_t start = 0, size = 0; + SourceAccessor::Stat stat; std::string target; @@ -25,7 +19,7 @@ struct NarMember std::map children; }; -struct NarAccessor : public FSAccessor +struct NarAccessor : public SourceAccessor { std::optional nar; @@ -57,7 +51,7 @@ struct NarAccessor : public FSAccessor acc.root = std::move(member); parents.push(&acc.root); } else { - if (parents.top()->type != FSAccessor::Type::tDirectory) + if (parents.top()->stat.type != Type::tDirectory) throw Error("NAR file missing parent directory of path '%s'", path); auto result = parents.top()->children.emplace(baseNameOf(path), std::move(member)); parents.push(&result.first->second); @@ -66,12 +60,12 @@ struct NarAccessor : public FSAccessor void createDirectory(const Path & path) override { - createMember(path, {FSAccessor::Type::tDirectory, false, 0, 0}); + createMember(path, {Type::tDirectory, false, 0, 0}); } void createRegularFile(const Path & path) override { - createMember(path, {FSAccessor::Type::tRegular, false, 0, 0}); + createMember(path, {Type::tRegular, false, 0, 0}); } void closeRegularFile() override @@ -79,14 +73,15 @@ struct NarAccessor : public FSAccessor void isExecutable() override { - parents.top()->isExecutable = true; + parents.top()->stat.isExecutable = true; } void preallocateContents(uint64_t size) override { assert(size <= std::numeric_limits::max()); - parents.top()->size = (uint64_t) size; - parents.top()->start = pos; + auto & st = parents.top()->stat; + st.fileSize = (uint64_t) size; + st.narOffset = pos; } void receiveContents(std::string_view data) override @@ -95,7 +90,9 @@ struct NarAccessor : public FSAccessor void createSymlink(const Path & path, const std::string & target) override { createMember(path, - NarMember{FSAccessor::Type::tSymlink, false, 0, 0, target}); + 
NarMember{ + .stat = {.type = Type::tSymlink}, + .target = target}); } size_t read(char * data, size_t len) override @@ -130,18 +127,20 @@ struct NarAccessor : public FSAccessor std::string type = v["type"]; if (type == "directory") { - member.type = FSAccessor::Type::tDirectory; + member.stat = {.type = Type::tDirectory}; for (auto i = v["entries"].begin(); i != v["entries"].end(); ++i) { std::string name = i.key(); recurse(member.children[name], i.value()); } } else if (type == "regular") { - member.type = FSAccessor::Type::tRegular; - member.size = v["size"]; - member.isExecutable = v.value("executable", false); - member.start = v["narOffset"]; + member.stat = { + .type = Type::tRegular, + .fileSize = v["size"], + .isExecutable = v.value("executable", false), + .narOffset = v["narOffset"] + }; } else if (type == "symlink") { - member.type = FSAccessor::Type::tSymlink; + member.stat = {.type = Type::tSymlink}; member.target = v.value("target", ""); } else return; }; @@ -150,134 +149,122 @@ struct NarAccessor : public FSAccessor recurse(root, v); } - NarMember * find(const Path & path) + NarMember * find(const CanonPath & path) { - Path canon = path == "" ? "" : canonPath(path); NarMember * current = &root; - auto end = path.end(); - for (auto it = path.begin(); it != end; ) { - // because it != end, the remaining component is non-empty so we need - // a directory - if (current->type != FSAccessor::Type::tDirectory) return nullptr; - // skip slash (canonPath above ensures that this is always a slash) - assert(*it == '/'); - it += 1; - - // lookup current component - auto next = std::find(it, end, '/'); - auto child = current->children.find(std::string(it, next)); + for (auto & i : path) { + if (current->stat.type != Type::tDirectory) return nullptr; + auto child = current->children.find(std::string(i)); if (child == current->children.end()) return nullptr; current = &child->second; - - it = next; } return current; } - NarMember & get(const Path & path) { + NarMember & get(const CanonPath & path) { auto result = find(path); - if (result == nullptr) + if (!result) throw Error("NAR file does not contain path '%1%'", path); return *result; } - Stat stat(const Path & path) override + std::optional maybeLstat(const CanonPath & path) override { auto i = find(path); - if (i == nullptr) - return {FSAccessor::Type::tMissing, 0, false}; - return {i->type, i->size, i->isExecutable, i->start}; + if (!i) + return std::nullopt; + return i->stat; } - StringSet readDirectory(const Path & path) override + DirEntries readDirectory(const CanonPath & path) override { auto i = get(path); - if (i.type != FSAccessor::Type::tDirectory) + if (i.stat.type != Type::tDirectory) throw Error("path '%1%' inside NAR file is not a directory", path); - StringSet res; + DirEntries res; for (auto & child : i.children) - res.insert(child.first); + res.insert_or_assign(child.first, std::nullopt); return res; } - std::string readFile(const Path & path, bool requireValidPath = true) override + std::string readFile(const CanonPath & path) override { auto i = get(path); - if (i.type != FSAccessor::Type::tRegular) + if (i.stat.type != Type::tRegular) throw Error("path '%1%' inside NAR file is not a regular file", path); - if (getNarBytes) return getNarBytes(i.start, i.size); + if (getNarBytes) return getNarBytes(*i.stat.narOffset, *i.stat.fileSize); assert(nar); - return std::string(*nar, i.start, i.size); + return std::string(*nar, *i.stat.narOffset, *i.stat.fileSize); } - std::string readLink(const Path & path) override + 
std::string readLink(const CanonPath & path) override { auto i = get(path); - if (i.type != FSAccessor::Type::tSymlink) + if (i.stat.type != Type::tSymlink) throw Error("path '%1%' inside NAR file is not a symlink", path); return i.target; } }; -ref makeNarAccessor(std::string && nar) +ref makeNarAccessor(std::string && nar) { return make_ref(std::move(nar)); } -ref makeNarAccessor(Source & source) +ref makeNarAccessor(Source & source) { return make_ref(source); } -ref makeLazyNarAccessor(const std::string & listing, +ref makeLazyNarAccessor(const std::string & listing, GetNarBytes getNarBytes) { return make_ref(listing, getNarBytes); } using nlohmann::json; -json listNar(ref accessor, const Path & path, bool recurse) +json listNar(ref accessor, const CanonPath & path, bool recurse) { - auto st = accessor->stat(path); + auto st = accessor->lstat(path); json obj = json::object(); switch (st.type) { - case FSAccessor::Type::tRegular: + case SourceAccessor::Type::tRegular: obj["type"] = "regular"; - obj["size"] = st.fileSize; + if (st.fileSize) + obj["size"] = *st.fileSize; if (st.isExecutable) obj["executable"] = true; - if (st.narOffset) - obj["narOffset"] = st.narOffset; + if (st.narOffset && *st.narOffset) + obj["narOffset"] = *st.narOffset; break; - case FSAccessor::Type::tDirectory: + case SourceAccessor::Type::tDirectory: obj["type"] = "directory"; { obj["entries"] = json::object(); json &res2 = obj["entries"]; - for (auto & name : accessor->readDirectory(path)) { + for (auto & [name, type] : accessor->readDirectory(path)) { if (recurse) { - res2[name] = listNar(accessor, path + "/" + name, true); + res2[name] = listNar(accessor, path + name, true); } else res2[name] = json::object(); } } break; - case FSAccessor::Type::tSymlink: + case SourceAccessor::Type::tSymlink: obj["type"] = "symlink"; obj["target"] = accessor->readLink(path); break; - case FSAccessor::Type::tMissing: - default: - throw Error("path '%s' does not exist in NAR", path); + case SourceAccessor::Type::tMisc: + assert(false); // cannot happen for NARs } return obj; } diff --git a/src/libstore/nar-accessor.hh b/src/libstore/nar-accessor.hh index 5e19bd3c7..433774524 100644 --- a/src/libstore/nar-accessor.hh +++ b/src/libstore/nar-accessor.hh @@ -1,10 +1,11 @@ #pragma once ///@file +#include "source-accessor.hh" + #include #include -#include "fs-accessor.hh" namespace nix { @@ -14,9 +15,9 @@ struct Source; * Return an object that provides access to the contents of a NAR * file. */ -ref makeNarAccessor(std::string && nar); +ref makeNarAccessor(std::string && nar); -ref makeNarAccessor(Source & source); +ref makeNarAccessor(Source & source); /** * Create a NAR accessor from a NAR listing (in the format produced by @@ -26,7 +27,7 @@ ref makeNarAccessor(Source & source); */ typedef std::function GetNarBytes; -ref makeLazyNarAccessor( +ref makeLazyNarAccessor( const std::string & listing, GetNarBytes getNarBytes); @@ -34,6 +35,6 @@ ref makeLazyNarAccessor( * Write a JSON representation of the contents of a NAR (except file * contents). 
*/ -nlohmann::json listNar(ref accessor, const Path & path, bool recurse); +nlohmann::json listNar(ref accessor, const CanonPath & path, bool recurse); } diff --git a/src/libstore/remote-fs-accessor.cc b/src/libstore/remote-fs-accessor.cc index fcfb527f5..03e57a565 100644 --- a/src/libstore/remote-fs-accessor.cc +++ b/src/libstore/remote-fs-accessor.cc @@ -8,8 +8,9 @@ namespace nix { -RemoteFSAccessor::RemoteFSAccessor(ref store, const Path & cacheDir) +RemoteFSAccessor::RemoteFSAccessor(ref store, bool requireValidPath, const Path & cacheDir) : store(store) + , requireValidPath(requireValidPath) , cacheDir(cacheDir) { if (cacheDir != "") @@ -22,7 +23,7 @@ Path RemoteFSAccessor::makeCacheFile(std::string_view hashPart, const std::strin return fmt("%s/%s.%s", cacheDir, hashPart, ext); } -ref RemoteFSAccessor::addToCache(std::string_view hashPart, std::string && nar) +ref RemoteFSAccessor::addToCache(std::string_view hashPart, std::string && nar) { if (cacheDir != "") { try { @@ -38,7 +39,7 @@ ref RemoteFSAccessor::addToCache(std::string_view hashPart, std::str if (cacheDir != "") { try { - nlohmann::json j = listNar(narAccessor, "", true); + nlohmann::json j = listNar(narAccessor, CanonPath::root, true); writeFile(makeCacheFile(hashPart, "ls"), j.dump()); } catch (...) { ignoreException(); @@ -48,11 +49,10 @@ ref RemoteFSAccessor::addToCache(std::string_view hashPart, std::str return narAccessor; } -std::pair, Path> RemoteFSAccessor::fetch(const Path & path_, bool requireValidPath) +std::pair, CanonPath> RemoteFSAccessor::fetch(const CanonPath & path) { - auto path = canonPath(path_); - - auto [storePath, restPath] = store->toStorePath(path); + auto [storePath, restPath_] = store->toStorePath(path.abs()); + auto restPath = CanonPath(restPath_); if (requireValidPath && !store->isValidPath(storePath)) throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath)); @@ -63,7 +63,7 @@ std::pair, Path> RemoteFSAccessor::fetch(const Path & path_, boo std::string listing; Path cacheFile; - if (cacheDir != "" && pathExists(cacheFile = makeCacheFile(storePath.hashPart(), "nar"))) { + if (cacheDir != "" && nix::pathExists(cacheFile = makeCacheFile(storePath.hashPart(), "nar"))) { try { listing = nix::readFile(makeCacheFile(storePath.hashPart(), "ls")); @@ -101,25 +101,25 @@ std::pair, Path> RemoteFSAccessor::fetch(const Path & path_, boo return {addToCache(storePath.hashPart(), std::move(sink.s)), restPath}; } -FSAccessor::Stat RemoteFSAccessor::stat(const Path & path) +std::optional RemoteFSAccessor::maybeLstat(const CanonPath & path) { auto res = fetch(path); - return res.first->stat(res.second); + return res.first->maybeLstat(res.second); } -StringSet RemoteFSAccessor::readDirectory(const Path & path) +SourceAccessor::DirEntries RemoteFSAccessor::readDirectory(const CanonPath & path) { auto res = fetch(path); return res.first->readDirectory(res.second); } -std::string RemoteFSAccessor::readFile(const Path & path, bool requireValidPath) +std::string RemoteFSAccessor::readFile(const CanonPath & path) { - auto res = fetch(path, requireValidPath); + auto res = fetch(path); return res.first->readFile(res.second); } -std::string RemoteFSAccessor::readLink(const Path & path) +std::string RemoteFSAccessor::readLink(const CanonPath & path) { auto res = fetch(path); return res.first->readLink(res.second); diff --git a/src/libstore/remote-fs-accessor.hh b/src/libstore/remote-fs-accessor.hh index e2673b6f6..d09762a53 100644 --- a/src/libstore/remote-fs-accessor.hh +++ 
b/src/libstore/remote-fs-accessor.hh @@ -1,40 +1,43 @@ #pragma once ///@file -#include "fs-accessor.hh" +#include "source-accessor.hh" #include "ref.hh" #include "store-api.hh" namespace nix { -class RemoteFSAccessor : public FSAccessor +class RemoteFSAccessor : public SourceAccessor { ref store; - std::map> nars; + std::map> nars; + + bool requireValidPath; Path cacheDir; - std::pair, Path> fetch(const Path & path_, bool requireValidPath = true); + std::pair, CanonPath> fetch(const CanonPath & path); friend class BinaryCacheStore; Path makeCacheFile(std::string_view hashPart, const std::string & ext); - ref addToCache(std::string_view hashPart, std::string && nar); + ref addToCache(std::string_view hashPart, std::string && nar); public: RemoteFSAccessor(ref store, + bool requireValidPath = true, const /* FIXME: use std::optional */ Path & cacheDir = ""); - Stat stat(const Path & path) override; + std::optional maybeLstat(const CanonPath & path) override; - StringSet readDirectory(const Path & path) override; + DirEntries readDirectory(const CanonPath & path) override; - std::string readFile(const Path & path, bool requireValidPath = true) override; + std::string readFile(const CanonPath & path) override; - std::string readLink(const Path & path) override; + std::string readLink(const CanonPath & path) override; }; } diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 7bdc25433..f16949f42 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -970,7 +970,7 @@ void RemoteStore::narFromPath(const StorePath & path, Sink & sink) copyNAR(conn->from, sink); } -ref RemoteStore::getFSAccessor() +ref RemoteStore::getFSAccessor(bool requireValidPath) { return make_ref(ref(shared_from_this())); } diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index a1ae82a0f..1cc11af86 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -22,10 +22,10 @@ struct RemoteStoreConfig : virtual StoreConfig { using StoreConfig::StoreConfig; - const Setting maxConnections{(StoreConfig*) this, 1, "max-connections", + const Setting maxConnections{this, 1, "max-connections", "Maximum number of concurrent connections to the Nix daemon."}; - const Setting maxConnectionAge{(StoreConfig*) this, + const Setting maxConnectionAge{this, std::numeric_limits::max(), "max-connection-age", "Maximum age of a connection before it is closed."}; @@ -185,7 +185,7 @@ protected: friend struct ConnectionHandle; - virtual ref getFSAccessor() override; + virtual ref getFSAccessor(bool requireValidPath) override; virtual void narFromPath(const StorePath & path, Sink & sink) override; diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index d2fc6abaf..1a62d92d4 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -193,20 +193,20 @@ struct S3BinaryCacheStoreConfig : virtual BinaryCacheStoreConfig { using BinaryCacheStoreConfig::BinaryCacheStoreConfig; - const Setting profile{(StoreConfig*) this, "", "profile", + const Setting profile{this, "", "profile", R"( The name of the AWS configuration profile to use. By default Nix will use the `default` profile. )"}; - const Setting region{(StoreConfig*) this, Aws::Region::US_EAST_1, "region", + const Setting region{this, Aws::Region::US_EAST_1, "region", R"( The region of the S3 bucket. If your bucket is not in `us–east-1`, you should always explicitly specify the region parameter. 
)"}; - const Setting scheme{(StoreConfig*) this, "", "scheme", + const Setting scheme{this, "", "scheme", R"( The scheme used for S3 requests, `https` (default) or `http`. This option allows you to disable HTTPS for binary caches which don't @@ -218,7 +218,7 @@ struct S3BinaryCacheStoreConfig : virtual BinaryCacheStoreConfig > information. )"}; - const Setting endpoint{(StoreConfig*) this, "", "endpoint", + const Setting endpoint{this, "", "endpoint", R"( The URL of the endpoint of an S3-compatible service such as MinIO. Do not specify this setting if you're using Amazon S3. @@ -229,13 +229,13 @@ struct S3BinaryCacheStoreConfig : virtual BinaryCacheStoreConfig > addressing instead of virtual host based addressing. )"}; - const Setting narinfoCompression{(StoreConfig*) this, "", "narinfo-compression", + const Setting narinfoCompression{this, "", "narinfo-compression", "Compression method for `.narinfo` files."}; - const Setting lsCompression{(StoreConfig*) this, "", "ls-compression", + const Setting lsCompression{this, "", "ls-compression", "Compression method for `.ls` files."}; - const Setting logCompression{(StoreConfig*) this, "", "log-compression", + const Setting logCompression{this, "", "log-compression", R"( Compression method for `log/*` files. It is recommended to use a compression method supported by most web browsers @@ -243,11 +243,11 @@ struct S3BinaryCacheStoreConfig : virtual BinaryCacheStoreConfig )"}; const Setting multipartUpload{ - (StoreConfig*) this, false, "multipart-upload", + this, false, "multipart-upload", "Whether to use multi-part uploads."}; const Setting bufferSize{ - (StoreConfig*) this, 5 * 1024 * 1024, "buffer-size", + this, 5 * 1024 * 1024, "buffer-size", "Size (in bytes) of each part in multi-part uploads."}; const std::string name() override { return "S3 Binary Cache Store"; } diff --git a/src/libstore/ssh-store-config.hh b/src/libstore/ssh-store-config.hh index c27a5d00f..bf55d20cf 100644 --- a/src/libstore/ssh-store-config.hh +++ b/src/libstore/ssh-store-config.hh @@ -9,16 +9,16 @@ struct CommonSSHStoreConfig : virtual StoreConfig { using StoreConfig::StoreConfig; - const Setting sshKey{(StoreConfig*) this, "", "ssh-key", + const Setting sshKey{this, "", "ssh-key", "Path to the SSH private key used to authenticate to the remote machine."}; - const Setting sshPublicHostKey{(StoreConfig*) this, "", "base64-ssh-public-host-key", + const Setting sshPublicHostKey{this, "", "base64-ssh-public-host-key", "The public host key of the remote machine."}; - const Setting compress{(StoreConfig*) this, false, "compress", + const Setting compress{this, false, "compress", "Whether to enable SSH compression."}; - const Setting remoteStore{(StoreConfig*) this, "", "remote-store", + const Setting remoteStore{this, "", "remote-store", R"( [Store URL](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format) to be used on the remote machine. 
The default is `auto` diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index 9c6c42ef4..4a6aad449 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -16,7 +16,7 @@ struct SSHStoreConfig : virtual RemoteStoreConfig, virtual CommonSSHStoreConfig using RemoteStoreConfig::RemoteStoreConfig; using CommonSSHStoreConfig::CommonSSHStoreConfig; - const Setting remoteProgram{(StoreConfig*) this, "nix-daemon", "remote-program", + const Setting remoteProgram{this, "nix-daemon", "remote-program", "Path to the `nix-daemon` executable on the remote machine."}; const std::string name() override { return "Experimental SSH Store"; } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 0399120d1..ac96e8bb1 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -1,5 +1,5 @@ #include "crypto.hh" -#include "fs-accessor.hh" +#include "source-accessor.hh" #include "globals.hh" #include "derivations.hh" #include "store-api.hh" @@ -410,7 +410,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, /* Note that fileSink and unusualHashTee must be mutually exclusive, since they both write to caHashSink. Note that that requisite is currently true because the former is only used in the flat case. */ - RetrieveRegularNARSink fileSink { caHashSink }; + RegularFileSink fileSink { caHashSink }; TeeSink unusualHashTee { narHashSink, caHashSink }; auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256 @@ -428,10 +428,10 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, information to narSink. */ TeeSource tapped { *fileSource, narSink }; - ParseSink blank; + NullParseSink blank; auto & parseSink = method == FileIngestionMethod::Flat - ? fileSink - : blank; + ? (ParseSink &) fileSink + : (ParseSink &) blank; /* The information that flows from tapped (besides being replicated in narSink), is now put in parseSink. */ @@ -1338,12 +1338,12 @@ Derivation Store::derivationFromPath(const StorePath & drvPath) return readDerivation(drvPath); } -Derivation readDerivationCommon(Store& store, const StorePath& drvPath, bool requireValidPath) +static Derivation readDerivationCommon(Store & store, const StorePath & drvPath, bool requireValidPath) { - auto accessor = store.getFSAccessor(); + auto accessor = store.getFSAccessor(requireValidPath); try { return parseDerivation(store, - accessor->readFile(store.printStorePath(drvPath), requireValidPath), + accessor->readFile(CanonPath(store.printStorePath(drvPath))), Derivation::nameFromPath(drvPath)); } catch (FormatError & e) { throw Error("error parsing derivation '%s': %s", store.printStorePath(drvPath), e.msg()); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index e123fccc5..6aa317e3d 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -70,7 +70,7 @@ MakeError(InvalidStoreURI, Error); struct BasicDerivation; struct Derivation; -class FSAccessor; +struct SourceAccessor; class NarInfoDiskCache; class Store; @@ -703,7 +703,7 @@ public: /** * @return An object to access files in the Nix store. 
*/ - virtual ref getFSAccessor() = 0; + virtual ref getFSAccessor(bool requireValidPath = true) = 0; /** * Repair the contents of the given path by redownloading it using diff --git a/src/libstore/uds-remote-store.hh b/src/libstore/uds-remote-store.hh index cdb28a001..a5ac9080a 100644 --- a/src/libstore/uds-remote-store.hh +++ b/src/libstore/uds-remote-store.hh @@ -35,8 +35,8 @@ public: static std::set uriSchemes() { return {"unix"}; } - ref getFSAccessor() override - { return LocalFSStore::getFSAccessor(); } + ref getFSAccessor(bool requireValidPath) override + { return LocalFSStore::getFSAccessor(requireValidPath); } void narFromPath(const StorePath & path, Sink & sink) override { LocalFSStore::narFromPath(path, sink); } diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 3b1a1e0ef..4ca84d357 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -5,12 +5,6 @@ #include // for strcasecmp -#include -#include -#include -#include -#include - #include "archive.hh" #include "util.hh" #include "config.hh" @@ -299,7 +293,7 @@ void copyNAR(Source & source, Sink & sink) // FIXME: if 'source' is the output of dumpPath() followed by EOF, // we should just forward all data directly without parsing. - ParseSink parseSink; /* null sink; just parse the NAR */ + NullParseSink parseSink; /* just parse the NAR */ TeeSource wrapper { source, sink }; diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh index 3530783c1..2cf8ee891 100644 --- a/src/libutil/archive.hh +++ b/src/libutil/archive.hh @@ -73,33 +73,6 @@ time_t dumpPathAndGetMtime(const Path & path, Sink & sink, */ void dumpString(std::string_view s, Sink & sink); -/** - * If the NAR archive contains a single file at top-level, then save - * the contents of the file to `s`. Otherwise barf. - */ -struct RetrieveRegularNARSink : ParseSink -{ - bool regular = true; - Sink & sink; - - RetrieveRegularNARSink(Sink & sink) : sink(sink) { } - - void createDirectory(const Path & path) override - { - regular = false; - } - - void receiveContents(std::string_view data) override - { - sink(data); - } - - void createSymlink(const Path & path, const std::string & target) override - { - regular = false; - } -}; - void parseDump(ParseSink & sink, Source & source); void restorePath(const Path & path, Source & source); diff --git a/src/libutil/args.cc b/src/libutil/args.cc index 6bc3cae07..811353c18 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -255,7 +255,18 @@ bool Args::processArgs(const Strings & args, bool finish) } if (!anyCompleted) exp.handler.fun(ss); - expectedArgs.pop_front(); + + /* Move the list element to the processedArgs. This is almost the same as + `processedArgs.push_back(expectedArgs.front()); expectedArgs.pop_front()`, + except that it will only adjust the next and prev pointers of the list + elements, meaning the actual contents don't move in memory. This is + critical to prevent invalidating internal pointers! */ + processedArgs.splice( + processedArgs.end(), + expectedArgs, + expectedArgs.begin(), + ++expectedArgs.begin()); + res = true; } diff --git a/src/libutil/args.hh b/src/libutil/args.hh index ff2bf3cab..e3b41313f 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -200,13 +200,25 @@ protected: /** * Queue of expected positional argument forms. * - * Positional arugment descriptions are inserted on the back. + * Positional argument descriptions are inserted on the back. 
* * As positional arguments are passed, these are popped from the * front, until there are hopefully none left as all args that were * expected in fact were passed. */ std::list expectedArgs; + /** + * List of processed positional argument forms. + * + * All items removed from `expectedArgs` are added here. After all + * arguments were processed, this list should be exactly the same as + * `expectedArgs` was before. + * + * This list is used to extend the lifetime of the argument forms. + * If this is not done, some closures that reference the command + * itself will segfault. + */ + std::list processedArgs; /** * Process some positional arugments diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh index 727255b45..ac72e47fb 100644 --- a/src/libutil/fmt.hh +++ b/src/libutil/fmt.hh @@ -44,6 +44,11 @@ inline std::string fmt(const std::string & s) return s; } +inline std::string fmt(std::string_view s) +{ + return std::string(s); +} + inline std::string fmt(const char * s) { return s; diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc index a08a723a4..925e6f05d 100644 --- a/src/libutil/fs-sink.cc +++ b/src/libutil/fs-sink.cc @@ -5,6 +5,55 @@ namespace nix { +void copyRecursive( + SourceAccessor & accessor, const CanonPath & from, + ParseSink & sink, const Path & to) +{ + auto stat = accessor.lstat(from); + + switch (stat.type) { + case SourceAccessor::tSymlink: + { + sink.createSymlink(to, accessor.readLink(from)); + break; + } + + case SourceAccessor::tRegular: + { + sink.createRegularFile(to); + if (stat.isExecutable) + sink.isExecutable(); + LambdaSink sink2 { + [&](auto d) { + sink.receiveContents(d); + } + }; + accessor.readFile(from, sink2, [&](uint64_t size) { + sink.preallocateContents(size); + }); + break; + } + + case SourceAccessor::tDirectory: + { + sink.createDirectory(to); + for (auto & [name, _] : accessor.readDirectory(from)) { + copyRecursive( + accessor, from + name, + sink, to + "/" + name); + } + break; + } + + case SourceAccessor::tMisc: + throw Error("file '%1%' has an unsupported type", from); + + default: + abort(); + } +} + + struct RestoreSinkSettings : Config { Setting preallocateContents{this, false, "preallocate-contents", diff --git a/src/libutil/fs-sink.hh b/src/libutil/fs-sink.hh index 6837e2fc4..c22edd390 100644 --- a/src/libutil/fs-sink.hh +++ b/src/libutil/fs-sink.hh @@ -3,6 +3,7 @@ #include "types.hh" #include "serialise.hh" +#include "source-accessor.hh" namespace nix { @@ -11,32 +12,93 @@ namespace nix { */ struct ParseSink { - virtual void createDirectory(const Path & path) { }; + virtual void createDirectory(const Path & path) = 0; - virtual void createRegularFile(const Path & path) { }; - virtual void closeRegularFile() { }; - virtual void isExecutable() { }; + virtual void createRegularFile(const Path & path) = 0; + virtual void receiveContents(std::string_view data) = 0; + virtual void isExecutable() = 0; + virtual void closeRegularFile() = 0; + + virtual void createSymlink(const Path & path, const std::string & target) = 0; + + /** + * An optimization. By default, do nothing. + */ virtual void preallocateContents(uint64_t size) { }; - virtual void receiveContents(std::string_view data) { }; - - virtual void createSymlink(const Path & path, const std::string & target) { }; }; +/** + * Recursively copy file system objects from the source into the sink.
+ */ +void copyRecursive( + SourceAccessor & accessor, const CanonPath & sourcePath, + ParseSink & sink, const Path & destPath); + +/** + * Ignore everything and do nothing + */ +struct NullParseSink : ParseSink +{ + void createDirectory(const Path & path) override { } + void receiveContents(std::string_view data) override { } + void createSymlink(const Path & path, const std::string & target) override { } + void createRegularFile(const Path & path) override { } + void closeRegularFile() override { } + void isExecutable() override { } +}; + +/** + * Write files at the given path + */ struct RestoreSink : ParseSink { Path dstPath; - AutoCloseFD fd; - void createDirectory(const Path & path) override; void createRegularFile(const Path & path) override; - void closeRegularFile() override; - void isExecutable() override; - void preallocateContents(uint64_t size) override; void receiveContents(std::string_view data) override; + void isExecutable() override; + void closeRegularFile() override; void createSymlink(const Path & path, const std::string & target) override; + + void preallocateContents(uint64_t size) override; + +private: + AutoCloseFD fd; +}; + +/** + * Restore a single file at the top level, passing along + * `receiveContents` to the underlying `Sink`. For anything but a single + * file, set `regular = false` so the caller can fail accordingly. + */ +struct RegularFileSink : ParseSink +{ + bool regular = true; + Sink & sink; + + RegularFileSink(Sink & sink) : sink(sink) { } + + void createDirectory(const Path & path) override + { + regular = false; + } + + void receiveContents(std::string_view data) override + { + sink(data); + } + + void createSymlink(const Path & path, const std::string & target) override + { + regular = false; + } + + void createRegularFile(const Path & path) override { } + void closeRegularFile() override { } + void isExecutable() override { } }; } diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index 48b4fe626..d5e32d989 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -44,9 +44,13 @@ bool PosixSourceAccessor::pathExists(const CanonPath & path) return nix::pathExists(path.abs()); } -SourceAccessor::Stat PosixSourceAccessor::lstat(const CanonPath & path) +std::optional PosixSourceAccessor::maybeLstat(const CanonPath & path) { - auto st = nix::lstat(path.abs()); + struct stat st; + if (::lstat(path.c_str(), &st)) { + if (errno == ENOENT) return std::nullopt; + throw SysError("getting status of '%s'", showPath(path)); + } mtime = std::max(mtime, st.st_mtime); return Stat { .type = @@ -54,7 +58,8 @@ SourceAccessor::Stat PosixSourceAccessor::lstat(const CanonPath & path) S_ISDIR(st.st_mode) ? tDirectory : S_ISLNK(st.st_mode) ? tSymlink : tMisc, - .isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR + .fileSize = S_ISREG(st.st_mode) ?
std::optional(st.st_size) : std::nullopt, + .isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR, }; } diff --git a/src/libutil/posix-source-accessor.hh b/src/libutil/posix-source-accessor.hh index 608f96ee2..cf087d26e 100644 --- a/src/libutil/posix-source-accessor.hh +++ b/src/libutil/posix-source-accessor.hh @@ -22,7 +22,7 @@ struct PosixSourceAccessor : SourceAccessor bool pathExists(const CanonPath & path) override; - Stat lstat(const CanonPath & path) override; + std::optional maybeLstat(const CanonPath & path) override; DirEntries readDirectory(const CanonPath & path) override; diff --git a/src/libutil/source-accessor.cc b/src/libutil/source-accessor.cc index d168a9667..e2114e18f 100644 --- a/src/libutil/source-accessor.cc +++ b/src/libutil/source-accessor.cc @@ -10,6 +10,11 @@ SourceAccessor::SourceAccessor() { } +bool SourceAccessor::pathExists(const CanonPath & path) +{ + return maybeLstat(path).has_value(); +} + std::string SourceAccessor::readFile(const CanonPath & path) { StringSink sink; @@ -42,12 +47,12 @@ Hash SourceAccessor::hashPath( return sink.finish().first; } -std::optional SourceAccessor::maybeLstat(const CanonPath & path) +SourceAccessor::Stat SourceAccessor::lstat(const CanonPath & path) { - // FIXME: merge these into one operation. - if (!pathExists(path)) - return {}; - return lstat(path); + if (auto st = maybeLstat(path)) + return *st; + else + throw Error("path '%s' does not exist", showPath(path)); } std::string SourceAccessor::showPath(const CanonPath & path) diff --git a/src/libutil/source-accessor.hh b/src/libutil/source-accessor.hh index fd823aa39..1a4e80361 100644 --- a/src/libutil/source-accessor.hh +++ b/src/libutil/source-accessor.hh @@ -40,7 +40,7 @@ struct SourceAccessor Sink & sink, std::function sizeCallback = [](uint64_t size){}); - virtual bool pathExists(const CanonPath & path) = 0; + virtual bool pathExists(const CanonPath & path); enum Type { tRegular, tSymlink, tDirectory, @@ -57,13 +57,29 @@ struct SourceAccessor struct Stat { Type type = tMisc; - //uint64_t fileSize = 0; // regular files only - bool isExecutable = false; // regular files only + + /** + * For regular files only: the size of the file. Not all + * accessors return this since it may be too expensive to + * compute. + */ + std::optional fileSize; + + /** + * For regular files only: whether this is an executable. + */ + bool isExecutable = false; + + /** + * For regular files only: the position of the contents of this + * file in the NAR. Only returned by NAR accessors. 
+ */ + std::optional narOffset; }; - virtual Stat lstat(const CanonPath & path) = 0; + Stat lstat(const CanonPath & path); - std::optional maybeLstat(const CanonPath & path); + virtual std::optional maybeLstat(const CanonPath & path) = 0; typedef std::optional DirEntry; diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc old mode 100755 new mode 100644 diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc old mode 100755 new mode 100644 diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc index 504e35c81..54cc6a17f 100644 --- a/src/nix/bundle.cc +++ b/src/nix/bundle.cc @@ -4,7 +4,6 @@ #include "shared.hh" #include "store-api.hh" #include "local-fs-store.hh" -#include "fs-accessor.hh" #include "eval-inline.hh" using namespace nix; diff --git a/src/nix/cat.cc b/src/nix/cat.cc index 60aa66ce0..6e5a736f2 100644 --- a/src/nix/cat.cc +++ b/src/nix/cat.cc @@ -1,6 +1,5 @@ #include "command.hh" #include "store-api.hh" -#include "fs-accessor.hh" #include "nar-accessor.hh" using namespace nix; @@ -9,15 +8,12 @@ struct MixCat : virtual Args { std::string path; - void cat(ref accessor) + void cat(ref accessor) { - auto st = accessor->stat(path); - if (st.type == FSAccessor::Type::tMissing) - throw Error("path '%1%' does not exist", path); - if (st.type != FSAccessor::Type::tRegular) + auto st = accessor->lstat(CanonPath(path)); + if (st.type != SourceAccessor::Type::tRegular) throw Error("path '%1%' is not a regular file", path); - - writeFull(STDOUT_FILENO, accessor->readFile(path)); + writeFull(STDOUT_FILENO, accessor->readFile(CanonPath(path))); } }; diff --git a/src/nix/flake-lock.md b/src/nix/flake-lock.md index 2af0ad81e..6d10258e3 100644 --- a/src/nix/flake-lock.md +++ b/src/nix/flake-lock.md @@ -2,37 +2,39 @@ R""( # Examples -* Update the `nixpkgs` and `nix` inputs of the flake in the current - directory: +* Create the lock file for the flake in the current directory: ```console - # nix flake lock --update-input nixpkgs --update-input nix - * Updated 'nix': 'github:NixOS/nix/9fab14adbc3810d5cc1f88672fde1eee4358405c' -> 'github:NixOS/nix/8927cba62f5afb33b01016d5c4f7f8b7d0adde3c' - * Updated 'nixpkgs': 'github:NixOS/nixpkgs/3d2d8f281a27d466fa54b469b5993f7dde198375' -> 'github:NixOS/nixpkgs/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293' + # nix flake lock + warning: creating lock file '/home/myself/repos/testflake/flake.lock': + • Added input 'nix': + 'github:NixOS/nix/9fab14adbc3810d5cc1f88672fde1eee4358405c' (2023-06-28) + • Added input 'nixpkgs': + 'github:NixOS/nixpkgs/3d2d8f281a27d466fa54b469b5993f7dde198375' (2023-06-30) ``` +* Add missing inputs to the lock file for a flake in a different directory: + + ```console + # nix flake lock ~/repos/another + warning: updating lock file '/home/myself/repos/another/flake.lock': + • Added input 'nixpkgs': + 'github:NixOS/nixpkgs/3d2d8f281a27d466fa54b469b5993f7dde198375' (2023-06-30) + ``` + + > **Note** + > + > When trying to refer to a flake in a subdirectory, write `./another` + > instead of `another`. + > Otherwise Nix will try to look up the flake in the registry. + # Description -This command updates the lock file of a flake (`flake.lock`) so that -it contains a lock for every flake input specified in -`flake.nix`. Existing lock file entries are not updated unless -required by a flag such as `--update-input`. +This command adds inputs to the lock file of a flake (`flake.lock`) +so that it contains a lock for every flake input specified in +`flake.nix`. 
Existing lock file entries are not updated. -Note that every command that operates on a flake will also update the -lock file if needed, and supports the same flags. Therefore, - -```console -# nix flake lock --update-input nixpkgs -# nix build -``` - -is equivalent to: - -```console -# nix build --update-input nixpkgs -``` - -Thus, this command is only useful if you want to update the lock file -separately from any other action such as building. +If you want to update existing lock entries, use +[`nix flake update`](@docroot@/command-ref/new-cli/nix3-flake-update.md) )"" diff --git a/src/nix/flake-update.md b/src/nix/flake-update.md index 8c6042d94..63df3b12a 100644 --- a/src/nix/flake-update.md +++ b/src/nix/flake-update.md @@ -2,33 +2,57 @@ R""( # Examples -* Recreate the lock file (i.e. update all inputs) and commit the new - lock file: +* Update all inputs (i.e. recreate the lock file from scratch): ```console - # nix flake update --commit-lock-file - * Updated 'nix': 'github:NixOS/nix/9fab14adbc3810d5cc1f88672fde1eee4358405c' -> 'github:NixOS/nix/8927cba62f5afb33b01016d5c4f7f8b7d0adde3c' - * Updated 'nixpkgs': 'github:NixOS/nixpkgs/3d2d8f281a27d466fa54b469b5993f7dde198375' -> 'github:NixOS/nixpkgs/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293' - … - warning: committed new revision '158bcbd9d6cc08ab859c0810186c1beebc982aad' + # nix flake update + warning: updating lock file '/home/myself/repos/testflake/flake.lock': + • Updated input 'nix': + 'github:NixOS/nix/9fab14adbc3810d5cc1f88672fde1eee4358405c' (2023-06-28) + → 'github:NixOS/nix/8927cba62f5afb33b01016d5c4f7f8b7d0adde3c' (2023-07-11) + • Updated input 'nixpkgs': + 'github:NixOS/nixpkgs/3d2d8f281a27d466fa54b469b5993f7dde198375' (2023-06-30) + → 'github:NixOS/nixpkgs/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293' (2023-07-05) ``` +* Update only a single input: + + ```console + # nix flake update nixpkgs + warning: updating lock file '/home/myself/repos/testflake/flake.lock': + • Updated input 'nixpkgs': + 'github:NixOS/nixpkgs/3d2d8f281a27d466fa54b469b5993f7dde198375' (2023-06-30) + → 'github:NixOS/nixpkgs/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293' (2023-07-05) + ``` + +* Update only a single input of a flake in a different directory: + + ```console + # nix flake update nixpkgs --flake ~/repos/another + warning: updating lock file '/home/myself/repos/another/flake.lock': + • Updated input 'nixpkgs': + 'github:NixOS/nixpkgs/3d2d8f281a27d466fa54b469b5993f7dde198375' (2023-06-30) + → 'github:NixOS/nixpkgs/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293' (2023-07-05) + ``` + + > **Note** + > + > When trying to refer to a flake in a subdirectory, write `./another` + > instead of `another`. + > Otherwise Nix will try to look up the flake in the registry. + # Description -This command recreates the lock file of a flake (`flake.lock`), thus -updating the lock for every unlocked input (like `nixpkgs`) to its -current version. This is equivalent to passing `--recreate-lock-file` -to any command that operates on a flake. That is, +This command updates the inputs in a lock file (`flake.lock`). +**By default, all inputs are updated**. If the lock file doesn't exist +yet, it will be created. If inputs are not in the lock file yet, they will be added. -```console -# nix flake update -# nix build -``` +Unlike other `nix flake` commands, `nix flake update` takes a list of names of inputs +to update as its positional arguments and operates on the flake in the current directory. +You can pass a different flake-url with `--flake` to override that default. 
-is equivalent to: - -```console -# nix build --recreate-lock-file -``` +The related command [`nix flake lock`](@docroot@/command-ref/new-cli/nix3-flake-lock.md) +also creates lock files and adds missing inputs, but is safer as it +will never update inputs already in the lock file. )"" diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 0116eff2e..e8906a252 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -24,8 +24,10 @@ using namespace nix; using namespace nix::flake; using json = nlohmann::json; +struct CmdFlakeUpdate; class FlakeCommand : virtual Args, public MixFlakeOptions { +protected: std::string flakeUrl = "."; public: @@ -63,6 +65,8 @@ public: struct CmdFlakeUpdate : FlakeCommand { +public: + std::string description() override { return "update flake lock file"; @@ -70,9 +74,31 @@ struct CmdFlakeUpdate : FlakeCommand CmdFlakeUpdate() { + expectedArgs.clear(); + addFlag({ + .longName="flake", + .description="The flake to operate on. Default is the current directory.", + .labels={"flake-url"}, + .handler={&flakeUrl}, + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { + completeFlakeRef(completions, getStore(), prefix); + }} + }); + expectArgs({ + .label="inputs", + .optional=true, + .handler={[&](std::string inputToUpdate){ + auto inputPath = flake::parseInputPath(inputToUpdate); + if (lockFlags.inputUpdates.contains(inputPath)) + warn("Input '%s' was specified multiple times. You may have done this by accident.", inputToUpdate); + lockFlags.inputUpdates.insert(inputPath); + }}, + .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { + completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix); + }} + }); + /* Remove flags that don't make sense. */ - removeFlag("recreate-lock-file"); - removeFlag("update-input"); removeFlag("no-update-lock-file"); removeFlag("no-write-lock-file"); } @@ -87,8 +113,9 @@ struct CmdFlakeUpdate : FlakeCommand void run(nix::ref store) override { settings.tarballTtl = 0; + auto updateAll = lockFlags.inputUpdates.empty(); - lockFlags.recreateLockFile = true; + lockFlags.recreateLockFile = updateAll; lockFlags.writeLockFile = true; lockFlags.applyNixConfig = true; diff --git a/src/nix/ls.cc b/src/nix/ls.cc index c990a303c..231456c9c 100644 --- a/src/nix/ls.cc +++ b/src/nix/ls.cc @@ -1,6 +1,5 @@ #include "command.hh" #include "store-api.hh" -#include "fs-accessor.hh" #include "nar-accessor.hh" #include "common-args.hh" #include @@ -39,61 +38,58 @@ struct MixLs : virtual Args, MixJSON }); } - void listText(ref accessor) + void listText(ref accessor) { - std::function doPath; + std::function doPath; - auto showFile = [&](const Path & curPath, const std::string & relPath) { + auto showFile = [&](const CanonPath & curPath, std::string_view relPath) { if (verbose) { - auto st = accessor->stat(curPath); + auto st = accessor->lstat(curPath); std::string tp = - st.type == FSAccessor::Type::tRegular ? + st.type == SourceAccessor::Type::tRegular ? (st.isExecutable ? "-r-xr-xr-x" : "-r--r--r--") : - st.type == FSAccessor::Type::tSymlink ? "lrwxrwxrwx" : + st.type == SourceAccessor::Type::tSymlink ?
"lrwxrwxrwx" : "dr-xr-xr-x"; - auto line = fmt("%s %20d %s", tp, st.fileSize, relPath); - if (st.type == FSAccessor::Type::tSymlink) + auto line = fmt("%s %20d %s", tp, st.fileSize.value_or(0), relPath); + if (st.type == SourceAccessor::Type::tSymlink) line += " -> " + accessor->readLink(curPath); logger->cout(line); - if (recursive && st.type == FSAccessor::Type::tDirectory) + if (recursive && st.type == SourceAccessor::Type::tDirectory) doPath(st, curPath, relPath, false); } else { logger->cout(relPath); if (recursive) { - auto st = accessor->stat(curPath); - if (st.type == FSAccessor::Type::tDirectory) + auto st = accessor->lstat(curPath); + if (st.type == SourceAccessor::Type::tDirectory) doPath(st, curPath, relPath, false); } } }; - doPath = [&](const FSAccessor::Stat & st, const Path & curPath, - const std::string & relPath, bool showDirectory) + doPath = [&](const SourceAccessor::Stat & st, const CanonPath & curPath, + std::string_view relPath, bool showDirectory) { - if (st.type == FSAccessor::Type::tDirectory && !showDirectory) { + if (st.type == SourceAccessor::Type::tDirectory && !showDirectory) { auto names = accessor->readDirectory(curPath); - for (auto & name : names) - showFile(curPath + "/" + name, relPath + "/" + name); + for (auto & [name, type] : names) + showFile(curPath + name, relPath + "/" + name); } else showFile(curPath, relPath); }; - auto st = accessor->stat(path); - if (st.type == FSAccessor::Type::tMissing) - throw Error("path '%1%' does not exist", path); - doPath(st, path, - st.type == FSAccessor::Type::tDirectory ? "." : std::string(baseNameOf(path)), + auto path2 = CanonPath(path); + auto st = accessor->lstat(path2); + doPath(st, path2, + st.type == SourceAccessor::Type::tDirectory ? "." : path2.baseName().value_or(""), showDirectory); } - void list(ref accessor) + void list(ref accessor) { - if (path == "/") path = ""; - if (json) { if (showDirectory) throw UsageError("'--directory' is useless with '--json'"); - logger->cout("%s", listNar(accessor, path, recursive)); + logger->cout("%s", listNar(accessor, CanonPath(path), recursive)); } else listText(accessor); } diff --git a/src/nix/main.cc b/src/nix/main.cc index ffba10099..d20bc1f8a 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -188,6 +188,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs j["experimentalFeature"] = storeConfig->experimentalFeature(); } res["stores"] = std::move(stores); + res["fetchers"] = fetchers::dumpRegisterInputSchemeInfo(); return res.dump(); } diff --git a/src/nix/run.cc b/src/nix/run.cc index 1baf299ab..1465e8cde 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -6,7 +6,7 @@ #include "derivations.hh" #include "local-store.hh" #include "finally.hh" -#include "fs-accessor.hh" +#include "source-accessor.hh" #include "progress-bar.hh" #include "eval.hh" #include "build/personality.hh" @@ -119,9 +119,9 @@ struct CmdShell : InstallablesCommand, MixEnvironment if (true) unixPath.push_front(store->printStorePath(path) + "/bin"); - auto propPath = store->printStorePath(path) + "/nix-support/propagated-user-env-packages"; - if (accessor->stat(propPath).type == FSAccessor::tRegular) { - for (auto & p : tokenizeString(readFile(propPath))) + auto propPath = CanonPath(store->printStorePath(path)) + "nix-support" + "propagated-user-env-packages"; + if (auto st = accessor->maybeLstat(propPath); st && st->type == SourceAccessor::tRegular) { + for (auto & p : tokenizeString(accessor->readFile(propPath))) todo.push(store->parseStorePath(p)); } } diff 
--git a/src/nix/why-depends.cc b/src/nix/why-depends.cc index 055cf6d0d..aecf65922 100644 --- a/src/nix/why-depends.cc +++ b/src/nix/why-depends.cc @@ -1,7 +1,7 @@ #include "command.hh" #include "store-api.hh" #include "progress-bar.hh" -#include "fs-accessor.hh" +#include "source-accessor.hh" #include "shared.hh" #include @@ -175,7 +175,7 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions struct BailOut { }; printNode = [&](Node & node, const std::string & firstPad, const std::string & tailPad) { - auto pathS = store->printStorePath(node.path); + CanonPath pathS(store->printStorePath(node.path)); assert(node.dist != inf); if (precise) { @@ -183,7 +183,7 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions firstPad, node.visited ? "\e[38;5;244m" : "", firstPad != "" ? "→ " : "", - pathS); + pathS.abs()); } if (node.path == dependencyPath && !all @@ -210,24 +210,25 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions contain the reference. */ std::map hits; - std::function visitPath; + std::function visitPath; - visitPath = [&](const Path & p) { - auto st = accessor->stat(p); + visitPath = [&](const CanonPath & p) { + auto st = accessor->maybeLstat(p); + assert(st); - auto p2 = p == pathS ? "/" : std::string(p, pathS.size() + 1); + auto p2 = p == pathS ? "/" : p.abs().substr(pathS.abs().size() + 1); auto getColour = [&](const std::string & hash) { return hash == dependencyPathHash ? ANSI_GREEN : ANSI_BLUE; }; - if (st.type == FSAccessor::Type::tDirectory) { + if (st->type == SourceAccessor::Type::tDirectory) { auto names = accessor->readDirectory(p); - for (auto & name : names) - visitPath(p + "/" + name); + for (auto & [name, type] : names) + visitPath(p + name); } - else if (st.type == FSAccessor::Type::tRegular) { + else if (st->type == SourceAccessor::Type::tRegular) { auto contents = accessor->readFile(p); for (auto & hash : hashes) { @@ -245,7 +246,7 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions } } - else if (st.type == FSAccessor::Type::tSymlink) { + else if (st->type == SourceAccessor::Type::tSymlink) { auto target = accessor->readLink(p); for (auto & hash : hashes) { diff --git a/tests/functional/completions.sh b/tests/functional/completions.sh index 7c1e4b287..d3d5bbd48 100644 --- a/tests/functional/completions.sh +++ b/tests/functional/completions.sh @@ -44,15 +44,18 @@ EOF # Input override completion [[ "$(NIX_GET_COMPLETIONS=4 nix build ./foo --override-input '')" == $'normal\na\t' ]] [[ "$(NIX_GET_COMPLETIONS=5 nix flake show ./foo --override-input '')" == $'normal\na\t' ]] +cd ./foo +[[ "$(NIX_GET_COMPLETIONS=3 nix flake update '')" == $'normal\na\t' ]] +cd .. 
+[[ "$(NIX_GET_COMPLETIONS=5 nix flake update --flake './foo' '')" == $'normal\na\t' ]] ## With multiple input flakes [[ "$(NIX_GET_COMPLETIONS=5 nix build ./foo ./bar --override-input '')" == $'normal\na\t\nb\t' ]] ## With tilde expansion [[ "$(HOME=$PWD NIX_GET_COMPLETIONS=4 nix build '~/foo' --override-input '')" == $'normal\na\t' ]] -[[ "$(HOME=$PWD NIX_GET_COMPLETIONS=5 nix flake show '~/foo' --update-input '')" == $'normal\na\t' ]] -[[ "$(HOME=$PWD NIX_GET_COMPLETIONS=4 nix run '~/foo' --update-input '')" == $'normal\na\t' ]] +[[ "$(HOME=$PWD NIX_GET_COMPLETIONS=5 nix flake update --flake '~/foo' '')" == $'normal\na\t' ]] ## Out of order -[[ "$(NIX_GET_COMPLETIONS=3 nix build --update-input '' ./foo)" == $'normal\na\t' ]] -[[ "$(NIX_GET_COMPLETIONS=4 nix build ./foo --update-input '' ./bar)" == $'normal\na\t\nb\t' ]] +[[ "$(NIX_GET_COMPLETIONS=3 nix build --override-input '' '' ./foo)" == $'normal\na\t' ]] +[[ "$(NIX_GET_COMPLETIONS=4 nix build ./foo --override-input '' '' ./bar)" == $'normal\na\t\nb\t' ]] # Cli flag completion NIX_GET_COMPLETIONS=2 nix build --log-form | grep -- "--log-format" diff --git a/tests/functional/flakes/circular.sh b/tests/functional/flakes/circular.sh index 09cd02edf..d3bb8e8a3 100644 --- a/tests/functional/flakes/circular.sh +++ b/tests/functional/flakes/circular.sh @@ -42,7 +42,8 @@ git -C $flakeB commit -a -m 'Foo' sed -i $flakeB/flake.nix -e 's/456/789/' git -C $flakeB commit -a -m 'Foo' -[[ $(nix eval --update-input b $flakeA#foo) = 1912 ]] +nix flake update b --flake $flakeA +[[ $(nix eval $flakeA#foo) = 1912 ]] # Test list-inputs with circular dependencies nix flake metadata $flakeA diff --git a/tests/functional/flakes/flakes.sh b/tests/functional/flakes/flakes.sh index 70de28628..b0038935c 100644 --- a/tests/functional/flakes/flakes.sh +++ b/tests/functional/flakes/flakes.sh @@ -300,7 +300,7 @@ nix build -o "$TEST_ROOT/result" flake4#xyzzy nix flake lock "$flake3Dir" [[ -z $(git -C "$flake3Dir" diff master || echo failed) ]] -nix flake update "$flake3Dir" --override-flake flake2 nixpkgs +nix flake update --flake "$flake3Dir" --override-flake flake2 nixpkgs [[ ! -z $(git -C "$flake3Dir" diff master || echo failed) ]] # Make branch "removeXyzzy" where flake3 doesn't have xyzzy anymore @@ -437,7 +437,7 @@ cat > "$flake3Dir/flake.nix" < baz.cat-nar diff -u baz.cat-nar $storePath/foo/baz +# Check that 'nix store cat' fails on invalid store paths. +invalidPath="$(dirname $storePath)/99999999999999999999999999999999-foo" +mv $storePath $invalidPath +expect 1 nix store cat $invalidPath/foo/baz +mv $invalidPath $storePath + # Test --json. diff -u \ <(nix nar ls --json $narFile / | jq -S) \ @@ -46,7 +52,7 @@ diff -u \ <(echo '{"type":"regular","size":0}' | jq -S) # Test missing files. -expect 1 nix store ls --json -R $storePath/xyzzy 2>&1 | grep 'does not exist in NAR' +expect 1 nix store ls --json -R $storePath/xyzzy 2>&1 | grep 'does not exist' expect 1 nix store ls $storePath/xyzzy 2>&1 | grep 'does not exist' # Test failure to dump.