mirror of https://github.com/privatevoid-net/nix-super.git
synced 2024-11-22 14:06:16 +02:00

commit 2220a4a22c
Merge remote-tracking branch 'upstream/master' into package-nix

153 changed files with 2676 additions and 1547 deletions
.github/labeler.yml (vendored, 27 changes)

@@ -1,23 +1,30 @@
 "documentation":
-  - doc/manual/*
-  - src/nix/**/*.md
+  - changed-files:
+    - any-glob-to-any-file: "doc/manual/*"
+    - any-glob-to-any-file: "src/nix/**/*.md"

 "store":
-  - src/libstore/store-api.*
-  - src/libstore/*-store.*
+  - changed-files:
+    - any-glob-to-any-file: "src/libstore/store-api.*"
+    - any-glob-to-any-file: "src/libstore/*-store.*"

 "fetching":
-  - src/libfetchers/**/*
+  - changed-files:
+    - any-glob-to-any-file: "src/libfetchers/**/*"

 "repl":
-  - src/libcmd/repl.*
-  - src/nix/repl.*
+  - changed-files:
+    - any-glob-to-any-file: "src/libcmd/repl.*"
+    - any-glob-to-any-file: "src/nix/repl.*"

 "new-cli":
-  - src/nix/**/*
+  - changed-files:
+    - any-glob-to-any-file: "src/nix/**/*"

 "with-tests":
+  - changed-files:
     # Unit tests
-  - src/*/tests/**/*
+    - any-glob-to-any-file: "src/*/tests/**/*"
     # Functional and integration tests
-  - tests/functional/**/*
+    - any-glob-to-any-file: "tests/functional/**/*"
.github/workflows/backport.yml (vendored, 2 changes)

@@ -21,7 +21,7 @@ jobs:
           fetch-depth: 0
       - name: Create backport PRs
         # should be kept in sync with `version`
-        uses: zeebe-io/backport-action@v2.1.1
+        uses: zeebe-io/backport-action@v2.2.0
         with:
           # Config README: https://github.com/zeebe-io/backport-action#backport-action
           github_token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/ci.yml (vendored, 14 changes)

@@ -20,12 +20,12 @@ jobs:
     - uses: actions/checkout@v4
       with:
         fetch-depth: 0
-    - uses: cachix/install-nix-action@v23
+    - uses: cachix/install-nix-action@v24
      with:
        # The sandbox would otherwise be disabled by default on Darwin
        extra_nix_config: "sandbox = true"
    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/cachix-action@v12
+    - uses: cachix/cachix-action@v13
      if: needs.check_secrets.outputs.cachix == 'true'
      with:
        name: '${{ env.CACHIX_NAME }}'

@@ -62,10 +62,10 @@ jobs:
      with:
        fetch-depth: 0
    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/install-nix-action@v23
+    - uses: cachix/install-nix-action@v24
      with:
        install_url: https://releases.nixos.org/nix/nix-2.13.3/install
-    - uses: cachix/cachix-action@v12
+    - uses: cachix/cachix-action@v13
      with:
        name: '${{ env.CACHIX_NAME }}'
        signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'

@@ -84,7 +84,7 @@ jobs:
    steps:
    - uses: actions/checkout@v4
    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/install-nix-action@v23
+    - uses: cachix/install-nix-action@v24
      with:
        install_url: '${{needs.installer.outputs.installerURL}}'
        install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"

@@ -114,12 +114,12 @@ jobs:
    - uses: actions/checkout@v4
      with:
        fetch-depth: 0
-    - uses: cachix/install-nix-action@v23
+    - uses: cachix/install-nix-action@v24
      with:
        install_url: https://releases.nixos.org/nix/nix-2.13.3/install
    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
    - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
-    - uses: cachix/cachix-action@v12
+    - uses: cachix/cachix-action@v13
      if: needs.check_secrets.outputs.cachix == 'true'
      with:
        name: '${{ env.CACHIX_NAME }}'
.github/workflows/labels.yml (vendored, 2 changes)

@@ -18,7 +18,7 @@ jobs:
    runs-on: ubuntu-latest
    if: github.repository_owner == 'NixOS'
    steps:
-    - uses: actions/labeler@v4
+    - uses: actions/labeler@v5
      with:
        repo-token: ${{ secrets.GITHUB_TOKEN }}
        sync-labels: false
Makefile (2 changes)

@@ -64,4 +64,4 @@ $(eval $(call include-sub-makefile, doc/manual/local.mk))
 endif
 $(eval $(call include-sub-makefile, doc/internal-api/local.mk))

-GLOBAL_CXXFLAGS += -g -Wall -include config.h -std=c++2a -I src
+GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src
@@ -31,9 +31,9 @@
 /installation/installation /installation 301!

 /package-management/basic-package-mgmt /command-ref/nix-env 301!
-/package-management/channels* /command-ref/nix-channel 301!
+/package-management/channels /command-ref/nix-channel 301!
 /package-management/package-management /package-management 301!
-/package-management/s3-substituter* /command-ref/new-cli/nix3-help-stores#s3-binary-cache-store 301!
+/package-management/s3-substituter /store/types/s3-binary-cache-store 301!

 /protocols/protocols /protocols 301!
@@ -8,7 +8,15 @@ let
   showBuiltin = name: { doc, args, arity, experimental-feature }:
     let
       experimentalNotice = optionalString (experimental-feature != null) ''
-        This function is only available if the [${experimental-feature}](@docroot@/contributing/experimental-features.md#xp-feature-${experimental-feature}) experimental feature is enabled.
+        > **Note**
+        >
+        > This function is only available if the [`${experimental-feature}` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-${experimental-feature}) is enabled.
+        >
+        > For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md):
+        >
+        > ```
+        > extra-experimental-features = ${experimental-feature}
+        > ```
      '';
    in
    squash ''

@@ -17,10 +25,9 @@ let
       </dt>
       <dd>

-      ${doc}

       ${experimentalNotice}

+      ${doc}

       </dd>
     '';

   listArgs = args: concatStringsSep " " (map (s: "<var>${s}</var>") args);
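Note: the admonition above is gated by `optionalString`, so stable builtins (where `experimental-feature` is `null`) get an empty string instead of the notice. A minimal self-contained sketch of that gating — `optionalString` is inlined here for illustration; the generator takes it from its helper library:

```nix
let
  optionalString = cond: string: if cond then string else "";
  experimental-feature = null; # as for a stable builtin
in
  optionalString (experimental-feature != null) "> **Note** …"
# evaluates to "" — stable builtins get no notice
```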
@@ -103,7 +103,8 @@ let
       ${allStores}
     '';
   index = replaceStrings
-      [ "@store-types@" ] [ storesOverview ]
+      [ "@store-types@" "./local-store.md" "./local-daemon-store.md" ]
+      [ storesOverview "#local-store" "#local-daemon-store" ]
       details.doc;
   storesOverview =
     let
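Note: the hunk above works because `builtins.replaceStrings` rewrites occurrences pairwise — the n-th pattern is replaced by the n-th replacement. A tiny self-contained example of the link rewriting it performs here:

```nix
builtins.replaceStrings
  [ "./local-store.md" "./local-daemon-store.md" ]
  [ "#local-store" "#local-daemon-store" ]
  "see [local store](./local-store.md) or [daemon](./local-daemon-store.md)"
# => "see [local store](#local-store) or [daemon](#local-daemon-store)"
```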
@@ -20,10 +20,10 @@ let
       else "`${setting}`";
   # separate body to cleanly handle indentation
   body = ''
-    ${description}

     ${experimentalFeatureNote}

+    ${description}

     **Default:** ${showDefault documentDefault defaultValue}

     ${showAliases aliases}
@@ -19,10 +19,10 @@ let
   result = squash ''
     # ${name}

-    ${doc}

     ${experimentalFeatureNote}

+    ${doc}

     ## Settings

     ${showSettings { prefix = "store-${slug}"; inherit inlineHTML; } settings}
doc/manual/rl-next/hash-format-nix32.md (new file, 23 lines)

@@ -0,0 +1,23 @@
---
synopsis: Rename hash format `base32` to `nix32`
prs: 9452
---

Hash format `base32` was renamed to `nix32` since it used a special Nix-specific character set for
[Base32](https://en.wikipedia.org/wiki/Base32).

## Deprecation: Use `nix32` instead of `base32` as `toHashFormat`

For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from`
parameters of the `nix hash convert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value
remains as a deprecated alias for `"nix32"`. Please convert your code from:

```nix
builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32"; }
```

to

```nix
builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32"; }
```
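Note: a minimal sketch of the renamed format in use, reusing the SHA-256 test vectors from the `nix hash convert` release note later in this diff (behavior as described there; the exact output is an assumption based on those examples):

```nix
builtins.convertHash {
  hash = "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s";
  hashAlgo = "sha256";
  toHashFormat = "sri";
}
# expected: "sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
```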
@@ -1,9 +1,8 @@
+---
 synopsis: Mounted SSH Store
-issues: #7890
-prs: #7912
-description: {
+issues: 7890
+prs: 7912
+---

 Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md).
 This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem.
-
-}
@@ -1,8 +1,7 @@
-synopsis: `nix config show`
-issues: #7672
-prs: #9477
-description: {
+---
+synopsis: Rename to `nix config show`
+issues: 7672
+prs: 9477
+---

 `nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface.
-
-}
@@ -1,9 +1,6 @@
+---
 synopsis: Fix `nix-env --query --drv-path --json`
-prs: #9257
-description: {
+prs: 9257
+---

 Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set.
-
-}
-
doc/manual/rl-next/nix-hash-convert.md (new file, 47 lines)

@@ -0,0 +1,47 @@
---
synopsis: Add `nix hash convert`
prs: 9452
---

New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) subcommand with a fast track
to stabilization! Examples:

- Convert the hash to `nix32`.

  ```bash
  $ nix hash convert --algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df"
  vw46m23bizj4n8afrc0fj19wrp7mj3c0
  ```

  `nix32` is a base32 encoding with a Nix-specific character set.
  Explicitly specify the hashing algorithm (optional with SRI hashes), but the hash format is detected from the length of the input
  hash.

- Convert the hash to the `sri` format that includes an algorithm specification:

  ```bash
  nix hash convert --algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df"
  sha1-gA1Zz808BekAy04hS+SPa4hqCN8=
  ```

  or with an explicit `--to` format:

  ```bash
  nix hash convert --algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df"
  sha1-gA1Zz808BekAy04hS+SPa4hqCN8=
  ```

- Assert the input format of the hash:

  ```bash
  nix hash convert --algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
  error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32'
  nix hash convert --algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"
  sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=
  ```

The `--to`/`--from`/`--algo` parameters have context-sensitive auto-completion.

## Related Deprecations

The following commands are still available but will emit a deprecation warning. Please convert your code to
`nix hash convert`:

- `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead.
- `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead.
- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead.
- `nix hash to-sri $hash1 $hash2`: Use `nix hash convert --to sri $hash1 $hash2`
  or even just `nix hash convert $hash1 $hash2` instead.
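Note: the same conversion is available in the Nix language via `builtins.convertHash` (see the `hash-format-nix32` note above); a sketch of the first CLI example, with the expected value taken from that example:

```nix
builtins.convertHash {
  hash = "800d59cfcd3c05e900cb4e214be48f6b886a08df";
  hashAlgo = "sha1";
  toHashFormat = "nix32";
}
# expected: "vw46m23bizj4n8afrc0fj19wrp7mj3c0"
```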
doc/manual/rl-next/source-positions-in-errors.md (new file, 42 lines)

@@ -0,0 +1,42 @@
---
synopsis: Source locations are printed more consistently in errors
issues: 561
prs: 9555
---

Source location information is now included in error messages more
consistently. Given this code:

```nix
let
  attr = {foo = "bar";};
  key = {};
in
  attr.${key}
```

Previously, Nix would show this unhelpful message when attempting to evaluate
it:

```
error:
       … while evaluating an attribute name

       error: value is a set while a string was expected
```

Now, the error message displays where the problematic value was found:

```
error:
       … while evaluating an attribute name

         at bad.nix:4:11:

            3|   key = {};
            4| in attr.${key}
             |           ^
            5|

       error: value is a set while a string was expected
```
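Note: for contrast, a fixed variant of the example (hypothetical, not part of the release note) where the attribute name is a string evaluates without error:

```nix
let
  attr = { foo = "bar"; };
  key = "foo";
in
  attr.${key}
# evaluates to "bar"
```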
@@ -257,17 +257,16 @@ User-visible changes should come with a release note.

 Here's what a complete entry looks like. The file name is not incorporated in the document.

 ```
+---
 synopsis: Basically a title
-issues: #1234
-prs: #1238
-description: {
+issues: 1234
+prs: 1238
+---

 Here's one or more paragraphs that describe the change.

 - It's markdown
 - Add references to the manual using @docroot@
-
-}
 ```

 Significant changes should add the following header, which moves them to the top.

@@ -283,3 +282,45 @@ See also the [format documentation](https://github.com/haskell/cabal/blob/master

 Releases have a precomputed `rl-MAJOR.MINOR.md`, and no `rl-next.md`.
 Set `buildUnreleasedNotes = true;` in `flake.nix` to build the release notes on the fly.
+
+## Branches
+
+- [`master`](https://github.com/NixOS/nix/commits/master)
+
+  The main development branch. All changes are approved and merged here.
+  When developing a change, create a branch based on the latest `master`.
+
+  Maintainers try to [keep it in a release-worthy state](#reverting).
+
+- [`maintenance-*.*`](https://github.com/NixOS/nix/branches/all?query=maintenance)
+
+  These branches are the subject of backports only, and are
+  also [kept](#reverting) in a release-worthy state.
+
+  See [`maintainers/backporting.md`](https://github.com/NixOS/nix/blob/master/maintainers/backporting.md)
+
+- [`latest-release`](https://github.com/NixOS/nix/tree/latest-release)
+
+  The latest patch release of the latest minor version.
+
+  See [`maintainers/release-process.md`](https://github.com/NixOS/nix/blob/master/maintainers/release-process.md)
+
+- [`backport-*-to-*`](https://github.com/NixOS/nix/branches/all?query=backport)
+
+  Generally branches created by the backport action.
+
+  See [`maintainers/backporting.md`](https://github.com/NixOS/nix/blob/master/maintainers/backporting.md)
+
+- [_other_](https://github.com/NixOS/nix/branches/all)
+
+  Branches that do not conform to the above patterns should be feature branches.
+
+## Reverting
+
+If a change turns out to be merged by mistake, or contain a regression, it may be reverted.
+A revert is not a rejection of the contribution, but merely part of an effective development process.
+It makes sure that development keeps running smoothly, with minimal uncertainty, and less overhead.
+If maintainers have to worry too much about avoiding reverts, they would not be able to merge as much.
+By embracing reverts as a good part of the development process, everyone wins.
+
+However, taking a step back may be frustrating, so maintainers will be extra supportive on the next try.
@@ -1,10 +1,9 @@
 # Quick Start

-This chapter is for impatient people who don't like reading
-documentation. For more in-depth information you are kindly referred
-to subsequent chapters.
+This chapter is for impatient people who don't like reading documentation.
+For more in-depth information you are kindly referred to subsequent chapters.

-1. Install Nix by running the following:
+1. Install Nix:

    ```console
    $ curl -L https://nixos.org/nix/install | sh

@@ -13,87 +12,33 @@ to subsequent chapters.
    The install script will use `sudo`, so make sure you have sufficient rights.
    On Linux, `--daemon` can be omitted for a single-user install.

-   For other installation methods, see [here](installation/index.md).
+   For other installation methods, see the detailed [installation instructions](installation/index.md).

-1. See what installable packages are currently available in the
-   channel:
+1. Run software without installing it permanently:

    ```console
-   $ nix-env --query --available --attr-path
-   nixpkgs.docbook_xml_dtd_43 docbook-xml-4.3
-   nixpkgs.docbook_xml_dtd_45 docbook-xml-4.5
-   nixpkgs.firefox firefox-33.0.2
-   nixpkgs.hello hello-2.9
-   nixpkgs.libxslt libxslt-1.1.28
-   …
+   $ nix-shell --packages cowsay lolcat
    ```

-1. Install some packages from the channel:
+   This downloads the specified packages with all their dependencies, and drops you into a Bash shell where the commands provided by those packages are present.
+   This will not affect your normal environment:

    ```console
-   $ nix-env --install --attr nixpkgs.hello
+   [nix-shell:~]$ cowsay Hello, Nix! | lolcat
    ```

-   This should download pre-built packages; it should not build them
-   locally (if it does, something went wrong).
-
-1. Test that they work:
+   Exiting the shell will make the programs disappear again:

    ```console
-   $ which hello
-   /home/eelco/.nix-profile/bin/hello
-   $ hello
-   Hello, world!
-   ```
-
-1. Uninstall a package:
-
-   ```console
-   $ nix-env --uninstall hello
-   ```
-
-1. You can also test a package without installing it:
-
-   ```console
-   $ nix-shell --packages hello
-   ```
-
-   This builds or downloads GNU Hello and its dependencies, then drops
-   you into a Bash shell where the `hello` command is present, all
-   without affecting your normal environment:
-
-   ```console
-   [nix-shell:~]$ hello
-   Hello, world!
-
    [nix-shell:~]$ exit
-
-   $ hello
-   hello: command not found
+   $ lolcat
+   lolcat: command not found
    ```

-1. To keep up-to-date with the channel, do:
+1. Search for more packages on <search.nixos.org> to try them out.
+
+1. Free up storage space:

    ```console
-   $ nix-channel --update nixpkgs
-   $ nix-env --upgrade '*'
+   $ nix-collect-garbage
    ```
-
-   The latter command will upgrade each installed package for which
-   there is a “newer” version (as determined by comparing the version
-   numbers).
-
-1. If you're unhappy with the result of a `nix-env` action (e.g., an
-   upgraded package turned out not to work properly), you can go back:
-
-   ```console
-   $ nix-env --rollback
-   ```
-
-1. You should periodically run the Nix garbage collector to get rid of
-   unused packages, since uninstalls or upgrades don't actually delete
-   them:
-
-   ```console
-   $ nix-collect-garbage --delete-old
-   ```
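Note: the ad-hoc environment from the rewritten Quick Start can also be pinned in a file; a minimal sketch, assuming a `<nixpkgs>` channel or `NIX_PATH` entry is available:

```nix
# shell.nix — file-based equivalent of `nix-shell --packages cowsay lolcat`
{ pkgs ? import <nixpkgs> { } }:

pkgs.mkShell {
  packages = [ pkgs.cowsay pkgs.lolcat ];
}
```

Running `nix-shell` in the directory containing this file drops you into the same environment.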
@@ -18,7 +18,7 @@
 - `nix-shell` shebang lines now support single-quoted arguments.

 - `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/contributing/experimental-features.md#xp-fetch-tree).
-  As described in the documentation for that feature, this is because we anticipate polishing it and then stabilizing it before the rest of flakes.
+  This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/contributing/experimental-features.md#xp-fetch-tree).

 - The interface for creating and updating lock files has been overhauled:
@@ -29,15 +29,15 @@ supported settings for each store type are documented below.
 The special store URL `auto` causes Nix to automatically select a
 store as follows:

-* Use the [local store](#local-store) `/nix/store` if `/nix/var/nix`
+* Use the [local store](./local-store.md) `/nix/store` if `/nix/var/nix`
   is writable by the current user.

 * Otherwise, if `/nix/var/nix/daemon-socket/socket` exists, [connect
-  to the Nix daemon listening on that socket](#local-daemon-store).
+  to the Nix daemon listening on that socket](./local-daemon-store.md).

-* Otherwise, on Linux only, use the [local chroot store](#local-store)
+* Otherwise, on Linux only, use the [local chroot store](./local-store.md)
   `~/.local/share/nix/root`, which will be created automatically if it
   does not exist.

-* Otherwise, use the [local store](#local-store) `/nix/store`.
+* Otherwise, use the [local store](./local-store.md) `/nix/store`.
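Note: the selection order above reduces to a small decision function; a sketch in Nix pseudocode, where the three predicates are hypothetical stand-ins for the filesystem checks Nix performs internally:

```nix
# Hypothetical sketch of the `auto` store selection described above.
let
  selectStore = { nixVarWritable, daemonSocketExists, isLinux }:
    if nixVarWritable then "local store at /nix/store"
    else if daemonSocketExists then "daemon store via /nix/var/nix/daemon-socket/socket"
    else if isLinux then "local chroot store at ~/.local/share/nix/root"
    else "local store at /nix/store";
in
  selectStore { nixVarWritable = false; daemonSocketExists = true; isLinux = true; }
# => "daemon store via /nix/var/nix/daemon-socket/socket"
```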
@@ -50,16 +50,16 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1700748986,
-        "narHash": "sha256-/nqLrNU297h3PCw4QyDpZKZEUHmialJdZW2ceYFobds=",
+        "lastModified": 1701355166,
+        "narHash": "sha256-4V7XMI0Gd+y0zsi++cEHd99u3GNL0xSTGRmiWKzGnUQ=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "9ba29e2346bc542e9909d1021e8fd7d4b3f64db0",
+        "rev": "36c4ac09e9bebcec1fa7b7539cddb0c9e837409c",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-23.05-small",
+        "ref": "staging-23.05",
         "repo": "nixpkgs",
         "type": "github"
       }
flake.nix (18 changes)

@@ -1,7 +1,13 @@
 {
   description = "The purely functional package manager";

-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small";
+  # TODO Go back to nixos-23.05-small once
+  # https://github.com/NixOS/nixpkgs/pull/271202 is merged.
+  #
+  # Also, do not grab arbitrary further staging commits. This PR was
+  # carefully made to be based on release-23.05 and just contain
+  # rebuild-causing changes to packages that Nix actually uses.
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/staging-23.05";
   inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
   inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
   inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };

@@ -178,6 +184,8 @@
         ];
       });

+      changelog-d-nix = final.buildPackages.callPackage ./misc/changelog-d.nix { };
+
       nix =
         let
           officialRelease = false;

@@ -197,6 +205,7 @@
           libgit2 = final.libgit2-nix;
           lowdown = final.lowdown-nix;
           busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell;
+          changelog-d = final.changelog-d-nix;
         } // {
           # this is a proper separate downstream package, but put
           # here also for back compat reasons.

@@ -219,6 +228,8 @@
       # Binary package for various platforms.
       build = forAllSystems (system: self.packages.${system}.nix);

+      shellInputs = forAllSystems (system: self.devShells.${system}.default.inputDerivation);
+
       buildStatic = lib.genAttrs linux64BitSystems (system: self.packages.${system}.nix-static);

       buildCross = forAllCrossSystems (crossSystem:

@@ -342,6 +353,11 @@
         perlBindings = self.hydraJobs.perlBindings.${system};
         installTests = self.hydraJobs.installTests.${system};
         nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
+        rl-next =
+          let pkgs = nixpkgsFor.${system}.native;
+          in pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } ''
+          LANG=C.UTF-8 ${pkgs.changelog-d-nix}/bin/changelog-d ${./doc/manual/rl-next} >$out
+          '';
       } // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
         dockerImage = self.hydraJobs.dockerImage.${system};
       });
misc/changelog-d.cabal.nix (new file, 31 lines)

@@ -0,0 +1,31 @@
{ mkDerivation, aeson, base, bytestring, cabal-install-parsers
, Cabal-syntax, containers, directory, filepath, frontmatter
, generic-lens-lite, lib, mtl, optparse-applicative, parsec, pretty
, regex-applicative, text, pkgs
}:
let rev = "f30f6969e9cd8b56242309639d58acea21c99d06";
in
mkDerivation {
  pname = "changelog-d";
  version = "0.1";
  src = pkgs.fetchurl {
    name = "changelog-d-${rev}.tar.gz";
    url = "https://codeberg.org/roberth/changelog-d/archive/${rev}.tar.gz";
    hash = "sha256-8a2+i5u7YoszAgd5OIEW0eYUcP8yfhtoOIhLJkylYJ4=";
  } // { inherit rev; };
  isLibrary = false;
  isExecutable = true;
  libraryHaskellDepends = [
    aeson base bytestring cabal-install-parsers Cabal-syntax containers
    directory filepath frontmatter generic-lens-lite mtl parsec pretty
    regex-applicative text
  ];
  executableHaskellDepends = [
    base bytestring Cabal-syntax directory filepath
    optparse-applicative
  ];
  doHaddock = false;
  description = "Concatenate changelog entries into a single one";
  license = lib.licenses.gpl3Plus;
  mainProgram = "changelog-d";
}
misc/changelog-d.nix (new file, 31 lines)

@@ -0,0 +1,31 @@
# Taken temporarily from <nixpkgs/pkgs/by-name/ch/changelog-d/package.nix>
{
  callPackage,
  lib,
  haskell,
  haskellPackages,
}:

let
  hsPkg = haskellPackages.callPackage ./changelog-d.cabal.nix { };

  addCompletions = haskellPackages.generateOptparseApplicativeCompletions ["changelog-d"];

  haskellModifications =
    lib.flip lib.pipe [
      addCompletions
      haskell.lib.justStaticExecutables
    ];

  mkDerivationOverrides = finalAttrs: oldAttrs: {

    version = oldAttrs.version + "-git-${lib.strings.substring 0 7 oldAttrs.src.rev}";

    meta = oldAttrs.meta // {
      homepage = "https://codeberg.org/roberth/changelog-d";
      maintainers = [ lib.maintainers.roberth ];
    };

  };
in
  (haskellModifications hsPkg).overrideAttrs mkDerivationOverrides
@@ -78,7 +78,7 @@ SV * queryReferences(char * path)
 SV * queryPathHash(char * path)
     PPCODE:
         try {
-            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Base32, true);
+            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -104,7 +104,7 @@ SV * queryPathInfo(char * path, int base32)
                 XPUSHs(&PL_sv_undef);
             else
                 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-            auto s = info->narHash.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, true);
+            auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
             mXPUSHi(info->registrationTime);
             mXPUSHi(info->narSize);

@@ -205,8 +205,8 @@ void importPaths(int fd, int dontCheckSigs)
 SV * hashPath(char * algo, int base32, char * path)
     PPCODE:
         try {
-            Hash h = hashPath(parseHashType(algo), path).first;
-            auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            Hash h = hashPath(parseHashAlgo(algo), path).first;
+            auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -216,8 +216,8 @@ SV * hashPath(char * algo, int base32, char * path)
 SV * hashFile(char * algo, int base32, char * path)
     PPCODE:
         try {
-            Hash h = hashFile(parseHashType(algo), path);
-            auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            Hash h = hashFile(parseHashAlgo(algo), path);
+            auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -227,8 +227,8 @@ SV * hashFile(char * algo, int base32, char * path)
 SV * hashString(char * algo, int base32, char * s)
     PPCODE:
         try {
-            Hash h = hashString(parseHashType(algo), s);
-            auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            Hash h = hashString(parseHashAlgo(algo), s);
+            auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -238,8 +238,8 @@ SV * hashString(char * algo, int base32, char * s)
 SV * convertHash(char * algo, char * s, int toBase32)
     PPCODE:
         try {
-            auto h = Hash::parseAny(s, parseHashType(algo));
-            auto s = h.to_string(toBase32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            auto h = Hash::parseAny(s, parseHashAlgo(algo));
+            auto s = h.to_string(toBase32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -281,7 +281,7 @@ SV * addToStore(char * srcPath, int recursive, char * algo)
     PPCODE:
         try {
             auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
-            auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashType(algo));
+            auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashAlgo(algo));
             XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -291,7 +291,7 @@ SV * addToStore(char * srcPath, int recursive, char * algo)
 SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
     PPCODE:
         try {
-            auto h = Hash::parseAny(hash, parseHashType(algo));
+            auto h = Hash::parseAny(hash, parseHashAlgo(algo));
             auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
             auto path = store()->makeFixedOutputPath(name, FixedOutputInfo {
                 .method = method,
@@ -1,4 +1,5 @@
 #include "command.hh"
+#include "markdown.hh"
 #include "store-api.hh"
 #include "local-fs-store.hh"
 #include "derivations.hh"

@@ -34,6 +35,19 @@ nlohmann::json NixMultiCommand::toJSON()
     return MultiCommand::toJSON();
 }

+void NixMultiCommand::run()
+{
+    if (!command) {
+        std::set<std::string> subCommandTextLines;
+        for (auto & [name, _] : commands)
+            subCommandTextLines.insert(fmt("- `%s`", name));
+        std::string markdownError = fmt("`nix %s` requires a sub-command. Available sub-commands:\n\n%s\n",
+            commandName, concatStringsSep("\n", subCommandTextLines));
+        throw UsageError(renderMarkdownToTerminal(markdownError));
+    }
+    command->second->run();
+}
+
 StoreCommand::StoreCommand()
 {
 }
@@ -26,9 +26,13 @@ static constexpr Command::Category catNixInstallation = 102;

 static constexpr auto installablesCategory = "Options that change the interpretation of [installables](@docroot@/command-ref/new-cli/nix.md#installables)";

-struct NixMultiCommand : virtual MultiCommand, virtual Command
+struct NixMultiCommand : MultiCommand, virtual Command
 {
     nlohmann::json toJSON() override;
+
+    using MultiCommand::MultiCommand;
+
+    virtual void run() override;
 };

 // For the overloaded run methods
@@ -260,9 +260,10 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s

         evalSettings.pureEval = false;
         auto state = getEvalState();
-        Expr *e = state->parseExprFromFile(
-            resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
-        );
+        auto e =
+            state->parseExprFromFile(
+                resolveExprPath(
+                    lookupFileArg(*state, *file)));

         Value root;
         state->eval(e, root);
@@ -103,8 +103,10 @@ void EvalState::forceValue(Value & v, Callable getPos)
             throw;
         }
     }
-    else if (v.isApp())
-        callFunction(*v.app.left, *v.app.right, v, noPos);
+    else if (v.isApp()) {
+        PosIdx pos = getPos();
+        callFunction(*v.app.left, *v.app.right, v, pos);
+    }
     else if (v.isBlackhole())
         error("infinite recursion encountered").atPos(getPos()).template debugThrow<EvalError>();
 }

@@ -121,9 +123,9 @@ template <typename Callable>
 [[gnu::always_inline]]
 inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view errorCtx)
 {
-    forceValue(v, noPos);
-    if (v.type() != nAttrs) {
     PosIdx pos = getPos();
+    forceValue(v, pos);
+    if (v.type() != nAttrs) {
         error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
     }
 }

@@ -132,7 +134,7 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e
 [[gnu::always_inline]]
 inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view errorCtx)
 {
-    forceValue(v, noPos);
+    forceValue(v, pos);
     if (!v.isList()) {
         error("value is %1% while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
     }
@ -14,6 +14,7 @@
|
||||||
#include "profiles.hh"
|
#include "profiles.hh"
|
||||||
#include "print.hh"
|
#include "print.hh"
|
||||||
#include "fs-input-accessor.hh"
|
#include "fs-input-accessor.hh"
|
||||||
|
#include "filtering-input-accessor.hh"
|
||||||
#include "memory-input-accessor.hh"
|
#include "memory-input-accessor.hh"
|
||||||
#include "signals.hh"
|
#include "signals.hh"
|
||||||
#include "gc-small-vector.hh"
|
#include "gc-small-vector.hh"
|
||||||
|
@ -344,7 +345,7 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env)
|
||||||
} else {
|
} else {
|
||||||
Value nameValue;
|
Value nameValue;
|
||||||
name.expr->eval(state, env, nameValue);
|
name.expr->eval(state, env, nameValue);
|
||||||
state.forceStringNoCtx(nameValue, noPos, "while evaluating an attribute name");
|
state.forceStringNoCtx(nameValue, name.expr->getPos(), "while evaluating an attribute name");
|
||||||
return state.symbols.create(nameValue.string_view());
|
return state.symbols.create(nameValue.string_view());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -509,7 +510,16 @@ EvalState::EvalState(
|
||||||
, sOutputSpecified(symbols.create("outputSpecified"))
|
, sOutputSpecified(symbols.create("outputSpecified"))
|
||||||
, repair(NoRepair)
|
, repair(NoRepair)
|
||||||
, emptyBindings(0)
|
, emptyBindings(0)
|
||||||
, rootFS(makeFSInputAccessor(CanonPath::root))
|
, rootFS(
|
||||||
|
evalSettings.restrictEval || evalSettings.pureEval
|
||||||
|
? ref<InputAccessor>(AllowListInputAccessor::create(makeFSInputAccessor(CanonPath::root), {},
|
||||||
|
[](const CanonPath & path) -> RestrictedPathError {
|
||||||
|
auto modeInformation = evalSettings.pureEval
|
||||||
|
? "in pure evaluation mode (use '--impure' to override)"
|
||||||
|
: "in restricted mode";
|
||||||
|
throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation);
|
||||||
|
}))
|
||||||
|
: makeFSInputAccessor(CanonPath::root))
|
||||||
, corepkgsFS(makeMemoryInputAccessor())
|
, corepkgsFS(makeMemoryInputAccessor())
|
||||||
, internalFS(makeMemoryInputAccessor())
|
, internalFS(makeMemoryInputAccessor())
|
||||||
, derivationInternal{corepkgsFS->addFile(
|
, derivationInternal{corepkgsFS->addFile(
|
||||||
|
@ -551,28 +561,10 @@ EvalState::EvalState(
|
||||||
searchPath.elements.emplace_back(SearchPath::Elem::parse(i));
|
searchPath.elements.emplace_back(SearchPath::Elem::parse(i));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (evalSettings.restrictEval || evalSettings.pureEval) {
|
/* Allow access to all paths in the search path. */
|
||||||
allowedPaths = PathSet();
|
if (rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
|
||||||
|
for (auto & i : searchPath.elements)
|
||||||
for (auto & i : searchPath.elements) {
|
resolveSearchPathPath(i.path, true);
|
||||||
auto r = resolveSearchPathPath(i.path);
|
|
||||||
if (!r) continue;
|
|
||||||
|
|
||||||
auto path = std::move(*r);
|
|
||||||
|
|
||||||
if (store->isInStore(path)) {
|
|
||||||
try {
|
|
||||||
StorePathSet closure;
|
|
||||||
store->computeFSClosure(store->toStorePath(path).first, closure);
|
|
||||||
for (auto & path : closure)
|
|
||||||
allowPath(path);
|
|
||||||
} catch (InvalidPath &) {
|
|
||||||
allowPath(path);
|
|
||||||
}
|
|
||||||
} else
|
|
||||||
allowPath(path);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
corepkgsFS->addFile(
|
corepkgsFS->addFile(
|
||||||
CanonPath("fetchurl.nix"),
|
CanonPath("fetchurl.nix"),
|
||||||
|
@ -590,14 +582,14 @@ EvalState::~EvalState()
|
||||||
|
|
||||||
void EvalState::allowPath(const Path & path)
|
void EvalState::allowPath(const Path & path)
|
||||||
{
|
{
|
||||||
if (allowedPaths)
|
if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
|
||||||
allowedPaths->insert(path);
|
rootFS2->allowPath(CanonPath(path));
|
||||||
}
|
}
|
||||||
|
|
||||||
void EvalState::allowPath(const StorePath & storePath)
|
void EvalState::allowPath(const StorePath & storePath)
|
||||||
{
|
{
|
||||||
if (allowedPaths)
|
if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
|
||||||
allowedPaths->insert(store->toRealPath(storePath));
|
rootFS2->allowPath(CanonPath(store->toRealPath(storePath)));
|
||||||
}
|
}
|
||||||
|
|
||||||
void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v)
|
void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v)
|
||||||
|
@ -607,54 +599,6 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value &
|
||||||
mkStorePathString(storePath, v);
|
mkStorePathString(storePath, v);
|
||||||
}
|
}
|
||||||
|
|
||||||
SourcePath EvalState::checkSourcePath(const SourcePath & path_)
|
|
||||||
{
|
|
||||||
// Don't check non-rootFS accessors, they're in a different namespace.
|
|
||||||
if (path_.accessor != ref<InputAccessor>(rootFS)) return path_;
|
|
||||||
|
|
||||||
if (!allowedPaths) return path_;
|
|
||||||
|
|
||||||
auto i = resolvedPaths.find(path_.path.abs());
|
|
||||||
if (i != resolvedPaths.end())
|
|
||||||
return i->second;
|
|
||||||
|
|
||||||
bool found = false;
|
|
||||||
|
|
||||||
/* First canonicalize the path without symlinks, so we make sure an
|
|
||||||
* attacker can't append ../../... to a path that would be in allowedPaths
|
|
||||||
* and thus leak symlink targets.
|
|
||||||
*/
|
|
||||||
Path abspath = canonPath(path_.path.abs());
|
|
||||||
|
|
||||||
for (auto & i : *allowedPaths) {
|
|
||||||
if (isDirOrInDir(abspath, i)) {
|
|
||||||
found = true;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!found) {
|
|
||||||
auto modeInformation = evalSettings.pureEval
|
|
||||||
? "in pure eval mode (use '--impure' to override)"
|
|
||||||
: "in restricted mode";
|
|
||||||
throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", abspath, modeInformation);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Resolve symlinks. */
|
|
||||||
debug("checking access to '%s'", abspath);
|
|
||||||
SourcePath path = rootPath(CanonPath(canonPath(abspath, true)));
|
|
||||||
|
|
||||||
for (auto & i : *allowedPaths) {
|
|
||||||
if (isDirOrInDir(path.path.abs(), i)) {
|
|
||||||
resolvedPaths.insert_or_assign(path_.path.abs(), path);
|
|
||||||
return path;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
throw RestrictedPathError("access to canonical path '%1%' is forbidden in restricted mode", path);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void EvalState::checkURI(const std::string & uri)
|
void EvalState::checkURI(const std::string & uri)
|
||||||
{
|
{
|
||||||
if (!evalSettings.restrictEval) return;
|
if (!evalSettings.restrictEval) return;
|
||||||
|
@ -674,12 +618,14 @@ void EvalState::checkURI(const std::string & uri)
|
||||||
/* If the URI is a path, then check it against allowedPaths as
|
/* If the URI is a path, then check it against allowedPaths as
|
||||||
well. */
|
well. */
|
||||||
if (hasPrefix(uri, "/")) {
|
if (hasPrefix(uri, "/")) {
|
||||||
checkSourcePath(rootPath(CanonPath(uri)));
|
if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
|
||||||
|
rootFS2->checkAccess(CanonPath(uri));
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (hasPrefix(uri, "file://")) {
|
if (hasPrefix(uri, "file://")) {
|
||||||
checkSourcePath(rootPath(CanonPath(std::string(uri, 7))));
|
if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
|
||||||
|
rootFS2->checkAccess(CanonPath(uri.substr(7)));
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -1181,10 +1127,8 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env)
 }


-void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial)
+void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial)
 {
-    auto path = checkSourcePath(path_);
-
     FileEvalCache::iterator i;
     if ((i = fileEvalCache.find(path)) != fileEvalCache.end()) {
         v = i->second;
@@ -1205,7 +1149,7 @@ void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial
         e = j->second;

     if (!e)
-        e = parseExprFromFile(checkSourcePath(resolvedPath));
+        e = parseExprFromFile(resolvedPath);

     fileParseCache[resolvedPath] = e;

@@ -1514,7 +1458,7 @@ void ExprOpHasAttr::eval(EvalState & state, Env & env, Value & v)
     e->eval(state, env, vTmp);

     for (auto & i : attrPath) {
-        state.forceValue(*vAttrs, noPos);
+        state.forceValue(*vAttrs, getPos());
         Bindings::iterator j;
         auto name = getName(i, state, env);
         if (vAttrs->type() != nAttrs ||
@@ -1683,7 +1627,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
         if (countCalls) primOpCalls[name]++;

         try {
-            vCur.primOp->fun(*this, noPos, args, vCur);
+            vCur.primOp->fun(*this, vCur.determinePos(noPos), args, vCur);
         } catch (Error & e) {
             addErrorTrace(e, pos, "while calling the '%1%' builtin", name);
             throw;
@@ -1731,7 +1675,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
             // 1. Unify this and above code. Heavily redundant.
             // 2. Create a fake env (arg1, arg2, etc.) and a fake expr (arg1: arg2: etc: builtins.name arg1 arg2 etc)
             //    so the debugger allows to inspect the wrong parameters passed to the builtin.
-            primOp->primOp->fun(*this, noPos, vArgs, vCur);
+            primOp->primOp->fun(*this, vCur.determinePos(noPos), vArgs, vCur);
         } catch (Error & e) {
             addErrorTrace(e, pos, "while calling the '%1%' builtin", name);
             throw;
@@ -1839,7 +1783,7 @@ https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbo
         }
     }

-    callFunction(fun, allocValue()->mkAttrs(attrs), res, noPos);
+    callFunction(fun, allocValue()->mkAttrs(attrs), res, pos);
 }


@@ -1875,7 +1819,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v)

 void ExprOpNot::eval(EvalState & state, Env & env, Value & v)
 {
-    v.mkBool(!state.evalBool(env, e, noPos, "in the argument of the not operator")); // XXX: FIXME: !
+    v.mkBool(!state.evalBool(env, e, getPos(), "in the argument of the not operator")); // XXX: FIXME: !
 }


@@ -2316,7 +2260,7 @@ BackedStringView EvalState::coerceToString(
         std::string result;
         for (auto [n, v2] : enumerate(v.listItems())) {
             try {
-                result += *coerceToString(noPos, *v2, context,
+                result += *coerceToString(pos, *v2, context,
                         "while evaluating one element of the list",
                         coerceMore, copyToStore, canonicalizePath);
             } catch (Error & e) {
@@ -2463,8 +2407,8 @@ SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value &

 bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_view errorCtx)
 {
-    forceValue(v1, noPos);
-    forceValue(v2, noPos);
+    forceValue(v1, pos);
+    forceValue(v2, pos);

     /* !!! Hack to support some old broken code that relies on pointer
        equality tests between sets. (Specifically, builderDefs calls
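The common thread in the eval.cc hunks above is position propagation: `noPos` placeholders give way to a real `PosIdx` from `getPos()`, the caller's `pos`, or `Value::determinePos` as a fallback. A hedged one-liner sketch of the fallback semantics, using only the call that appears in the hunks:

```cpp
// Sketch: determinePos returns the value's own source position when it has
// one, and only falls back to the supplied default otherwise. This is why
// the primop calls above pass vCur.determinePos(noPos) instead of noPos.
PosIdx effectivePos(Value & v, PosIdx fallback)
{
    return v.determinePos(fallback);
}
```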
@@ -30,7 +30,6 @@ class EvalState;
 class StorePath;
 struct SingleDerivedPath;
 enum RepairFlag : bool;
-struct FSInputAccessor;
 struct MemoryInputAccessor;


@@ -217,18 +216,12 @@ public:
      */
     RepairFlag repair;

-    /**
-     * The allowed filesystem paths in restricted or pure evaluation
-     * mode.
-     */
-    std::optional<PathSet> allowedPaths;
-
     Bindings emptyBindings;

     /**
      * The accessor for the root filesystem.
      */
-    const ref<FSInputAccessor> rootFS;
+    const ref<InputAccessor> rootFS;

     /**
      * The in-memory filesystem for <nix/...> paths.
@@ -396,12 +389,6 @@ public:
      */
     void allowAndSetStorePathString(const StorePath & storePath, Value & v);

-    /**
-     * Check whether access to a path is allowed and throw an error if
-     * not. Otherwise return the canonicalised path.
-     */
-    SourcePath checkSourcePath(const SourcePath & path);
-
     void checkURI(const std::string & uri);

     /**
@@ -445,13 +432,15 @@ public:
     SourcePath findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);

     /**
-     * Try to resolve a search path value (not the optional key part)
+     * Try to resolve a search path value (not the optional key part).
      *
      * If the specified search path element is a URI, download it.
      *
      * If it is not found, return `std::nullopt`
      */
-    std::optional<std::string> resolveSearchPathPath(const SearchPath::Path & path);
+    std::optional<std::string> resolveSearchPathPath(
+        const SearchPath::Path & elem,
+        bool initAccessControl = false);

     /**
      * Evaluate an expression to normal form
@@ -756,6 +745,13 @@ public:
      */
     [[nodiscard]] StringMap realiseContext(const NixStringContext & context);

+    /* Call the binary path filter predicate used by builtins.path etc. */
+    bool callPathFilter(
+        Value * filterFun,
+        const SourcePath & path,
+        std::string_view pathArg,
+        PosIdx pos);
+
 private:

     /**
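Taken together, these header changes replace the `allowedPaths` set and `checkSourcePath` with access control at the accessor level: `rootFS` becomes a plain `ref<InputAccessor>` that may or may not be an allow-list wrapper. A sketch of how a restricted root filesystem could now be assembled; `makeFSInputAccessor` is an assumption based on `fs-input-accessor.hh`, everything else is declared elsewhere in this commit:

```cpp
// Sketch, not the actual EvalState constructor logic.
#include "fs-input-accessor.hh"
#include "filtering-input-accessor.hh"

using namespace nix;

ref<InputAccessor> makeRootFS(bool restricted, std::set<CanonPath> initialAllowed)
{
    // Plain POSIX accessor rooted at /; assumed helper from fs-input-accessor.hh.
    ref<InputAccessor> fs = makeFSInputAccessor(CanonPath::root);
    if (!restricted) return fs;

    // Wrap it in the allow-list accessor introduced by this commit.
    return AllowListInputAccessor::create(
        fs,
        std::move(initialAllowed),
        [](const CanonPath & path) {
            return RestrictedPathError(
                "access to path '%s' is forbidden in restricted mode", path.abs());
        });
}
```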
@@ -904,7 +904,7 @@ Fingerprint LockedFlake::getFingerprint() const
     // FIXME: as an optimization, if the flake contains a lock file
     // and we haven't changed it, then it's sufficient to use
     // flake.sourceInfo.storePath for the fingerprint.
-    return hashString(htSHA256,
+    return hashString(HashAlgorithm::SHA256,
         fmt("%s;%s;%d;%d;%s",
             flake.storePath.to_string(),
             flake.lockedRef.subdir,
@@ -405,6 +405,7 @@ struct ExprOpNot : Expr
 {
     Expr * e;
     ExprOpNot(Expr * e) : e(e) { };
+    PosIdx getPos() const override { return e->getPos(); }
     COMMON_METHODS
 };

@@ -692,16 +692,17 @@ SourcePath resolveExprPath(SourcePath path)

     /* If `path' is a symlink, follow it. This is so that relative
        path references work. */
-    while (true) {
+    while (!path.path.isRoot()) {
         // Basic cycle/depth limit to avoid infinite loops.
         if (++followCount >= maxFollow)
             throw Error("too many symbolic links encountered while traversing the path '%s'", path);
-        if (path.lstat().type != InputAccessor::tSymlink) break;
-        path = {path.accessor, CanonPath(path.readLink(), path.path.parent().value_or(CanonPath::root))};
+        auto p = path.parent().resolveSymlinks() + path.baseName();
+        if (p.lstat().type != InputAccessor::tSymlink) break;
+        path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))};
     }

     /* If `path' refers to a directory, append `/default.nix'. */
-    if (path.lstat().type == InputAccessor::tDirectory)
+    if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory)
         return path + "default.nix";

     return path;
@@ -716,7 +717,7 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path)

 Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
 {
-    auto buffer = path.readFile();
+    auto buffer = path.resolveSymlinks().readFile();
     // readFile has hopefully left some extra space for terminators
     buffer.append("\0\0", 2);
     return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);
@@ -783,7 +784,7 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_
 }


-std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0)
+std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl)
 {
     auto & value = value0.s;
     auto i = searchPathResolved.find(value);
@@ -800,7 +801,6 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
                 logWarning({
                     .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
                 });
-                res = std::nullopt;
             }
         }

@@ -814,6 +814,20 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa

     else {
         auto path = absPath(value);
+
+        /* Allow access to paths in the search path. */
+        if (initAccessControl) {
+            allowPath(path);
+            if (store->isInStore(path)) {
+                try {
+                    StorePathSet closure;
+                    store->computeFSClosure(store->toStorePath(path).first, closure);
+                    for (auto & p : closure)
+                        allowPath(p);
+                } catch (InvalidPath &) { }
+            }
+        }
+
         if (pathExists(path))
             res = { path };
         else {
@@ -829,7 +843,7 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
     else
         debug("failed to resolve search path element '%s'", value);

-    searchPathResolved[value] = res;
+    searchPathResolved.emplace(value, res);
     return res;
 }

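The new `initAccessControl` flag makes search-path resolution double as access-control setup: a resolved element is allowed, and if it lives in the store its whole closure is allowed too, so a `<nixpkgs>` entry that points into the store keeps working in restricted mode. A standalone sketch of that closure walk, using the same store calls as the hunk above:

```cpp
// Sketch of the behaviour added above, factored into a helper for clarity.
#include "eval.hh"
#include "store-api.hh"

using namespace nix;

void allowWithClosure(EvalState & state, ref<Store> store, const Path & path)
{
    state.allowPath(path);
    if (store->isInStore(path)) {
        try {
            StorePathSet closure;
            // toStorePath splits off the store path; compute its closure
            // and allow every member, so references stay readable.
            store->computeFSClosure(store->toStorePath(path).first, closure);
            for (auto & p : closure)
                state.allowPath(p);
        } catch (InvalidPath &) { } // not registered: allow just the path itself
    }
}
```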
@@ -15,6 +15,7 @@
 #include "value-to-json.hh"
 #include "value-to-xml.hh"
 #include "primops.hh"
+#include "fs-input-accessor.hh"

 #include <boost/container/small_vector.hpp>
 #include <nlohmann/json.hpp>
@@ -90,9 +91,8 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
         for (auto & [outputName, outputPath] : outputs) {
             /* Add the output of this derivation to the allowed
                paths. */
-            if (allowedPaths) {
-                allowPath(outputPath);
-            }
+            allowPath(store->toRealPath(outputPath));
             /* Get all the output paths corresponding to the placeholders we had */
             if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
                 res.insert_or_assign(
@@ -110,27 +110,19 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
     return res;
 }

-struct RealisePathFlags {
-    // Whether to check that the path is allowed in pure eval mode
-    bool checkForPureEval = true;
-};
-
-static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, const RealisePathFlags flags = {})
+static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, bool resolveSymlinks = true)
 {
     NixStringContext context;

     auto path = state.coerceToPath(noPos, v, context, "while realising the context of a path");

     try {
-        if (!context.empty()) {
+        if (!context.empty() && path.accessor == state.rootFS) {
             auto rewrites = state.realiseContext(context);
             auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context);
-            return {path.accessor, CanonPath(realPath)};
+            path = {path.accessor, CanonPath(realPath)};
         }
-
-        return flags.checkForPureEval
-            ? state.checkSourcePath(path)
-            : path;
+        return resolveSymlinks ? path.resolveSymlinks() : path;
     } catch (Error & e) {
         e.addTrace(state.positions[pos], "while realising the context of path '%s'", path);
         throw;
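With `RealisePathFlags` gone, the remaining knob on `realisePath` is symlink resolution. A sketch of the two call styles used by the primops below; `realisePath` is file-local to primops.cc, so this only illustrates the contract:

```cpp
// Sketch of the contract only; not callable outside primops.cc.
static void demoRealisePath(EvalState & state, PosIdx pos, Value & v)
{
    auto followed   = realisePath(state, pos, v);        // resolves symlinks (default)
    auto unfollowed = realisePath(state, pos, v, false); // observes the symlink itself
    (void) followed; (void) unfollowed;                  // silence unused warnings
}
```

`import` and `readFileType` pass `false` because they need the unresolved path (e.g. to report the entry's own type), while hash and read primops want the target contents.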
@@ -170,7 +162,7 @@ static void mkOutputString(
    argument. */
 static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * vScope, Value & v)
 {
-    auto path = realisePath(state, pos, vPath);
+    auto path = realisePath(state, pos, vPath, false);
     auto path2 = path.path.abs();

     // FIXME
@@ -1317,7 +1309,7 @@ drvName, Bindings * attrs, Value & v)
                 .errPos = state.positions[noPos]
             }));

-        auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo));
+        auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo));

         auto method = ingestionMethod.value_or(FileIngestionMethod::Flat);

@@ -1339,7 +1331,7 @@ drvName, Bindings * attrs, Value & v)
                 .errPos = state.positions[noPos]
             });

-        auto ht = parseHashTypeOpt(outputHashAlgo).value_or(htSHA256);
+        auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256);
         auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive);

         for (auto & i : outputs) {
@@ -1348,13 +1340,13 @@ drvName, Bindings * attrs, Value & v)
                 drv.outputs.insert_or_assign(i,
                     DerivationOutput::Impure {
                         .method = method,
-                        .hashType = ht,
+                        .hashAlgo = ha,
                     });
             else
                 drv.outputs.insert_or_assign(i,
                     DerivationOutput::CAFloating {
                         .method = method,
-                        .hashType = ht,
+                        .hashAlgo = ha,
                     });
         }
     }
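These hunks are part of a mechanical rename that runs through the whole commit: `HashType`/`htSHA256`/`parseHashType` become `HashAlgorithm`/`HashAlgorithm::SHA256`/`parseHashAlgo`. A small sketch of the renamed API, using only calls that appear in this diff:

```cpp
// Sketch: parse an algorithm name, then parse a hash string with it.
#include "hash.hh"

using namespace nix;

std::optional<Hash> tryParseHash(const std::string & algo, const std::string & hash)
{
    std::optional<HashAlgorithm> ha = parseHashAlgoOpt(algo); // e.g. "sha256"
    if (!ha)
        return std::nullopt; // unknown algorithm name
    return Hash::parseAny(hash, *ha);
}
```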
@@ -1493,7 +1485,7 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args,
         }));

     NixStringContext context;
-    auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'")).path;
+    auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'").path;
     /* Resolve symlinks in ‘path’, unless ‘path’ itself is a symlink
        directly in the store. The latter condition is necessary so
        e.g. nix-push does the right thing. */
@@ -1533,29 +1525,19 @@ static RegisterPrimOp primop_storePath({

 static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
+    try {
         auto & arg = *args[0];

-        /* We don’t check the path right now, because we don’t want to
-           throw if the path isn’t allowed, but just return false (and we
-           can’t just catch the exception here because we still want to
-           throw if something in the evaluation of `arg` tries to
-           access an unauthorized path). */
-        auto path = realisePath(state, pos, arg, { .checkForPureEval = false });
+        auto path = realisePath(state, pos, arg);

         /* SourcePath doesn't know about trailing slash. */
         auto mustBeDir = arg.type() == nString
             && (arg.string_view().ends_with("/")
                 || arg.string_view().ends_with("/."));

-        try {
-        auto checked = state.checkSourcePath(path);
-        auto st = checked.maybeLstat();
+        auto st = path.maybeLstat();
         auto exists = st && (!mustBeDir || st->type == SourceAccessor::tDirectory);
         v.mkBool(exists);
-    } catch (SysError & e) {
-        /* Don't give away info from errors while canonicalising
-           ‘path’ in restricted mode. */
-        v.mkBool(false);
     } catch (RestrictedPathError & e) {
         v.mkBool(false);
     }
@@ -1699,7 +1681,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V

     auto path = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.findFile");

-    v.mkPath(state.checkSourcePath(state.findFile(searchPath, path, pos)));
+    v.mkPath(state.findFile(searchPath, path, pos));
 }

 static RegisterPrimOp primop_findFile(PrimOp {
@@ -1754,17 +1736,17 @@ static RegisterPrimOp primop_findFile(PrimOp {
 /* Return the cryptographic hash of a file in base-16. */
 static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
-    auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
-    std::optional<HashType> ht = parseHashType(type);
-    if (!ht)
+    auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
+    std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
+    if (!ha)
         state.debugThrowLastTrace(Error({
-            .msg = hintfmt("unknown hash type '%1%'", type),
+            .msg = hintfmt("unknown hash algo '%1%'", algo),
             .errPos = state.positions[pos]
         }));

     auto path = realisePath(state, pos, *args[1]);

-    v.mkString(hashString(*ht, path.readFile()).to_string(HashFormat::Base16, false));
+    v.mkString(hashString(*ha, path.readFile()).to_string(HashFormat::Base16, false));
 }

 static RegisterPrimOp primop_hashFile({
@@ -1789,7 +1771,7 @@ static std::string_view fileTypeToString(InputAccessor::Type type)

 static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
-    auto path = realisePath(state, pos, *args[0]);
+    auto path = realisePath(state, pos, *args[0], false);
     /* Retrieve the directory entry type and stringize it. */
     v.mkString(fileTypeToString(path.lstat().type));
 }
@@ -2178,11 +2160,35 @@ static RegisterPrimOp primop_toFile({
     .fun = prim_toFile,
 });

+bool EvalState::callPathFilter(
+    Value * filterFun,
+    const SourcePath & path,
+    std::string_view pathArg,
+    PosIdx pos)
+{
+    auto st = path.lstat();
+
+    /* Call the filter function. The first argument is the path, the
+       second is a string indicating the type of the file. */
+    Value arg1;
+    arg1.mkString(pathArg);
+
+    Value arg2;
+    // assert that type is not "unknown"
+    arg2.mkString(fileTypeToString(st.type));
+
+    Value * args []{&arg1, &arg2};
+    Value res;
+    callFunction(*filterFun, 2, args, res, pos);
+
+    return forceBool(res, pos, "while evaluating the return value of the path filter function");
+}
+
 static void addPath(
     EvalState & state,
     const PosIdx pos,
     std::string_view name,
-    Path path,
+    SourcePath path,
     Value * filterFun,
     FileIngestionMethod method,
     const std::optional<Hash> expectedHash,
@@ -2190,48 +2196,29 @@ static void addPath(
     const NixStringContext & context)
 {
     try {
+        StorePathSet refs;
+
+        if (path.accessor == state.rootFS && state.store->isInStore(path.path.abs())) {
             // FIXME: handle CA derivation outputs (where path needs to
             // be rewritten to the actual output).
             auto rewrites = state.realiseContext(context);
-            path = state.toRealPath(rewriteStrings(path, rewrites), context);
-
-        StorePathSet refs;
-
-        if (state.store->isInStore(path)) {
+            path = {state.rootFS, CanonPath(state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context))};
             try {
-                auto [storePath, subPath] = state.store->toStorePath(path);
+                auto [storePath, subPath] = state.store->toStorePath(path.path.abs());
                 // FIXME: we should scanForReferences on the path before adding it
                 refs = state.store->queryPathInfo(storePath)->references;
-                path = state.store->toRealPath(storePath) + subPath;
+                path = {state.rootFS, CanonPath(state.store->toRealPath(storePath) + subPath)};
             } catch (Error &) { // FIXME: should be InvalidPathError
             }
         }

-        path = evalSettings.pureEval && expectedHash
-            ? path
-            : state.checkSourcePath(state.rootPath(CanonPath(path))).path.abs();
-
-        PathFilter filter = filterFun ? ([&](const Path & path) {
-            auto st = lstat(path);
-
-            /* Call the filter function. The first argument is the path,
-               the second is a string indicating the type of the file. */
-            Value arg1;
-            arg1.mkString(path);
-
-            Value arg2;
-            arg2.mkString(
-                S_ISREG(st.st_mode) ? "regular" :
-                S_ISDIR(st.st_mode) ? "directory" :
-                S_ISLNK(st.st_mode) ? "symlink" :
-                "unknown" /* not supported, will fail! */);
-
-            Value * args []{&arg1, &arg2};
-            Value res;
-            state.callFunction(*filterFun, 2, args, res, pos);
-
-            return state.forceBool(res, pos, "while evaluating the return value of the path filter function");
-        }) : defaultPathFilter;
+        std::unique_ptr<PathFilter> filter;
+        if (filterFun)
+            filter = std::make_unique<PathFilter>([&](const Path & p) {
+                auto p2 = CanonPath(p);
+                return state.callPathFilter(filterFun, {path.accessor, p2}, p2.abs(), pos);
+            });

         std::optional<StorePath> expectedStorePath;
         if (expectedHash)
@@ -2242,7 +2229,7 @@ static void addPath(
             });

         if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
-            auto dstPath = state.rootPath(CanonPath(path)).fetchToStore(state.store, name, method, &filter, state.repair);
+            auto dstPath = path.fetchToStore(state.store, name, method, filter.get(), state.repair);
             if (expectedHash && expectedStorePath != dstPath)
                 state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
             state.allowAndSetStorePathString(dstPath, v);
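`callPathFilter` hoists the old inline filter lambda into a reusable `EvalState` method, and `addPath` adapts it back to the store-layer `PathFilter`. A sketch of that adapter in isolation; unlike the diff's version, it captures by value so the returned filter can outlive the enclosing frame:

```cpp
// Sketch of the adapter pattern used in addPath above.
#include "eval.hh"

using namespace nix;

std::unique_ptr<PathFilter> makeFilter(
    EvalState & state, Value * filterFun, const SourcePath & base, PosIdx pos)
{
    if (!filterFun)
        return nullptr; // caller falls back to the default (accept-all) filter

    return std::make_unique<PathFilter>(
        [&state, filterFun, accessor = base.accessor, pos](const Path & p) {
            auto p2 = CanonPath(p);
            // Re-wrap the raw store path as a SourcePath before calling the
            // Nix-level predicate with (path, file-type) arguments.
            return state.callPathFilter(filterFun, {accessor, p2}, p2.abs(), pos);
        });
}
```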
@@ -2261,7 +2248,8 @@ static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * arg
     auto path = state.coerceToPath(pos, *args[1], context,
         "while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'");
     state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource");
-    addPath(state, pos, path.baseName(), path.path.abs(), args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
+
+    addPath(state, pos, path.baseName(), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
 }

 static RegisterPrimOp primop_filterSource({
@@ -2341,7 +2329,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
         else if (n == "recursive")
             method = FileIngestionMethod { state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path") };
         else if (n == "sha256")
-            expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), htSHA256);
+            expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256);
         else
             state.debugThrowLastTrace(EvalError({
                 .msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]),
@@ -2356,7 +2344,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
     if (name.empty())
         name = path->baseName();

-    addPath(state, pos, name, path->path.abs(), filterFun, method, expectedHash, v, context);
+    addPath(state, pos, name, *path, filterFun, method, expectedHash, v, context);
 }

 static RegisterPrimOp primop_path({
@@ -3766,18 +3754,18 @@ static RegisterPrimOp primop_stringLength({
 /* Return the cryptographic hash of a string in base-16. */
 static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
-    auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
-    std::optional<HashType> ht = parseHashType(type);
-    if (!ht)
+    auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
+    std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
+    if (!ha)
         state.debugThrowLastTrace(Error({
-            .msg = hintfmt("unknown hash type '%1%'", type),
+            .msg = hintfmt("unknown hash algo '%1%'", algo),
             .errPos = state.positions[pos]
         }));

     NixStringContext context; // discarded
     auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");

-    v.mkString(hashString(*ht, s).to_string(HashFormat::Base16, false));
+    v.mkString(hashString(*ha, s).to_string(HashFormat::Base16, false));
 }

 static RegisterPrimOp primop_hashString({
@@ -3800,15 +3788,15 @@ static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args
     auto hash = state.forceStringNoCtx(*iteratorHash->value, pos, "while evaluating the attribute 'hash'");

     Bindings::iterator iteratorHashAlgo = inputAttrs->find(state.symbols.create("hashAlgo"));
-    std::optional<HashType> ht = std::nullopt;
+    std::optional<HashAlgorithm> ha = std::nullopt;
     if (iteratorHashAlgo != inputAttrs->end()) {
-        ht = parseHashType(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
+        ha = parseHashAlgo(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
     }

     Bindings::iterator iteratorToHashFormat = getAttr(state, state.symbols.create("toHashFormat"), args[0]->attrs, "while locating the attribute 'toHashFormat'");
     HashFormat hf = parseHashFormat(state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'"));

-    v.mkString(Hash::parseAny(hash, ht).to_string(hf, hf == HashFormat::SRI));
+    v.mkString(Hash::parseAny(hash, ha).to_string(hf, hf == HashFormat::SRI));
 }

 static RegisterPrimOp primop_convertHash({
@@ -3837,7 +3825,8 @@ static RegisterPrimOp primop_convertHash({

         The format of the resulting hash. Must be one of
         - `"base16"`
-        - `"base32"`
+        - `"nix32"`
+        - `"base32"` (deprecated alias for `"nix32"`)
         - `"base64"`
         - `"sri"`

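Alongside the algorithm rename, `HashFormat::Base32` becomes `HashFormat::Nix32` (the encoding is Nix-specific, not RFC 4648 base32), with `"base32"` kept as a deprecated spelling in `builtins.convertHash`. A one-line sketch of rendering a hash in the renamed format:

```cpp
// Sketch: the second argument toggles the algorithm prefix on the rendering.
std::string toNix32(const nix::Hash & h)
{
    return h.to_string(nix::HashFormat::Nix32, true);
}
```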
@@ -31,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
                 // be both a revision or a branch/tag name.
                 auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial");
                 if (std::regex_match(value.begin(), value.end(), revRegex))
-                    rev = Hash::parseAny(value, htSHA1);
+                    rev = Hash::parseAny(value, HashAlgorithm::SHA1);
                 else
                     ref = value;
             }
@@ -79,7 +79,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
         attrs2.alloc("branch").mkString(*input2.getRef());
         // Backward compatibility: set 'rev' to
         // 0000000000000000000000000000000000000000 for a dirty tree.
-        auto rev2 = input2.getRev().value_or(Hash(htSHA1));
+        auto rev2 = input2.getRev().value_or(Hash(HashAlgorithm::SHA1));
         attrs2.alloc("rev").mkString(rev2.gitRev());
         attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12));
         if (auto revCount = input2.getRevCount())
@@ -46,7 +46,7 @@ void emitTreeAttrs(
         attrs.alloc("shortRev").mkString(rev->gitShortRev());
     } else if (emptyRevFallback) {
         // Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
-        auto emptyHash = Hash(htSHA1);
+        auto emptyHash = Hash(HashAlgorithm::SHA1);
         attrs.alloc("rev").mkString(emptyHash.gitRev());
         attrs.alloc("shortRev").mkString(emptyHash.gitShortRev());
     }
@@ -187,45 +187,215 @@ static RegisterPrimOp primop_fetchTree({
     .name = "fetchTree",
     .args = {"input"},
     .doc = R"(
-      Fetch a source tree or a plain file using one of the supported backends.
-      *input* must be a [flake reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references), either in attribute set representation or in the URL-like syntax.
-      The input should be "locked", that is, it should contain a commit hash or content hash unless impure evaluation (`--impure`) is enabled.
+      Fetch a file system tree or a plain file using one of the supported backends and return an attribute set with:
+
+      - the resulting fixed-output [store path](@docroot@/glossary.md#gloss-store-path)
+      - the corresponding [NAR](@docroot@/glossary.md#gloss-nar) hash
+      - backend-specific metadata (currently not documented). <!-- TODO: document output attributes -->
+
+      *input* must be an attribute set with the following attributes:
+
+      - `type` (String, required)
+
+        One of the [supported source types](#source-types).
+        This determines other required and allowed input attributes.
+
+      - `narHash` (String, optional)
+
+        The `narHash` parameter can be used to substitute the source of the tree.
+        It also allows for verification of tree contents that may not be provided by the underlying transfer mechanism.
+        If `narHash` is set, the source is first looked up in the Nix store and [substituters](@docroot@/command-ref/conf-file.md#conf-substituters), and only fetched if not available.
+
+      A subset of the output attributes of `fetchTree` can be re-used for subsequent calls to `fetchTree` to produce the same result again.
+      That is, `fetchTree` is idempotent.
+
+      Downloads are cached in `$XDG_CACHE_HOME/nix`.
+      The remote source will be fetched from the network if both are true:
+
+      - A NAR hash is supplied and the corresponding store path is not [valid](@docroot@/glossary.md#gloss-validity), that is, not available in the store

       > **Note**
       >
-      > The URL-like syntax requires the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled.
+      > [Substituters](@docroot@/command-ref/conf-file.md#conf-substituters) are not used in fetching.

-      Here are some examples of how to use `fetchTree`:
+      - There is no cache entry or the cache entry is older than [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl)

-      - Fetch a GitHub repository using the attribute set representation:
+      ## Source types

-        ```nix
-        builtins.fetchTree {
-          type = "github";
-          owner = "NixOS";
-          repo = "nixpkgs";
-          rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
-        }
-        ```
+      The following source types and associated input attributes are supported.

-        This evaluates to the following attribute set:
+      <!-- TODO: It would be soooo much more predictable to work with (and
+      document) if `fetchTree` was a curried call with the first parameter for
+      `type` or an attribute like `builtins.fetchTree.git`! -->

-        ```
-        {
-          lastModified = 1686503798;
-          lastModifiedDate = "20230611171638";
-          narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc=";
-          outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source";
-          rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
-          shortRev = "ae2e6b3";
-        }
-        ```
+      - `"file"`
+
+        Place a plain file into the Nix store.
+        This is similar to [`builtins.fetchurl`](@docroot@/language/builtins.md#builtins-fetchurl)
+
+        - `url` (String, required)
+
+          Supported protocols:
+
+          - `https`
+
+            > **Example**
+            >
+            > ```nix
+            > fetchTree {
+            >   type = "file";
+            >   url = "https://example.com/index.html";
+            > }
+            > ```
+
+          - `http`
+
+            Insecure HTTP transfer for legacy sources.
+
+            > **Warning**
+            >
+            > HTTP performs no encryption or authentication.
+            > Use a `narHash` known in advance to ensure the output has expected contents.
+
+          - `file`
+
+            A file on the local file system.
+
+            > **Example**
+            >
+            > ```nix
+            > fetchTree {
+            >   type = "file";
+            >   url = "file:///home/eelco/nix/README.md";
+            > }
+            > ```
+
+      - `"tarball"`
+
+        Download a tar archive and extract it into the Nix store.
+        This has the same underlying implementation as [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball)
+
+        - `url` (String, required)
+
+          > **Example**
+          >
+          > ```nix
+          > fetchTree {
+          >   type = "tarball";
+          >   url = "https://github.com/NixOS/nixpkgs/tarball/nixpkgs-23.11";
+          > }
+          > ```
+
+      - `"git"`
+
+        Fetch a Git tree and copy it to the Nix store.
+        This is similar to [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit).
+
+        - `url` (String, required)
+
+          The URL formats supported are the same as for Git itself.
+
+          > **Example**
+          >
+          > ```nix
+          > fetchTree {
+          >   type = "git";
+          >   url = "git@github.com:NixOS/nixpkgs.git";
+          > }
+          > ```
+
+          > **Note**
+          >
+          > If the URL points to a local directory, and no `ref` or `rev` is given, Nix will only consider files added to the Git index, as listed by `git ls-files` but use the *current file contents* of the Git working directory.
+
+        - `ref` (String, optional)
+
+          A [Git reference](https://git-scm.com/book/en/v2/Git-Internals-Git-References), such as a branch or tag name.
+
+          Default: `"HEAD"`
+
+        - `rev` (String, optional)
+
+          A Git revision; a commit hash.
+
+          Default: the tip of `ref`
+
+        - `shallow` (Bool, optional)
+
+          Make a shallow clone when fetching the Git tree.
+
+          Default: `false`
+
+        - `submodules` (Bool, optional)
+
+          Also fetch submodules if available.
+
+          Default: `false`
+
+        - `allRefs` (Bool, optional)
+
+          If set to `true`, always fetch the entire repository, even if the latest commit is still in the cache.
+          Otherwise, only the latest commit is fetched if it is not already cached.
+
+          Default: `false`
+
+        - `lastModified` (Integer, optional)
+
+          Unix timestamp of the fetched commit.
+
+          If set, pass through the value to the output attribute set.
+          Otherwise, generated from the fetched Git tree.
+
+        - `revCount` (Integer, optional)
+
+          Number of revisions in the history of the Git repository before the fetched commit.
+
+          If set, pass through the value to the output attribute set.
+          Otherwise, generated from the fetched Git tree.
+
+      The following input types are still subject to change:
+
+      - `"path"`
+      - `"github"`
+      - `"gitlab"`
+      - `"sourcehut"`
+      - `"mercurial"`
+
+      *input* can also be a [URL-like reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references).
+      The additional input types and the URL-like syntax require the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled.
+
+      > **Example**
+      >
+      > Fetch a GitHub repository using the attribute set representation:
+      >
+      > ```nix
+      > builtins.fetchTree {
+      >   type = "github";
+      >   owner = "NixOS";
+      >   repo = "nixpkgs";
+      >   rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
+      > }
+      > ```
+      >
+      > This evaluates to the following attribute set:
+      >
+      > ```nix
+      > {
+      >   lastModified = 1686503798;
+      >   lastModifiedDate = "20230611171638";
+      >   narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc=";
+      >   outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source";
+      >   rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
+      >   shortRev = "ae2e6b3";
+      > }
+      > ```

-      - Fetch the same GitHub repository using the URL-like syntax:
+      > **Example**
+      >
+      > Fetch the same GitHub repository using the URL-like syntax:
+      >
+      > ```nix
+      > builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd"
+      > ```

-        ```
-        builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd"
-        ```
     )",
     .fun = prim_fetchTree,
     .experimentalFeature = Xp::FetchTree,
@@ -246,7 +416,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
             if (n == "url")
                 url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch");
             else if (n == "sha256")
-                expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), htSHA256);
+                expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashAlgorithm::SHA256);
             else if (n == "name")
                 name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
             else
@@ -276,7 +446,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
         state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who));

     // early exit if pinned and already in the store
-    if (expectedHash && expectedHash->type == htSHA256) {
+    if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) {
         auto expectedPath = state.store->makeFixedOutputPath(
             name,
             FixedOutputInfo {
@@ -301,10 +471,10 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
     if (expectedHash) {
         auto hash = unpack
             ? state.store->queryPathInfo(storePath)->narHash
-            : hashFile(htSHA256, state.store->toRealPath(storePath));
+            : hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath));
         if (hash != *expectedHash)
             state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
-                *url, expectedHash->to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true)));
+                *url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true)));
     }

     state.allowAndSetStorePathString(storePath, v);
@@ -423,10 +423,9 @@ public:
     SourcePath path() const
     {
         assert(internalType == tPath);
-        return SourcePath {
-            .accessor = ref(_path.accessor->shared_from_this()),
-            .path = CanonPath(CanonPath::unchecked_t(), _path.path)
-        };
+        return SourcePath(
+            ref(_path.accessor->shared_from_this()),
+            CanonPath(CanonPath::unchecked_t(), _path.path));
     }

     std::string_view string_view() const
@@ -289,8 +289,8 @@ std::string Input::getType() const
 std::optional<Hash> Input::getNarHash() const
 {
     if (auto s = maybeGetStrAttr(attrs, "narHash")) {
-        auto hash = s->empty() ? Hash(htSHA256) : Hash::parseSRI(*s);
-        if (hash.type != htSHA256)
+        auto hash = s->empty() ? Hash(HashAlgorithm::SHA256) : Hash::parseSRI(*s);
+        if (hash.algo != HashAlgorithm::SHA256)
             throw UsageError("narHash must use SHA-256");
         return hash;
     }
@@ -314,7 +314,7 @@ std::optional<Hash> Input::getRev() const
         } catch (BadHash &e) {
             // Default to sha1 for backwards compatibility with existing
             // usages (e.g. `builtins.fetchTree` calls or flake inputs).
-            hash = Hash::parseAny(*s, htSHA1);
+            hash = Hash::parseAny(*s, HashAlgorithm::SHA1);
         }
     }

@@ -374,7 +374,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
 std::pair<StorePath, Input> InputScheme::fetch(ref<Store> store, const Input & input)
 {
     auto [accessor, input2] = getAccessor(store, input);
-    auto storePath = accessor->root().fetchToStore(store, input2.getName());
+    auto storePath = SourcePath(accessor).fetchToStore(store, input2.getName());
     return {storePath, input2};
 }

83 src/libfetchers/filtering-input-accessor.cc (new file)
@@ -0,0 +1,83 @@
+#include "filtering-input-accessor.hh"
+
+namespace nix {
+
+std::string FilteringInputAccessor::readFile(const CanonPath & path)
+{
+    checkAccess(path);
+    return next->readFile(prefix + path);
+}
+
+bool FilteringInputAccessor::pathExists(const CanonPath & path)
+{
+    return isAllowed(path) && next->pathExists(prefix + path);
+}
+
+std::optional<InputAccessor::Stat> FilteringInputAccessor::maybeLstat(const CanonPath & path)
+{
+    checkAccess(path);
+    return next->maybeLstat(prefix + path);
+}
+
+InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath & path)
+{
+    checkAccess(path);
+    DirEntries entries;
+    for (auto & entry : next->readDirectory(prefix + path)) {
+        if (isAllowed(path + entry.first))
+            entries.insert(std::move(entry));
+    }
+    return entries;
+}
+
+std::string FilteringInputAccessor::readLink(const CanonPath & path)
+{
+    checkAccess(path);
+    return next->readLink(prefix + path);
+}
+
+std::string FilteringInputAccessor::showPath(const CanonPath & path)
+{
+    return next->showPath(prefix + path);
+}
+
+void FilteringInputAccessor::checkAccess(const CanonPath & path)
+{
+    if (!isAllowed(path))
+        throw makeNotAllowedError
+            ? makeNotAllowedError(path)
+            : RestrictedPathError("access to path '%s' is forbidden", showPath(path));
+}
+
+struct AllowListInputAccessorImpl : AllowListInputAccessor
+{
+    std::set<CanonPath> allowedPaths;
+
+    AllowListInputAccessorImpl(
+        ref<InputAccessor> next,
+        std::set<CanonPath> && allowedPaths,
+        MakeNotAllowedError && makeNotAllowedError)
+        : AllowListInputAccessor(SourcePath(next), std::move(makeNotAllowedError))
+        , allowedPaths(std::move(allowedPaths))
+    { }
+
+    bool isAllowed(const CanonPath & path) override
+    {
+        return path.isAllowed(allowedPaths);
+    }
+
+    void allowPath(CanonPath path) override
+    {
+        allowedPaths.insert(std::move(path));
+    }
+};
+
+ref<AllowListInputAccessor> AllowListInputAccessor::create(
+    ref<InputAccessor> next,
+    std::set<CanonPath> && allowedPaths,
+    MakeNotAllowedError && makeNotAllowedError)
+{
+    return make_ref<AllowListInputAccessorImpl>(next, std::move(allowedPaths), std::move(makeNotAllowedError));
+}
+
+}
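A sketch of using this new file's factory: wrap an arbitrary `InputAccessor` in an allow-list that starts empty, then open paths up one at a time. Only calls defined in this file are used; the `/etc/nix` path is an arbitrary illustration:

```cpp
// Sketch: restrict an accessor, then selectively grant access.
#include "filtering-input-accessor.hh"

using namespace nix;

ref<InputAccessor> restrictAccessor(ref<InputAccessor> next)
{
    auto accessor = AllowListInputAccessor::create(
        next,
        {}, // start with nothing allowed
        [](const CanonPath & path) {
            return RestrictedPathError("access to path '%s' is forbidden", path.abs());
        });
    accessor->allowPath(CanonPath("/etc/nix")); // hypothetical grant
    return accessor;
}
```

Note the asymmetry in the implementation above: `pathExists` quietly returns `false` for forbidden paths, while the other operations throw via `checkAccess`, mirroring how `builtins.pathExists` behaves in restricted mode.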
73
src/libfetchers/filtering-input-accessor.hh
Normal file
73
src/libfetchers/filtering-input-accessor.hh
Normal file
|
@ -0,0 +1,73 @@
#pragma once

#include "input-accessor.hh"

namespace nix {

/**
 * A function that should throw an exception of type
 * `RestrictedPathError` explaining that access to `path` is
 * forbidden.
 */
typedef std::function<RestrictedPathError(const CanonPath & path)> MakeNotAllowedError;

/**
 * An abstract wrapping `InputAccessor` that performs access
 * control. Subclasses should override `isAllowed()` to implement an
 * access control policy. The error message is customized at construction.
 */
struct FilteringInputAccessor : InputAccessor
{
    ref<InputAccessor> next;
    CanonPath prefix;
    MakeNotAllowedError makeNotAllowedError;

    FilteringInputAccessor(const SourcePath & src, MakeNotAllowedError && makeNotAllowedError)
        : next(src.accessor)
        , prefix(src.path)
        , makeNotAllowedError(std::move(makeNotAllowedError))
    { }

    std::string readFile(const CanonPath & path) override;

    bool pathExists(const CanonPath & path) override;

    std::optional<Stat> maybeLstat(const CanonPath & path) override;

    DirEntries readDirectory(const CanonPath & path) override;

    std::string readLink(const CanonPath & path) override;

    std::string showPath(const CanonPath & path) override;

    /**
     * Call `makeNotAllowedError` to throw a `RestrictedPathError`
     * exception if `isAllowed()` returns `false` for `path`.
     */
    void checkAccess(const CanonPath & path);

    /**
     * Return `true` iff access to path is allowed.
     */
    virtual bool isAllowed(const CanonPath & path) = 0;
};

/**
 * A wrapping `InputAccessor` that checks paths against an allow-list.
 */
struct AllowListInputAccessor : public FilteringInputAccessor
{
    /**
     * Grant access to the specified path.
     */
    virtual void allowPath(CanonPath path) = 0;

    static ref<AllowListInputAccessor> create(
        ref<InputAccessor> next,
        std::set<CanonPath> && allowedPaths,
        MakeNotAllowedError && makeNotAllowedError);

    using FilteringInputAccessor::FilteringInputAccessor;
};

}
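
To illustrate the new interface: a minimal hypothetical subclass. `NoDotfilesInputAccessor` and its policy are invented for this sketch; only `FilteringInputAccessor` and `CanonPath::abs()` come from the code above.

// Hypothetical sketch, not part of this commit: a policy that hides
// dotfiles. The (SourcePath, error) constructor is inherited via `using`.
struct NoDotfilesInputAccessor : FilteringInputAccessor
{
    using FilteringInputAccessor::FilteringInputAccessor;

    bool isAllowed(const CanonPath & path) override
    {
        // CanonPath::abs() yields the canonical absolute form, so any
        // dotfile component appears as "/." somewhere in the string.
        return path.abs().find("/.") == std::string::npos;
    }
};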
@@ -4,19 +4,12 @@
 namespace nix {

-struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
+struct FSInputAccessor : InputAccessor, PosixSourceAccessor
 {
     CanonPath root;
-    std::optional<std::set<CanonPath>> allowedPaths;
-    MakeNotAllowedError makeNotAllowedError;

-    FSInputAccessorImpl(
-        const CanonPath & root,
-        std::optional<std::set<CanonPath>> && allowedPaths,
-        MakeNotAllowedError && makeNotAllowedError)
+    FSInputAccessor(const CanonPath & root)
         : root(root)
-        , allowedPaths(std::move(allowedPaths))
-        , makeNotAllowedError(std::move(makeNotAllowedError))
     {
         displayPrefix = root.isRoot() ? "" : root.abs();
     }
@@ -27,39 +20,30 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
         std::function<void(uint64_t)> sizeCallback) override
     {
         auto absPath = makeAbsPath(path);
-        checkAllowed(absPath);
         PosixSourceAccessor::readFile(absPath, sink, sizeCallback);
     }

     bool pathExists(const CanonPath & path) override
     {
-        auto absPath = makeAbsPath(path);
-        return isAllowed(absPath) && PosixSourceAccessor::pathExists(absPath);
+        return PosixSourceAccessor::pathExists(makeAbsPath(path));
     }

     std::optional<Stat> maybeLstat(const CanonPath & path) override
     {
-        auto absPath = makeAbsPath(path);
-        checkAllowed(absPath);
-        return PosixSourceAccessor::maybeLstat(absPath);
+        return PosixSourceAccessor::maybeLstat(makeAbsPath(path));
     }

     DirEntries readDirectory(const CanonPath & path) override
     {
-        auto absPath = makeAbsPath(path);
-        checkAllowed(absPath);
         DirEntries res;
-        for (auto & entry : PosixSourceAccessor::readDirectory(absPath))
-            if (isAllowed(absPath + entry.first))
-                res.emplace(entry);
+        for (auto & entry : PosixSourceAccessor::readDirectory(makeAbsPath(path)))
+            res.emplace(entry);
         return res;
     }

     std::string readLink(const CanonPath & path) override
     {
-        auto absPath = makeAbsPath(path);
-        checkAllowed(absPath);
-        return PosixSourceAccessor::readLink(absPath);
+        return PosixSourceAccessor::readLink(makeAbsPath(path));
     }

     CanonPath makeAbsPath(const CanonPath & path)
@@ -67,59 +51,22 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
         return root + path;
     }

-    void checkAllowed(const CanonPath & absPath) override
-    {
-        if (!isAllowed(absPath))
-            throw makeNotAllowedError
-                ? makeNotAllowedError(absPath)
-                : RestrictedPathError("access to path '%s' is forbidden", absPath);
-    }
-
-    bool isAllowed(const CanonPath & absPath)
-    {
-        if (!absPath.isWithin(root))
-            return false;
-
-        if (allowedPaths) {
-            auto p = absPath.removePrefix(root);
-            if (!p.isAllowed(*allowedPaths))
-                return false;
-        }
-
-        return true;
-    }
-
-    void allowPath(CanonPath path) override
-    {
-        if (allowedPaths)
-            allowedPaths->insert(std::move(path));
-    }
-
-    bool hasAccessControl() override
-    {
-        return (bool) allowedPaths;
-    }
-
     std::optional<CanonPath> getPhysicalPath(const CanonPath & path) override
     {
         return makeAbsPath(path);
     }
 };

-ref<FSInputAccessor> makeFSInputAccessor(
-    const CanonPath & root,
-    std::optional<std::set<CanonPath>> && allowedPaths,
-    MakeNotAllowedError && makeNotAllowedError)
+ref<InputAccessor> makeFSInputAccessor(const CanonPath & root)
 {
-    return make_ref<FSInputAccessorImpl>(root, std::move(allowedPaths), std::move(makeNotAllowedError));
+    return make_ref<FSInputAccessor>(root);
 }

-ref<FSInputAccessor> makeStorePathAccessor(
+ref<InputAccessor> makeStorePathAccessor(
     ref<Store> store,
-    const StorePath & storePath,
-    MakeNotAllowedError && makeNotAllowedError)
+    const StorePath & storePath)
 {
-    return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)), {}, std::move(makeNotAllowedError));
+    return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)));
 }

 SourcePath getUnfilteredRootPath(CanonPath path)
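
The intended composition of the two new pieces appears concretely in the `git.cc` hunk further down; in isolation it amounts to the following sketch (the path, allow-list contents, and error callback are placeholders):

// Wrap a plain filesystem accessor in an allow-list filter.
std::set<CanonPath> workdirFiles { CanonPath("/flake.nix") };  // assumed
ref<InputAccessor> accessor =
    AllowListInputAccessor::create(
        makeFSInputAccessor(CanonPath("/path/to/repo")),       // assumed
        std::move(workdirFiles),
        [](const CanonPath & path) {
            // Illustrative message only.
            return RestrictedPathError("'%s' is not tracked", path.abs());
        });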
@@ -7,26 +7,12 @@ namespace nix {
 class StorePath;
 class Store;

-struct FSInputAccessor : InputAccessor
-{
-    virtual void checkAllowed(const CanonPath & absPath) = 0;
-
-    virtual void allowPath(CanonPath path) = 0;
-
-    virtual bool hasAccessControl() = 0;
-};
-
-typedef std::function<RestrictedPathError(const CanonPath & path)> MakeNotAllowedError;
-
-ref<FSInputAccessor> makeFSInputAccessor(
-    const CanonPath & root,
-    std::optional<std::set<CanonPath>> && allowedPaths = {},
-    MakeNotAllowedError && makeNotAllowedError = {});
+ref<InputAccessor> makeFSInputAccessor(
+    const CanonPath & root);

-ref<FSInputAccessor> makeStorePathAccessor(
+ref<InputAccessor> makeStorePathAccessor(
     ref<Store> store,
-    const StorePath & storePath,
-    MakeNotAllowedError && makeNotAllowedError = {});
+    const StorePath & storePath);

 SourcePath getUnfilteredRootPath(CanonPath path);
@@ -91,7 +91,7 @@ Hash toHash(const git_oid & oid)
 #ifdef GIT_EXPERIMENTAL_SHA256
     assert(oid.type == GIT_OID_SHA1);
 #endif
-    Hash hash(htSHA1);
+    Hash hash(HashAlgorithm::SHA1);
     memcpy(hash.hash, oid.id, hash.hashSize);
     return hash;
 }
@@ -439,7 +439,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
         std::string re = R"(Good "git" signature for \* with .* key SHA256:[)";
         for (const fetchers::PublicKey & k : publicKeys){
             // Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally
-            auto fingerprint = trim(hashString(htSHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
+            auto fingerprint = trim(hashString(HashAlgorithm::SHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
             auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" );
             re += "(" + escaped_fingerprint + ")";
         }
@@ -554,7 +554,7 @@ struct GitInputAccessor : InputAccessor
         return toHash(*git_tree_entry_id(entry));
     }

-    std::map<CanonPath, TreeEntry> lookupCache;
+    std::unordered_map<CanonPath, TreeEntry> lookupCache;

     /* Recursively look up 'path' relative to the root. */
     git_tree_entry * lookup(const CanonPath & path)
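
Switching `lookupCache` to `std::unordered_map` presupposes a `std::hash<CanonPath>` specialization (Nix provides one in its headers). A minimal sketch of such a specialization, assuming only `CanonPath::abs()` from the code above:

// Sketch only: hash a CanonPath via its canonical string form.
template<>
struct std::hash<nix::CanonPath>
{
    std::size_t operator()(const nix::CanonPath & p) const noexcept
    {
        return std::hash<std::string>{}(p.abs());
    }
};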
@@ -9,6 +9,7 @@
 #include "processes.hh"
 #include "git.hh"
 #include "fs-input-accessor.hh"
+#include "filtering-input-accessor.hh"
 #include "mounted-input-accessor.hh"
 #include "git-utils.hh"
 #include "logging.hh"
@@ -52,7 +53,7 @@ bool touchCacheFile(const Path & path, time_t touch_time)
 Path getCachePath(std::string_view key)
 {
     return getCacheDir() + "/nix/gitv3/" +
-        hashString(htSHA256, key).to_string(HashFormat::Base32, false);
+        hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false);
 }

 // Returns the name of the HEAD branch.
@@ -369,7 +370,7 @@ struct GitInputScheme : InputScheme
     {
         auto checkHashType = [&](const std::optional<Hash> & hash)
         {
-            if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
+            if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256))
                 throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
         };

@@ -559,7 +560,7 @@ struct GitInputScheme : InputScheme
                     repoInfo.url
                 );
             } else
-                input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
+                input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), HashAlgorithm::SHA1).gitRev());

             // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
         }
@@ -639,7 +640,10 @@ struct GitInputScheme : InputScheme
             repoInfo.workdirInfo.files.insert(submodule.path);

         ref<InputAccessor> accessor =
-            makeFSInputAccessor(CanonPath(repoInfo.url), repoInfo.workdirInfo.files, makeNotAllowedError(repoInfo.url));
+            AllowListInputAccessor::create(
+                makeFSInputAccessor(CanonPath(repoInfo.url)),
+                std::move(repoInfo.workdirInfo.files),
+                makeNotAllowedError(repoInfo.url));

         /* If the repo has submodules, return a mounted input accessor
            consisting of the accessor for the top-level repo and the
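
The `Base32` to `Nix32` rename in `getCachePath()` above is purely nominal: the encoding is still Nix's custom base-32 alphabet, now named to avoid confusion with RFC 4648 Base32. A minimal usage sketch with the renamed identifiers (the key string is illustrative):

// Compute a cache-key digest the way getCachePath() now does.
auto digest = hashString(HashAlgorithm::SHA256, "https://example.org/repo.git")
    .to_string(HashFormat::Nix32, false);  // 52-char Nix base-32, no "sha256:" prefix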
@@ -42,7 +42,7 @@ struct GitArchiveInputScheme : InputScheme
         auto size = path.size();
         if (size == 3) {
             if (std::regex_match(path[2], revRegex))
-                rev = Hash::parseAny(path[2], htSHA1);
+                rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
             else if (std::regex_match(path[2], refRegex))
                 ref = path[2];
             else
@@ -68,7 +68,7 @@ struct GitArchiveInputScheme : InputScheme
             if (name == "rev") {
                 if (rev)
                     throw BadURL("URL '%s' contains multiple commit hashes", url.url);
-                rev = Hash::parseAny(value, htSHA1);
+                rev = Hash::parseAny(value, HashAlgorithm::SHA1);
             }
             else if (name == "ref") {
                 if (!std::regex_match(value, refRegex))
@@ -284,7 +284,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
             readFile(
                 store->toRealPath(
                     downloadFile(store, url, "source", false, headers).storePath)));
-        auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1);
+        auto rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1);
         debug("HEAD revision for '%s' is %s", url, rev.gitRev());
         return rev;
     }
@@ -356,7 +356,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
             readFile(
                 store->toRealPath(
                     downloadFile(store, url, "source", false, headers).storePath)));
-        auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1);
+        auto rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1);
         debug("HEAD revision for '%s' is %s", url, rev.gitRev());
         return rev;
     }
@@ -448,7 +448,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
         if(!id)
             throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref);

-        auto rev = Hash::parseAny(*id, htSHA1);
+        auto rev = Hash::parseAny(*id, HashAlgorithm::SHA1);
         debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev());
         return rev;
     }
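
All the `htSHA1` call sites in these fetchers follow the same pattern; a sketch of the renamed call (the revision string is illustrative, not a real commit):

// Parse a 40-character hex revision as SHA-1 with the renamed enum.
auto rev = Hash::parseAny("0123456789abcdef0123456789abcdef01234567", HashAlgorithm::SHA1);
auto hex = rev.gitRev();  // back to the hex form git uses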
@@ -20,7 +20,7 @@ struct IndirectInputScheme : InputScheme
         if (path.size() == 1) {
         } else if (path.size() == 2) {
             if (std::regex_match(path[1], revRegex))
-                rev = Hash::parseAny(path[1], htSHA1);
+                rev = Hash::parseAny(path[1], HashAlgorithm::SHA1);
             else if (std::regex_match(path[1], refRegex))
                 ref = path[1];
             else
@@ -31,7 +31,7 @@ struct IndirectInputScheme : InputScheme
             ref = path[1];
             if (!std::regex_match(path[2], revRegex))
                 throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]);
-            rev = Hash::parseAny(path[2], htSHA1);
+            rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
         } else
             throw BadURL("GitHub URL '%s' is invalid", url.url);

@@ -44,8 +44,8 @@ StorePath InputAccessor::fetchToStore(

     auto storePath =
         settings.readOnlyMode
-        ? store->computeStorePathFromDump(*source, name, method, htSHA256).first
-        : store->addToStoreFromDump(*source, name, method, htSHA256, repair);
+        ? store->computeStorePathFromDump(*source, name, method, HashAlgorithm::SHA256).first
+        : store->addToStoreFromDump(*source, name, method, HashAlgorithm::SHA256, repair);

     if (cacheKey)
         fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);
@@ -53,11 +53,6 @@ StorePath InputAccessor::fetchToStore(
     return storePath;
 }

-SourcePath InputAccessor::root()
-{
-    return {ref(shared_from_this()), CanonPath::root};
-}
-
 std::ostream & operator << (std::ostream & str, const SourcePath & path)
 {
     str << path.to_string();
@@ -88,7 +83,7 @@ SourcePath SourcePath::parent() const

 SourcePath SourcePath::resolveSymlinks() const
 {
-    auto res = accessor->root();
+    auto res = SourcePath(accessor);

     int linksAllowed = 1024;

@@ -36,8 +36,6 @@ struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<InputAccessor>
         FileIngestionMethod method = FileIngestionMethod::Recursive,
         PathFilter * filter = nullptr,
         RepairFlag repair = NoRepair);
-
-    SourcePath root();
 };

 /**
@@ -51,6 +49,11 @@ struct SourcePath
     ref<InputAccessor> accessor;
     CanonPath path;

+    SourcePath(ref<InputAccessor> accessor, CanonPath path = CanonPath::root)
+        : accessor(std::move(accessor))
+        , path(std::move(path))
+    { }
+
     std::string_view baseName() const;

     /**
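
With the new `SourcePath` converting constructor, the removed `InputAccessor::root()` helper becomes a one-liner at each call site; a sketch assuming an `accessor` in scope:

// Before: auto res = accessor->root();
// After: the constructor defaults `path` to CanonPath::root.
SourcePath res { accessor };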
@@ -210,7 +210,7 @@ struct MercurialInputScheme : InputScheme
                 return files.count(file);
             };

-            auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
+            auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, filter);

             return {std::move(storePath), input};
         }
@@ -220,7 +220,7 @@ struct MercurialInputScheme : InputScheme

         auto checkHashType = [&](const std::optional<Hash> & hash)
         {
-            if (hash.has_value() && hash->type != htSHA1)
+            if (hash.has_value() && hash->algo != HashAlgorithm::SHA1)
                 throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
         };

@@ -260,14 +260,14 @@ struct MercurialInputScheme : InputScheme
         });

         if (auto res = getCache()->lookup(store, unlockedAttrs)) {
-            auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
+            auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1);
             if (!input.getRev() || input.getRev() == rev2) {
                 input.attrs.insert_or_assign("rev", rev2.gitRev());
                 return makeResult(res->first, std::move(res->second));
             }
         }

-        Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(HashFormat::Base32, false));
+        Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false));

         /* If this is a commit hash that we already have, we don't
            have to pull again. */
@@ -301,7 +301,7 @@ struct MercurialInputScheme : InputScheme
                 runHg({ "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
             assert(tokens.size() == 3);

-            input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], htSHA1).gitRev());
+            input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], HashAlgorithm::SHA1).gitRev());
             auto revCount = std::stoull(tokens[1]);
             input.attrs.insert_or_assign("ref", tokens[2]);

@@ -73,7 +73,7 @@ DownloadFileResult downloadFile(
     } else {
         StringSink sink;
         dumpString(res.data, sink);
-        auto hash = hashString(htSHA256, res.data);
+        auto hash = hashString(HashAlgorithm::SHA256, res.data);
         ValidPathInfo info {
             *store,
             name,
@@ -82,7 +82,7 @@ DownloadFileResult downloadFile(
                 .hash = hash,
                 .references = {},
             },
-            hashString(htSHA256, sink.s),
+            hashString(HashAlgorithm::SHA256, sink.s),
         };
         info.narSize = sink.s.size();
         auto source = StringSource { sink.s };
@@ -156,7 +156,7 @@ DownloadTarballResult downloadTarball(
             throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
         auto topDir = tmpDir + "/" + members.begin()->name;
         lastModified = lstat(topDir).st_mtime;
-        unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
+        unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, defaultPathFilter, NoRepair);
     }

     Attrs infoAttrs({
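
`downloadFile()` above hashes both the raw data and its NAR serialization; the idiom, with an assumed `data` payload:

std::string data = "hello";                                 // assumed payload
auto flatHash = hashString(HashAlgorithm::SHA256, data);    // hash of the raw bytes
StringSink sink;
dumpString(data, sink);                                     // NAR-serialize the string
auto narHash = hashString(HashAlgorithm::SHA256, sink.s);   // hash of the NAR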
@@ -143,9 +143,9 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
     /* Read the NAR simultaneously into a CompressionSink+FileSink (to
        write the compressed NAR to disk), into a HashSink (to get the
        NAR hash), and into a NarAccessor (to get the NAR listing). */
-    HashSink fileHashSink { htSHA256 };
+    HashSink fileHashSink { HashAlgorithm::SHA256 };
     std::shared_ptr<SourceAccessor> narAccessor;
-    HashSink narHashSink { htSHA256 };
+    HashSink narHashSink { HashAlgorithm::SHA256 };
     {
         FdSink fileSink(fdTemp.get());
         TeeSink teeSinkCompressed { fileSink, fileHashSink };
@@ -165,7 +165,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
     auto [fileHash, fileSize] = fileHashSink.finish();
     narInfo->fileHash = fileHash;
     narInfo->fileSize = fileSize;
-    narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Base32, false) + ".nar"
+    narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Nix32, false) + ".nar"
         + (compression == "xz" ? ".xz" :
            compression == "bzip2" ? ".bz2" :
            compression == "zstd" ? ".zst" :
@@ -301,9 +301,9 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
 }

 StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name,
-    FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
+    FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
 {
-    if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256)
+    if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
         unsupported("addToStoreFromDump");
     return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
         ValidPathInfo info {
@@ -402,7 +402,7 @@ StorePath BinaryCacheStore::addToStore(
     std::string_view name,
     const Path & srcPath,
     FileIngestionMethod method,
-    HashType hashAlgo,
+    HashAlgorithm hashAlgo,
     PathFilter & filter,
     RepairFlag repair,
     const StorePathSet & references)
@@ -448,7 +448,7 @@ StorePath BinaryCacheStore::addTextToStore(
     const StorePathSet & references,
    RepairFlag repair)
 {
-    auto textHash = hashString(htSHA256, s);
+    auto textHash = hashString(HashAlgorithm::SHA256, s);
     auto path = makeTextPath(name, TextInfo { { textHash }, references });

     if (!repair && isValidPath(path))
@@ -124,13 +124,13 @@ public:
         RepairFlag repair, CheckSigsFlag checkSigs) override;

     StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override;
+        FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;

     StorePath addToStore(
         std::string_view name,
         const Path & srcPath,
         FileIngestionMethod method,
-        HashType hashAlgo,
+        HashAlgorithm hashAlgo,
         PathFilter & filter,
         RepairFlag repair,
         const StorePathSet & references) override;
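
`addToStoreCommon()` streams the NAR through `HashSink`s instead of buffering it; the incremental pattern, with assumed input chunks:

// Feed data to a HashSink chunk by chunk, then finish() to get
// the digest plus the total byte count.
HashSink narHashSink { HashAlgorithm::SHA256 };
narHashSink("some bytes");    // assumed chunk
narHashSink("more bytes");    // assumed chunk
auto [narHash, narSize] = narHashSink.finish();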
@@ -558,7 +558,7 @@ void DerivationGoal::inputsRealised()
                inputDrvOutputs statefully, sometimes it gets out of sync with
                the real source of truth (store). So we query the store
                directly if there's a problem. */
-            attempt = fullDrv.tryResolve(worker.store);
+            attempt = fullDrv.tryResolve(worker.store, &worker.evalStore);
         }
         assert(attempt);
         Derivation drvResolved { std::move(*attempt) };
@@ -1066,8 +1066,8 @@ void LocalDerivationGoal::initTmpDir() {
         if (passAsFile.find(i.first) == passAsFile.end()) {
             env[i.first] = i.second;
         } else {
-            auto hash = hashString(htSHA256, i.first);
-            std::string fn = ".attr-" + hash.to_string(HashFormat::Base32, false);
+            auto hash = hashString(HashAlgorithm::SHA256, i.first);
+            std::string fn = ".attr-" + hash.to_string(HashFormat::Nix32, false);
             Path p = tmpDir + "/" + fn;
             writeFile(p, rewriteStrings(i.second, inputRewrites));
             chownToBuilder(p);
@@ -1293,7 +1293,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
         std::string_view name,
         const Path & srcPath,
         FileIngestionMethod method,
-        HashType hashAlgo,
+        HashAlgorithm hashAlgo,
         PathFilter & filter,
         RepairFlag repair,
         const StorePathSet & references) override
@@ -1321,7 +1321,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
         Source & dump,
         std::string_view name,
         FileIngestionMethod method,
-        HashType hashAlgo,
+        HashAlgorithm hashAlgo,
         RepairFlag repair,
         const StorePathSet & references) override
     {
@@ -2466,7 +2466,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
             rewriteOutput(outputRewrites);
             /* FIXME optimize and deduplicate with addToStore */
             std::string oldHashPart { scratchPath->hashPart() };
-            HashModuloSink caSink { outputHash.hashType, oldHashPart };
+            HashModuloSink caSink {outputHash.hashAlgo, oldHashPart };
             std::visit(overloaded {
                 [&](const TextIngestionMethod &) {
                     readFile(actualPath, caSink);
@@ -2511,7 +2511,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                         std::string(newInfo0.path.hashPart())}});
             }

-            HashResult narHashAndSize = hashPath(htSHA256, actualPath);
+            HashResult narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
             newInfo0.narHash = narHashAndSize.first;
             newInfo0.narSize = narHashAndSize.second;

@@ -2531,7 +2531,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                 std::string { scratchPath->hashPart() },
                 std::string { requiredFinalPath.hashPart() });
             rewriteOutput(outputRewrites);
-            auto narHashAndSize = hashPath(htSHA256, actualPath);
+            auto narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
             ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
             newInfo0.narSize = narHashAndSize.second;
             auto refs = rewriteRefs();
@@ -2546,7 +2546,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()

             auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating {
                 .method = dof.ca.method,
-                .hashType = wanted.type,
+                .hashAlgo = wanted.algo,
             });

             /* Check wanted hash */
@@ -2583,7 +2583,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
         [&](const DerivationOutput::Impure & doi) {
             return newInfoFromCA(DerivationOutput::CAFloating {
                 .method = doi.method,
-                .hashType = doi.hashType,
+                .hashAlgo = doi.hashAlgo,
             });
         },

@@ -2945,7 +2945,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(OutputNameView outputName)
 {
     return worker.store.makeStorePath(
         "rewrite:" + std::string(drvPath.to_string()) + ":name:" + std::string(outputName),
-        Hash(htSHA256), outputPathName(drv->name, outputName));
+        Hash(HashAlgorithm::SHA256), outputPathName(drv->name, outputName));
 }


@@ -2953,7 +2953,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(const StorePath & path)
 {
     return worker.store.makeStorePath(
         "rewrite:" + std::string(drvPath.to_string()) + ":" + std::string(path.to_string()),
-        Hash(htSHA256), path.name());
+        Hash(HashAlgorithm::SHA256), path.name());
 }


@@ -519,8 +519,8 @@ bool Worker::pathContentsGood(const StorePath & path)
     if (!pathExists(store.printStorePath(path)))
         res = false;
     else {
-        HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
-        Hash nullHash(htSHA256);
+        HashResult current = hashPath(info->narHash.algo, store.printStorePath(path));
+        Hash nullHash(HashAlgorithm::SHA256);
         res = info->narHash == nullHash || info->narHash == current.first;
     }
     pathContentsGoodCache.insert_or_assign(path, res);
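
`pathContentsGood()` now keys the re-hash on `narHash.algo`; the underlying call, with an assumed path:

Path p = "/tmp/example";                                  // assumed input
HashResult current = hashPath(HashAlgorithm::SHA256, p);  // {NAR hash, NAR size}
// pathContentsGood() then compares current.first against the recorded narHash.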
@@ -63,9 +63,9 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
     for (auto hashedMirror : settings.hashedMirrors.get())
         try {
             if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
-            std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
+            std::optional<HashAlgorithm> ht = parseHashAlgoOpt(getAttr("outputHashAlgo"));
             Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
-            fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(HashFormat::Base16, false));
+            fetch(hashedMirror + printHashAlgo(h.algo) + "/" + h.to_string(HashFormat::Base16, false));
             return;
         } catch (Error & e) {
             debug(e.what());
@@ -38,14 +38,14 @@ ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m)
     return FileIngestionMethod::Flat;
 }

-std::string ContentAddressMethod::render(HashType ht) const
+std::string ContentAddressMethod::render(HashAlgorithm ha) const
 {
     return std::visit(overloaded {
         [&](const TextIngestionMethod & th) {
-            return std::string{"text:"} + printHashType(ht);
+            return std::string{"text:"} + printHashAlgo(ha);
         },
         [&](const FileIngestionMethod & fim) {
-            return "fixed:" + makeFileIngestionPrefix(fim) + printHashType(ht);
+            return "fixed:" + makeFileIngestionPrefix(fim) + printHashAlgo(ha);
         }
     }, raw);
 }
@@ -61,13 +61,13 @@ std::string ContentAddress::render() const
             + makeFileIngestionPrefix(method);
         },
     }, method.raw)
-        + this->hash.to_string(HashFormat::Base32, true);
+        + this->hash.to_string(HashFormat::Nix32, true);
 }

 /**
  * Parses content address strings up to the hash.
  */
-static std::pair<ContentAddressMethod, HashType> parseContentAddressMethodPrefix(std::string_view & rest)
+static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodPrefix(std::string_view & rest)
 {
     std::string_view wholeInput { rest };

@@ -83,27 +83,27 @@ static std::pair<ContentAddressMethod, HashType> parseContentAddressMethodPrefix
         auto hashTypeRaw = splitPrefixTo(rest, ':');
         if (!hashTypeRaw)
             throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", wholeInput);
-        HashType hashType = parseHashType(*hashTypeRaw);
-        return hashType;
+        HashAlgorithm hashAlgo = parseHashAlgo(*hashTypeRaw);
+        return hashAlgo;
     };

     // Switch on prefix
     if (prefix == "text") {
         // No parsing of the ingestion method, "text" only support flat.
-        HashType hashType = parseHashType_();
+        HashAlgorithm hashAlgo = parseHashType_();
         return {
             TextIngestionMethod {},
-            std::move(hashType),
+            std::move(hashAlgo),
         };
     } else if (prefix == "fixed") {
         // Parse method
         auto method = FileIngestionMethod::Flat;
         if (splitPrefix(rest, "r:"))
             method = FileIngestionMethod::Recursive;
-        HashType hashType = parseHashType_();
+        HashAlgorithm hashAlgo = parseHashType_();
         return {
             std::move(method),
-            std::move(hashType),
+            std::move(hashAlgo),
         };
     } else
         throw UsageError("content address prefix '%s' is unrecognized. Recogonized prefixes are 'text' or 'fixed'", prefix);
@@ -113,15 +113,15 @@ ContentAddress ContentAddress::parse(std::string_view rawCa)
 {
     auto rest = rawCa;

-    auto [caMethod, hashType] = parseContentAddressMethodPrefix(rest);
+    auto [caMethod, hashAlgo] = parseContentAddressMethodPrefix(rest);

     return ContentAddress {
         .method = std::move(caMethod),
-        .hash = Hash::parseNonSRIUnprefixed(rest, hashType),
+        .hash = Hash::parseNonSRIUnprefixed(rest, hashAlgo),
     };
 }

-std::pair<ContentAddressMethod, HashType> ContentAddressMethod::parse(std::string_view caMethod)
+std::pair<ContentAddressMethod, HashAlgorithm> ContentAddressMethod::parse(std::string_view caMethod)
 {
     std::string asPrefix = std::string{caMethod} + ":";
     // parseContentAddressMethodPrefix takes its argument by reference
@@ -144,7 +144,7 @@ std::string renderContentAddress(std::optional<ContentAddress> ca)
 std::string ContentAddress::printMethodAlgo() const
 {
     return method.renderPrefix()
-        + printHashType(hash.type);
+        + printHashAlgo(hash.algo);
 }

 bool StoreReferences::empty() const
@@ -94,7 +94,7 @@ struct ContentAddressMethod
     /**
      * Parse a content addressing method and hash type.
      */
-    static std::pair<ContentAddressMethod, HashType> parse(std::string_view rawCaMethod);
+    static std::pair<ContentAddressMethod, HashAlgorithm> parse(std::string_view rawCaMethod);

     /**
      * Render a content addressing method and hash type in a
@@ -102,7 +102,7 @@ struct ContentAddressMethod
     *
     * The rough inverse of `parse()`.
     */
-    std::string render(HashType ht) const;
+    std::string render(HashAlgorithm ha) const;
 };

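
`render()` and `parse()` above are rough inverses; a sketch of the round trip (the digest is an arbitrary 52-character Nix32 string, not a hash of anything real):

auto ca = ContentAddress::parse(
    "fixed:r:sha256:1b4sb93wp679q4zx9k1ignby1yna3z7c4c2ri3wphylbc2dwsys0");
// ca.method holds FileIngestionMethod::Recursive, ca.hash.algo SHA256;
// ca.render() reproduces the string, now emitted via HashFormat::Nix32.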
@@ -400,22 +400,22 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         logger->startWork();
         auto pathInfo = [&]() {
             // NB: FramedSource must be out of scope before logger->stopWork();
-            auto [contentAddressMethod, hashType_] = ContentAddressMethod::parse(camStr);
-            auto hashType = hashType_; // work around clang bug
+            auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr);
+            auto hashAlgo = hashAlgo_; // work around clang bug
             FramedSource source(from);
             // TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
             return std::visit(overloaded {
                 [&](const TextIngestionMethod &) {
-                    if (hashType != htSHA256)
+                    if (hashAlgo != HashAlgorithm::SHA256)
                         throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
-                            name, printHashType(hashType));
+                            name, printHashAlgo(hashAlgo));
                     // We could stream this by changing Store
                     std::string contents = source.drain();
                     auto path = store->addTextToStore(name, contents, refs, repair);
                     return store->queryPathInfo(path);
                 },
                 [&](const FileIngestionMethod & fim) {
-                    auto path = store->addToStoreFromDump(source, name, fim, hashType, repair, refs);
+                    auto path = store->addToStoreFromDump(source, name, fim, hashAlgo, repair, refs);
                     return store->queryPathInfo(path);
                 },
             }, contentAddressMethod.raw);
@@ -424,7 +424,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,

         WorkerProto::Serialise<ValidPathInfo>::write(*store, wconn, *pathInfo);
     } else {
-        HashType hashAlgo;
+        HashAlgorithm hashAlgo;
         std::string baseName;
         FileIngestionMethod method;
         {
@@ -440,7 +440,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
                 hashAlgoRaw = "sha256";
                 method = FileIngestionMethod::Recursive;
             }
-            hashAlgo = parseHashType(hashAlgoRaw);
+            hashAlgo = parseHashAlgo(hashAlgoRaw);
         }

         auto dumpSource = sinkToSource([&](Sink & saved) {
@@ -574,6 +574,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
     case WorkerProto::Op::BuildDerivation: {
         auto drvPath = store->parseStorePath(readString(from));
         BasicDerivation drv;
+        /*
+         * Note: unlike wopEnsurePath, this operation reads a
+         * derivation-to-be-realized from the client with
+         * readDerivation(Source,Store) rather than reading it from
+         * the local store with Store::readDerivation(). Since the
+         * derivation-to-be-realized is not registered in the store
+         * it cannot be trusted that its outPath was calculated
+         * correctly.
+         */
         readDerivation(from, *store, drv, Derivation::nameFromPath(drvPath));
         BuildMode buildMode = (BuildMode) readInt(from);
         logger->startWork();
@@ -883,7 +892,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         bool repair, dontCheckSigs;
         auto path = store->parseStorePath(readString(from));
         auto deriver = readString(from);
-        auto narHash = Hash::parseAny(readString(from), htSHA256);
+        auto narHash = Hash::parseAny(readString(from), HashAlgorithm::SHA256);
         ValidPathInfo info { path, narHash };
         if (deriver != "")
             info.deriver = store->parseStorePath(deriver);
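
The daemon-side `ContentAddressMethod::parse(camStr)` above splits a method string into its two halves, per the parser shown in content-address.cc earlier; a sketch:

auto [method, hashAlgo] = ContentAddressMethod::parse("fixed:r:sha256");
// method holds FileIngestionMethod::Recursive,
// hashAlgo holds HashAlgorithm::SHA256.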
@@ -215,25 +215,25 @@ static StringSet parseStrings(std::istream & str, bool arePaths)

 static DerivationOutput parseDerivationOutput(
     const StoreDirConfig & store,
-    std::string_view pathS, std::string_view hashAlgo, std::string_view hashS,
+    std::string_view pathS, std::string_view hashAlgoStr, std::string_view hashS,
     const ExperimentalFeatureSettings & xpSettings)
 {
-    if (hashAlgo != "") {
-        ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgo);
+    if (hashAlgoStr != "") {
+        ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgoStr);
         if (method == TextIngestionMethod {})
             xpSettings.require(Xp::DynamicDerivations);
-        const auto hashType = parseHashType(hashAlgo);
+        const auto hashAlgo = parseHashAlgo(hashAlgoStr);
         if (hashS == "impure") {
             xpSettings.require(Xp::ImpureDerivations);
             if (pathS != "")
                 throw FormatError("impure derivation output should not specify output path");
             return DerivationOutput::Impure {
                 .method = std::move(method),
-                .hashType = std::move(hashType),
+                .hashAlgo = std::move(hashAlgo),
             };
         } else if (hashS != "") {
             validatePath(pathS);
-            auto hash = Hash::parseNonSRIUnprefixed(hashS, hashType);
+            auto hash = Hash::parseNonSRIUnprefixed(hashS, hashAlgo);
             return DerivationOutput::CAFixed {
                 .ca = ContentAddress {
                     .method = std::move(method),
@@ -246,7 +246,7 @@ static DerivationOutput parseDerivationOutput(
                 throw FormatError("content-addressed derivation output should not specify output path");
             return DerivationOutput::CAFloating {
                 .method = std::move(method),
-                .hashType = std::move(hashType),
+                .hashAlgo = std::move(hashAlgo),
             };
         }
     } else {
@@ -547,7 +547,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs,
             },
             [&](const DerivationOutput::CAFloating & dof) {
                 s += ','; printUnquotedString(s, "");
-                s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashType(dof.hashType));
+                s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo));
                 s += ','; printUnquotedString(s, "");
             },
             [&](const DerivationOutput::Deferred &) {
@@ -558,7 +558,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs,
             [&](const DerivationOutput::Impure & doi) {
                 // FIXME
                 s += ','; printUnquotedString(s, "");
-                s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashType(doi.hashType));
+                s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo));
                 s += ','; printUnquotedString(s, "impure");
             }
         }, i.second.raw);
@@ -631,7 +631,7 @@ DerivationType BasicDerivation::type() const
         floatingCAOutputs,
         deferredIAOutputs,
         impureOutputs;
-    std::optional<HashType> floatingHashType;
+    std::optional<HashAlgorithm> floatingHashAlgo;

     for (auto & i : outputs) {
         std::visit(overloaded {
@@ -643,10 +643,10 @@ DerivationType BasicDerivation::type() const
             },
             [&](const DerivationOutput::CAFloating & dof) {
                 floatingCAOutputs.insert(i.first);
-                if (!floatingHashType) {
-                    floatingHashType = dof.hashType;
+                if (!floatingHashAlgo) {
+                    floatingHashAlgo = dof.hashAlgo;
                 } else {
-                    if (*floatingHashType != dof.hashType)
+                    if (*floatingHashAlgo != dof.hashAlgo)
                         throw Error("all floating outputs must use the same hash type");
                 }
             },
@@ -774,7 +774,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
         std::map<std::string, Hash> outputHashes;
         for (const auto & i : drv.outputs) {
             auto & dof = std::get<DerivationOutput::CAFixed>(i.second.raw);
-            auto hash = hashString(htSHA256, "fixed:out:"
+            auto hash = hashString(HashAlgorithm::SHA256, "fixed:out:"
                 + dof.ca.printMethodAlgo() + ":"
                 + dof.ca.hash.to_string(HashFormat::Base16, false) + ":"
                 + store.printStorePath(dof.path(store, drv.name, i.first)));
@@ -825,7 +825,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
         }
     }

-    auto hash = hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2));
+    auto hash = hashString(HashAlgorithm::SHA256, drv.unparse(store, maskOutputs, &inputs2));

     std::map<std::string, Hash> outputHashes;
     for (const auto & [outputName, _] : drv.outputs) {
@@ -930,7 +930,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
             },
             [&](const DerivationOutput::CAFloating & dof) {
                 out << ""
-                    << (dof.method.renderPrefix() + printHashType(dof.hashType))
+                    << (dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo))
                     << "";
             },
             [&](const DerivationOutput::Deferred &) {
@@ -940,7 +940,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
             },
             [&](const DerivationOutput::Impure & doi) {
                 out << ""
-                    << (doi.method.renderPrefix() + printHashType(doi.hashType))
+                    << (doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo))
                     << "impure";
             },
         }, i.second.raw);
@@ -958,7 +958,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
 std::string hashPlaceholder(const OutputNameView outputName)
 {
     // FIXME: memoize?
-    return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Base32, false);
+    return "/" + hashString(HashAlgorithm::SHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Nix32, false);
 }

@@ -1002,13 +1002,13 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String
 }

-std::optional<BasicDerivation> Derivation::tryResolve(Store & store) const
+std::optional<BasicDerivation> Derivation::tryResolve(Store & store, Store * evalStore) const
 {
     std::map<std::pair<StorePath, std::string>, StorePath> inputDrvOutputs;

     std::function<void(const StorePath &, const DerivedPathMap<StringSet>::ChildNode &)> accum;
     accum = [&](auto & inputDrv, auto & node) {
-        for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(inputDrv)) {
+        for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(inputDrv, evalStore)) {
             if (outputPath) {
                 inputDrvOutputs.insert_or_assign({inputDrv, outputName}, *outputPath);
                 if (auto p = get(node.childMap, outputName))
@@ -1150,7 +1150,7 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const
 }


-const Hash impureOutputHash = hashString(htSHA256, "impure");
+const Hash impureOutputHash = hashString(HashAlgorithm::SHA256, "impure");

 nlohmann::json DerivationOutput::toJSON(
     const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const
@@ -1167,11 +1167,11 @@ nlohmann::json DerivationOutput::toJSON(
             // FIXME print refs?
         },
         [&](const DerivationOutput::CAFloating & dof) {
-            res["hashAlgo"] = dof.method.renderPrefix() + printHashType(dof.hashType);
+            res["hashAlgo"] = dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo);
         },
         [&](const DerivationOutput::Deferred &) {},
         [&](const DerivationOutput::Impure & doi) {
-            res["hashAlgo"] = doi.method.renderPrefix() + printHashType(doi.hashType);
+            res["hashAlgo"] = doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo);
             res["impure"] = true;
         },
     }, raw);
@@ -1191,15 +1191,15 @@ DerivationOutput DerivationOutput::fromJSON(
     for (const auto & [key, _] : json)
         keys.insert(key);

-    auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashType> {
-        std::string hashAlgo = json["hashAlgo"];
+    auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashAlgorithm> {
+        std::string hashAlgoStr = json["hashAlgo"];
|
||||||
// remaining to parse, will be mutated by parsers
|
// remaining to parse, will be mutated by parsers
|
||||||
std::string_view s = hashAlgo;
|
std::string_view s = hashAlgoStr;
|
||||||
ContentAddressMethod method = ContentAddressMethod::parsePrefix(s);
|
ContentAddressMethod method = ContentAddressMethod::parsePrefix(s);
|
||||||
if (method == TextIngestionMethod {})
|
if (method == TextIngestionMethod {})
|
||||||
xpSettings.require(Xp::DynamicDerivations);
|
xpSettings.require(Xp::DynamicDerivations);
|
||||||
auto hashType = parseHashType(s);
|
auto hashAlgo = parseHashAlgo(s);
|
||||||
return { std::move(method), std::move(hashType) };
|
return { std::move(method), std::move(hashAlgo) };
|
||||||
};
|
};
|
||||||
|
|
||||||
if (keys == (std::set<std::string_view> { "path" })) {
|
if (keys == (std::set<std::string_view> { "path" })) {
|
||||||
|
@ -1209,11 +1209,11 @@ DerivationOutput DerivationOutput::fromJSON(
|
||||||
}
|
}
|
||||||
|
|
||||||
else if (keys == (std::set<std::string_view> { "path", "hashAlgo", "hash" })) {
|
else if (keys == (std::set<std::string_view> { "path", "hashAlgo", "hash" })) {
|
||||||
auto [method, hashType] = methodAlgo();
|
auto [method, hashAlgo] = methodAlgo();
|
||||||
auto dof = DerivationOutput::CAFixed {
|
auto dof = DerivationOutput::CAFixed {
|
||||||
.ca = ContentAddress {
|
.ca = ContentAddress {
|
||||||
.method = std::move(method),
|
.method = std::move(method),
|
||||||
.hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashType),
|
.hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashAlgo),
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
if (dof.path(store, drvName, outputName) != store.parseStorePath((std::string) json["path"]))
|
if (dof.path(store, drvName, outputName) != store.parseStorePath((std::string) json["path"]))
|
||||||
|
@ -1223,10 +1223,10 @@ DerivationOutput DerivationOutput::fromJSON(
|
||||||
|
|
||||||
else if (keys == (std::set<std::string_view> { "hashAlgo" })) {
|
else if (keys == (std::set<std::string_view> { "hashAlgo" })) {
|
||||||
xpSettings.require(Xp::CaDerivations);
|
xpSettings.require(Xp::CaDerivations);
|
||||||
auto [method, hashType] = methodAlgo();
|
auto [method, hashAlgo] = methodAlgo();
|
||||||
return DerivationOutput::CAFloating {
|
return DerivationOutput::CAFloating {
|
||||||
.method = std::move(method),
|
.method = std::move(method),
|
||||||
.hashType = std::move(hashType),
|
.hashAlgo = std::move(hashAlgo),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1236,10 +1236,10 @@ DerivationOutput DerivationOutput::fromJSON(
|
||||||
|
|
||||||
else if (keys == (std::set<std::string_view> { "hashAlgo", "impure" })) {
|
else if (keys == (std::set<std::string_view> { "hashAlgo", "impure" })) {
|
||||||
xpSettings.require(Xp::ImpureDerivations);
|
xpSettings.require(Xp::ImpureDerivations);
|
||||||
auto [method, hashType] = methodAlgo();
|
auto [method, hashAlgo] = methodAlgo();
|
||||||
return DerivationOutput::Impure {
|
return DerivationOutput::Impure {
|
||||||
.method = std::move(method),
|
.method = std::move(method),
|
||||||
.hashType = hashType,
|
.hashAlgo = hashAlgo,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
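The `CAFloating` branch in the hunks above enforces that all floating content-addressed outputs of one derivation agree on a single hash algorithm. A minimal self-contained sketch of that invariant, using stand-in types rather than Nix's real `DerivationOutput` variant (the names below are illustrative assumptions):

#include <map>
#include <optional>
#include <stdexcept>
#include <string>

// Stand-in mirroring the enum this commit renames from HashType.
enum class HashAlgorithm { MD5, SHA1, SHA256, SHA512 };
struct CAFloatingOutput { HashAlgorithm hashAlgo; };

// The first floating output fixes the algorithm; every later one must match,
// just like the check in BasicDerivation::type() above.
void checkFloatingOutputs(const std::map<std::string, CAFloatingOutput> & outputs)
{
    std::optional<HashAlgorithm> floatingHashAlgo;
    for (auto & [name, output] : outputs) {
        if (!floatingHashAlgo)
            floatingHashAlgo = output.hashAlgo;
        else if (*floatingHashAlgo != output.hashAlgo)
            throw std::runtime_error("all floating outputs must use the same hash type");
    }
}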
@@ -75,9 +75,9 @@ struct DerivationOutput
         /**
          * How the serialization will be hashed
          */
-        HashType hashType;
+        HashAlgorithm hashAlgo;
 
-        GENERATE_CMP(CAFloating, me->method, me->hashType);
+        GENERATE_CMP(CAFloating, me->method, me->hashAlgo);
     };
 
     /**
@@ -102,9 +102,9 @@ struct DerivationOutput
         /**
          * How the serialization will be hashed
          */
-        HashType hashType;
+        HashAlgorithm hashAlgo;
 
-        GENERATE_CMP(Impure, me->method, me->hashType);
+        GENERATE_CMP(Impure, me->method, me->hashAlgo);
     };
 
     typedef std::variant<
@@ -342,7 +342,7 @@ struct Derivation : BasicDerivation
      * 2. Input placeholders are replaced with realized input store
      *    paths.
      */
-    std::optional<BasicDerivation> tryResolve(Store & store) const;
+    std::optional<BasicDerivation> tryResolve(Store & store, Store * evalStore = nullptr) const;
 
     /**
      * Like the above, but instead of querying the Nix database for
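The `tryResolve` change above adds the new argument as `Store * evalStore = nullptr`, so existing callers compile unchanged while new callers can hand in a second store to consult. A self-contained sketch of that defaulted-parameter evolution pattern (toy types, not the real `Store` class):

#include <iostream>
#include <optional>
#include <string>

struct Store { std::string name; };

// A defaulted trailing parameter keeps the old call signature valid.
std::optional<std::string> tryResolve(Store & store, Store * evalStore = nullptr)
{
    Store & lookupStore = evalStore ? *evalStore : store; // fall back to the main store
    return "resolved via " + lookupStore.name;
}

int main()
{
    Store daemon{"daemon"}, local{"local"};
    std::cout << *tryResolve(daemon) << "\n";         // old call sites, old behaviour
    std::cout << *tryResolve(daemon, &local) << "\n"; // new call sites consult the eval store
}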
@@ -5,7 +5,7 @@ namespace nix {
 
 std::string DownstreamPlaceholder::render() const
 {
-    return "/" + hash.to_string(HashFormat::Base32, false);
+    return "/" + hash.to_string(HashFormat::Nix32, false);
 }
 
 
@@ -19,7 +19,7 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput(
     auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4);
     auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName);
     return DownstreamPlaceholder {
-        hashString(htSHA256, clearText)
+        hashString(HashAlgorithm::SHA256, clearText)
     };
 }
 
@@ -31,10 +31,10 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation(
     xpSettings.require(Xp::DynamicDerivations);
     auto compressed = compressHash(placeholder.hash, 20);
     auto clearText = "nix-computed-output:"
-        + compressed.to_string(HashFormat::Base32, false)
+        + compressed.to_string(HashFormat::Nix32, false)
         + ":" + std::string { outputName };
     return DownstreamPlaceholder {
-        hashString(htSHA256, clearText)
+        hashString(HashAlgorithm::SHA256, clearText)
     };
 }
 
@@ -30,7 +30,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
 {
     auto info = queryPathInfo(path);
 
-    HashSink hashSink(htSHA256);
+    HashSink hashSink(HashAlgorithm::SHA256);
     TeeSink teeSink(sink, hashSink);
 
     narFromPath(path, teeSink);
@@ -39,9 +39,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
        filesystem corruption from spreading to other machines.
        Don't complain if the stored hash is zero (unknown). */
     Hash hash = hashSink.currentHash().first;
-    if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
+    if (hash != info->narHash && info->narHash != Hash(info->narHash.algo))
         throw Error("hash of path '%s' has changed from '%s' to '%s'!",
-            printStorePath(path), info->narHash.to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true));
+            printStorePath(path), info->narHash.to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true));
 
     teeSink
         << exportMagic
@@ -79,7 +79,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
         auto references = CommonProto::Serialise<StorePathSet>::read(*this,
             CommonProto::ReadConn { .from = source });
         auto deriver = readString(source);
-        auto narHash = hashString(htSHA256, saved.s);
+        auto narHash = hashString(HashAlgorithm::SHA256, saved.s);
 
         ValidPathInfo info { path, narHash };
         if (deriver != "")
@@ -50,7 +50,7 @@ static void makeSymlink(const Path & link, const Path & target)
 
 void LocalStore::addIndirectRoot(const Path & path)
 {
-    std::string hash = hashString(htSHA1, path).to_string(HashFormat::Base32, false);
+    std::string hash = hashString(HashAlgorithm::SHA1, path).to_string(HashFormat::Nix32, false);
     Path realRoot = canonPath(fmt("%1%/%2%/auto/%3%", stateDir, gcRootsDir, hash));
     makeSymlink(realRoot, path);
 }
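The `HashFormat::Base32` to `HashFormat::Nix32` renames in these hunks make explicit that Nix's base-32 is its own scheme rather than RFC 4648: a 32-character alphabet that drops `e`, `o`, `u` and `t`, emitted from the most significant character downwards over the hash's bits. A from-scratch sketch of that encoding as I understand Nix's hash printer; treat the details as an assumption rather than this commit's code:

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Nix's base-32 alphabet: digits, then lowercase letters minus e, o, u, t.
static const std::string nix32Chars = "0123456789abcdfghijklmnpqrsvwxyz";

std::string encodeNix32(const std::vector<uint8_t> & bytes)
{
    size_t len = (bytes.size() * 8 - 1) / 5 + 1; // 20-byte SHA-1 -> 32 chars, 32-byte SHA-256 -> 52
    std::string s;
    s.reserve(len);
    for (int n = (int) len - 1; n >= 0; n--) {
        unsigned b = n * 5, i = b / 8, j = b % 8;
        // Pull 5 bits starting at bit j of byte i, spilling into the next byte.
        unsigned char c = (bytes[i] >> j)
            | (i >= bytes.size() - 1 ? 0 : bytes[i + 1] << (8 - j));
        s.push_back(nix32Chars[c & 0x1f]);
    }
    return s;
}

int main()
{
    std::vector<uint8_t> digest(20, 0xff); // e.g. a SHA-1 sized digest
    std::cout << encodeNix32(digest) << "\n";
}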
@@ -1,3 +1,4 @@
+#include "legacy-ssh-store.hh"
 #include "ssh-store-config.hh"
 #include "archive.hh"
 #include "pool.hh"
@@ -13,35 +14,16 @@
 
 namespace nix {
 
-struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig
+std::string LegacySSHStoreConfig::doc()
 {
-    using CommonSSHStoreConfig::CommonSSHStoreConfig;
-
-    const Setting<Path> remoteProgram{this, "nix-store", "remote-program",
-        "Path to the `nix-store` executable on the remote machine."};
-
-    const Setting<int> maxConnections{this, 1, "max-connections",
-        "Maximum number of concurrent SSH connections."};
-
-    const std::string name() override { return "SSH Store"; }
-
-    std::string doc() override
-    {
     return
       #include "legacy-ssh-store.md"
       ;
 }
-};
-
-struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Store
+
+struct LegacySSHStore::Connection
 {
-    // Hack for getting remote build log output.
-    // Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in
-    // the documentation
-    const Setting<int> logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"};
-
-    struct Connection
-    {
     std::unique_ptr<SSHMaster::Connection> sshConn;
     FdSink to;
     FdSource from;
@@ -79,17 +61,10 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
             .version = remoteVersion,
         };
     }
 };
 
-    std::string host;
-
-    ref<Pool<Connection>> connections;
-
-    SSHMaster master;
-
-    static std::set<std::string> uriSchemes() { return {"ssh"}; }
-
-    LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params)
+LegacySSHStore::LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params)
     : StoreConfig(params)
     , CommonSSHStoreConfig(params)
     , LegacySSHStoreConfig(params)
@@ -108,11 +83,12 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
         connections->capacity() > 1,
         compress,
         logFD)
 {
 }
 
-    ref<Connection> openConnection()
-    {
+ref<LegacySSHStore::Connection> LegacySSHStore::openConnection()
+{
     auto conn = make_ref<Connection>();
     conn->sshConn = master.startCommand(
         fmt("%s --serve --write", remoteProgram)
@@ -147,16 +123,18 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
     }
 
     return conn;
 };
 
-    std::string getUri() override
-    {
+std::string LegacySSHStore::getUri()
+{
     return *uriSchemes().begin() + "://" + host;
 }
 
-    void queryPathInfoUncached(const StorePath & path,
-        Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override
-    {
+void LegacySSHStore::queryPathInfoUncached(const StorePath & path,
+    Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
+{
     try {
         auto conn(connections->get());
 
@@ -172,35 +150,24 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
         if (p.empty()) return callback(nullptr);
         auto path2 = parseStorePath(p);
         assert(path == path2);
-        /* Hash will be set below. FIXME construct ValidPathInfo at end. */
-        auto info = std::make_shared<ValidPathInfo>(path, Hash::dummy);
+        auto info = std::make_shared<ValidPathInfo>(
+            path,
+            ServeProto::Serialise<UnkeyedValidPathInfo>::read(*this, *conn));
 
-        auto deriver = readString(conn->from);
-        if (deriver != "")
-            info->deriver = parseStorePath(deriver);
-        info->references = ServeProto::Serialise<StorePathSet>::read(*this, *conn);
-        readLongLong(conn->from); // download size
-        info->narSize = readLongLong(conn->from);
-
-        {
-            auto s = readString(conn->from);
-            if (s == "")
-                throw Error("NAR hash is now mandatory");
-            info->narHash = Hash::parseAnyPrefixed(s);
-        }
-        info->ca = ContentAddress::parseOpt(readString(conn->from));
-        info->sigs = readStrings<StringSet>(conn->from);
+        if (info->narHash == Hash::dummy)
+            throw Error("NAR hash is now mandatory");
 
         auto s = readString(conn->from);
         assert(s == "");
 
         callback(std::move(info));
     } catch (...) { callback.rethrow(); }
 }
 
-    void addToStore(const ValidPathInfo & info, Source & source,
-        RepairFlag repair, CheckSigsFlag checkSigs) override
-    {
+void LegacySSHStore::addToStore(const ValidPathInfo & info, Source & source,
    RepairFlag repair, CheckSigsFlag checkSigs)
+{
     debug("adding path '%s' to remote host '%s'", printStorePath(info.path), host);
 
     auto conn(connections->get());
@@ -252,62 +219,35 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
 
     if (readInt(conn->from) != 1)
         throw Error("failed to add path '%s' to remote host '%s'", printStorePath(info.path), host);
 }
 
-    void narFromPath(const StorePath & path, Sink & sink) override
-    {
+void LegacySSHStore::narFromPath(const StorePath & path, Sink & sink)
+{
     auto conn(connections->get());
 
     conn->to << ServeProto::Command::DumpStorePath << printStorePath(path);
     conn->to.flush();
     copyNAR(conn->from, sink);
 }
 
-    std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
-    { unsupported("queryPathFromHashPart"); }
-
-    StorePath addToStore(
-        std::string_view name,
-        const Path & srcPath,
-        FileIngestionMethod method,
-        HashType hashAlgo,
-        PathFilter & filter,
-        RepairFlag repair,
-        const StorePathSet & references) override
-    { unsupported("addToStore"); }
-
-    StorePath addTextToStore(
-        std::string_view name,
-        std::string_view s,
-        const StorePathSet & references,
-        RepairFlag repair) override
-    { unsupported("addTextToStore"); }
-
-private:
-
-    void putBuildSettings(Connection & conn)
-    {
-        conn.to
-            << settings.maxSilentTime
-            << settings.buildTimeout;
-        if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 2)
-            conn.to
-                << settings.maxLogSize;
-        if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 3)
-            conn.to
-                << 0 // buildRepeat hasn't worked for ages anyway
-                << 0;
-
-        if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 7) {
-            conn.to << ((int) settings.keepFailed);
-        }
-    }
-
-public:
-
-    BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
-        BuildMode buildMode) override
-    {
+void LegacySSHStore::putBuildSettings(Connection & conn)
+{
+    ServeProto::write(*this, conn, ServeProto::BuildOptions {
+        .maxSilentTime = settings.maxSilentTime,
+        .buildTimeout = settings.buildTimeout,
+        .maxLogSize = settings.maxLogSize,
+        .nrRepeats = 0, // buildRepeat hasn't worked for ages anyway
+        .enforceDeterminism = 0,
+        .keepFailed = settings.keepFailed,
+    });
+}
+
+BuildResult LegacySSHStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
+    BuildMode buildMode)
+{
     auto conn(connections->get());
 
     conn->to
@@ -320,10 +260,11 @@ public:
     conn->to.flush();
 
     return ServeProto::Serialise<BuildResult>::read(*this, *conn);
 }
 
-    void buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override
-    {
+void LegacySSHStore::buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore)
+{
     if (evalStore && evalStore.get() != this)
         throw Error("building on an SSH store is incompatible with '--eval-store'");
 
@@ -358,29 +299,13 @@ public:
         conn->from >> result.errorMsg;
         throw Error(result.status, result.errorMsg);
     }
 }
 
-    void ensurePath(const StorePath & path) override
-    { unsupported("ensurePath"); }
-
-    virtual ref<SourceAccessor> getFSAccessor(bool requireValidPath) override
-    { unsupported("getFSAccessor"); }
-
-    /**
-     * The default instance would schedule the work on the client side, but
-     * for consistency with `buildPaths` and `buildDerivation` it should happen
-     * on the remote side.
-     *
-     * We make this fail for now so we can add implement this properly later
-     * without it being a breaking change.
-     */
-    void repairPath(const StorePath & path) override
-    { unsupported("repairPath"); }
-
-    void computeFSClosure(const StorePathSet & paths,
-        StorePathSet & out, bool flipDirection = false,
-        bool includeOutputs = false, bool includeDerivers = false) override
-    {
+void LegacySSHStore::computeFSClosure(const StorePathSet & paths,
+    StorePathSet & out, bool flipDirection,
+    bool includeOutputs, bool includeDerivers)
+{
     if (flipDirection || includeDerivers) {
         Store::computeFSClosure(paths, out, flipDirection, includeOutputs, includeDerivers);
         return;
@@ -396,11 +321,12 @@ public:
 
     for (auto & i : ServeProto::Serialise<StorePathSet>::read(*this, *conn))
         out.insert(i);
 }
 
-    StorePathSet queryValidPaths(const StorePathSet & paths,
-        SubstituteFlag maybeSubstitute = NoSubstitute) override
-    {
+StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths,
    SubstituteFlag maybeSubstitute)
+{
     auto conn(connections->get());
 
     conn->to
@@ -411,33 +337,31 @@ public:
     conn->to.flush();
 
     return ServeProto::Serialise<StorePathSet>::read(*this, *conn);
 }
 
-    void connect() override
-    {
+void LegacySSHStore::connect()
+{
     auto conn(connections->get());
 }
 
-    unsigned int getProtocol() override
-    {
+unsigned int LegacySSHStore::getProtocol()
+{
     auto conn(connections->get());
     return conn->remoteVersion;
 }
 
-    /**
+/**
  * The legacy ssh protocol doesn't support checking for trusted-user.
  * Try using ssh-ng:// instead if you want to know.
  */
-    std::optional<TrustedFlag> isTrustedClient() override
+std::optional<TrustedFlag> isTrustedClient()
 {
     return std::nullopt;
 }
 
-    void queryRealisationUncached(const DrvOutput &,
-        Callback<std::shared_ptr<const Realisation>> callback) noexcept override
-    // TODO: Implement
-    { unsupported("queryRealisation"); }
-};
 
 static RegisterStoreImplementation<LegacySSHStore, LegacySSHStoreConfig> regLegacySSHStore;
 
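The removed inline `putBuildSettings` above is the version-gating idiom that the new `ServeProto::BuildOptions` serialiser keeps: settings introduced in later protocol minor versions are written only when the negotiated version is high enough, so old and new peers stay wire-compatible. A simplified self-contained sketch of the idiom (a plain stream stands in for the real `FdSink`):

#include <cstdint>
#include <iostream>
#include <sstream>

struct BuildOptions {
    uint64_t maxSilentTime = 0;
    uint64_t buildTimeout = 0;
    uint64_t maxLogSize = 0;
    bool keepFailed = false;
};

// Emit only the fields the peer's minor version understands, in the same
// order the old putBuildSettings used.
void writeBuildOptions(std::ostream & to, unsigned minorVersion, const BuildOptions & o)
{
    to << o.maxSilentTime << ' ' << o.buildTimeout << ' ';
    if (minorVersion >= 2)
        to << o.maxLogSize << ' ';
    if (minorVersion >= 3)
        to << 0 << ' ' << 0 << ' '; // nrRepeats / enforceDeterminism, both retired
    if (minorVersion >= 7)
        to << (int) o.keepFailed << ' ';
}

int main()
{
    std::ostringstream oldPeer, newPeer;
    writeBuildOptions(oldPeer, 1, {}); // pre-2 peers only get the two timeouts
    writeBuildOptions(newPeer, 7, {}); // newer peers also get maxLogSize and keepFailed
    std::cout << oldPeer.str() << "\n" << newPeer.str() << "\n";
}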
132
src/libstore/legacy-ssh-store.hh
Normal file
@@ -0,0 +1,132 @@
+#pragma once
+///@file
+
+#include "ssh-store-config.hh"
+#include "store-api.hh"
+#include "ssh.hh"
+#include "callback.hh"
+#include "pool.hh"
+
+namespace nix {
+
+struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig
+{
+    using CommonSSHStoreConfig::CommonSSHStoreConfig;
+
+    const Setting<Path> remoteProgram{this, "nix-store", "remote-program",
+        "Path to the `nix-store` executable on the remote machine."};
+
+    const Setting<int> maxConnections{this, 1, "max-connections",
+        "Maximum number of concurrent SSH connections."};
+
+    const std::string name() override { return "SSH Store"; }
+
+    std::string doc() override;
+};
+
+struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Store
+{
+    // Hack for getting remote build log output.
+    // Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in
+    // the documentation
+    const Setting<int> logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"};
+
+    struct Connection;
+
+    std::string host;
+
+    ref<Pool<Connection>> connections;
+
+    SSHMaster master;
+
+    static std::set<std::string> uriSchemes() { return {"ssh"}; }
+
+    LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params);
+
+    ref<Connection> openConnection();
+
+    std::string getUri() override;
+
+    void queryPathInfoUncached(const StorePath & path,
+        Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override;
+
+    void addToStore(const ValidPathInfo & info, Source & source,
+        RepairFlag repair, CheckSigsFlag checkSigs) override;
+
+    void narFromPath(const StorePath & path, Sink & sink) override;
+
+    std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
+    { unsupported("queryPathFromHashPart"); }
+
+    StorePath addToStore(
+        std::string_view name,
+        const Path & srcPath,
+        FileIngestionMethod method,
+        HashAlgorithm hashAlgo,
+        PathFilter & filter,
+        RepairFlag repair,
+        const StorePathSet & references) override
+    { unsupported("addToStore"); }
+
+    StorePath addTextToStore(
+        std::string_view name,
+        std::string_view s,
+        const StorePathSet & references,
+        RepairFlag repair) override
+    { unsupported("addTextToStore"); }
+
+private:
+
+    void putBuildSettings(Connection & conn);
+
+public:
+
+    BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
+        BuildMode buildMode) override;
+
+    void buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override;
+
+    void ensurePath(const StorePath & path) override
+    { unsupported("ensurePath"); }
+
+    virtual ref<SourceAccessor> getFSAccessor(bool requireValidPath) override
+    { unsupported("getFSAccessor"); }
+
+    /**
+     * The default instance would schedule the work on the client side, but
+     * for consistency with `buildPaths` and `buildDerivation` it should happen
+     * on the remote side.
+     *
+     * We make this fail for now so we can add implement this properly later
+     * without it being a breaking change.
+     */
+    void repairPath(const StorePath & path) override
+    { unsupported("repairPath"); }
+
+    void computeFSClosure(const StorePathSet & paths,
+        StorePathSet & out, bool flipDirection = false,
+        bool includeOutputs = false, bool includeDerivers = false) override;
+
+    StorePathSet queryValidPaths(const StorePathSet & paths,
+        SubstituteFlag maybeSubstitute = NoSubstitute) override;
+
+    void connect() override;
+
+    unsigned int getProtocol() override;
+
+    /**
+     * The legacy ssh protocol doesn't support checking for trusted-user.
+     * Try using ssh-ng:// instead if you want to know.
+     */
+    std::optional<TrustedFlag> isTrustedClient() override
+    {
+        return std::nullopt;
+    }
+
+    void queryRealisationUncached(const DrvOutput &,
+        Callback<std::shared_ptr<const Realisation>> callback) noexcept override
+    // TODO: Implement
+    { unsupported("queryRealisation"); }
+};
+
+}
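Note that the new header only forward-declares `struct Connection;` and holds it behind `ref<Pool<Connection>>`, keeping the SSH plumbing out of the public interface; the definition lives in the `.cc` file above. A generic self-contained sketch of that forward-declared nested struct pattern (illustrative names, not the real store classes):

#include <memory>
#include <string>

// "Header": the nested type is declared but not defined, so it must be held
// indirectly; consumers of the header never see the connection internals.
struct RemoteThing {
    struct Connection; // forward declaration only
    std::shared_ptr<Connection> conn; // shared_ptr tolerates the incomplete type
    RemoteThing();
    std::string describe() const;
};

// "Implementation file": the full definition lives here.
struct RemoteThing::Connection {
    std::string endpoint = "example:22";
};

RemoteThing::RemoteThing() : conn(std::make_shared<Connection>()) {}

std::string RemoteThing::describe() const { return "connected to " + conn->endpoint; }

int main()
{
    RemoteThing t;
    return t.describe().empty() ? 1 : 0;
}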
@@ -955,7 +955,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
         StorePathSet paths;
 
         for (auto & [_, i] : infos) {
-            assert(i.narHash.type == htSHA256);
+            assert(i.narHash.algo == HashAlgorithm::SHA256);
             if (isValidPath_(*state, i.path))
                 updatePathInfo(*state, i);
             else
@@ -1069,7 +1069,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 
         /* While restoring the path from the NAR, compute the hash
            of the NAR. */
-        HashSink hashSink(htSHA256);
+        HashSink hashSink(HashAlgorithm::SHA256);
 
         TeeSource wrapperSource { source, hashSink };
 
@@ -1080,7 +1080,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 
         if (hashResult.first != info.narHash)
             throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s",
-                printStorePath(info.path), info.narHash.to_string(HashFormat::Base32, true), hashResult.first.to_string(HashFormat::Base32, true));
+                printStorePath(info.path), info.narHash.to_string(HashFormat::Nix32, true), hashResult.first.to_string(HashFormat::Nix32, true));
 
         if (hashResult.second != info.narSize)
             throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s",
@@ -1090,14 +1090,14 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
             auto & specified = *info.ca;
             auto actualHash = hashCAPath(
                 specified.method,
-                specified.hash.type,
+                specified.hash.algo,
                 info.path
             );
             if (specified.hash != actualHash.hash) {
                 throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
                     printStorePath(info.path),
-                    specified.hash.to_string(HashFormat::Base32, true),
-                    actualHash.hash.to_string(HashFormat::Base32, true));
+                    specified.hash.to_string(HashFormat::Nix32, true),
+                    actualHash.hash.to_string(HashFormat::Nix32, true));
             }
         }
 
@@ -1116,7 +1116,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 
 
 StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name,
-    FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
+    FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
 {
     /* For computing the store path. */
     auto hashSink = std::make_unique<HashSink>(hashAlgo);
@@ -1220,8 +1220,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
     /* For computing the nar hash. In recursive SHA-256 mode, this
        is the same as the store hash, so no need to do it again. */
     auto narHash = std::pair { hash, size };
-    if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) {
-        HashSink narSink { htSHA256 };
+    if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) {
+        HashSink narSink { HashAlgorithm::SHA256 };
         dumpPath(realPath, narSink);
         narHash = narSink.finish();
     }
@@ -1252,7 +1252,7 @@ StorePath LocalStore::addTextToStore(
     std::string_view s,
     const StorePathSet & references, RepairFlag repair)
 {
-    auto hash = hashString(htSHA256, s);
+    auto hash = hashString(HashAlgorithm::SHA256, s);
     auto dstPath = makeTextPath(name, TextInfo {
         .hash = hash,
         .references = references,
@@ -1278,7 +1278,7 @@ StorePath LocalStore::addTextToStore(
 
             StringSink sink;
             dumpString(s, sink);
-            auto narHash = hashString(htSHA256, sink.s);
+            auto narHash = hashString(HashAlgorithm::SHA256, sink.s);
 
             optimisePath(realPath, repair);
 
@@ -1389,7 +1389,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
         for (auto & link : readDirectory(linksDir)) {
             printMsg(lvlTalkative, "checking contents of '%s'", link.name);
             Path linkPath = linksDir + "/" + link.name;
-            std::string hash = hashPath(htSHA256, linkPath).first.to_string(HashFormat::Base32, false);
+            std::string hash = hashPath(HashAlgorithm::SHA256, linkPath).first.to_string(HashFormat::Nix32, false);
             if (hash != link.name) {
                 printError("link '%s' was modified! expected hash '%s', got '%s'",
                     linkPath, link.name, hash);
@@ -1406,7 +1406,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
 
         printInfo("checking store hashes...");
 
-        Hash nullHash(htSHA256);
+        Hash nullHash(HashAlgorithm::SHA256);
 
         for (auto & i : validPaths) {
             try {
@@ -1415,14 +1415,14 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
                 /* Check the content hash (optionally - slow). */
                 printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));
 
-                auto hashSink = HashSink(info->narHash.type);
+                auto hashSink = HashSink(info->narHash.algo);
 
                 dumpPath(Store::toRealPath(i), hashSink);
                 auto current = hashSink.finish();
 
                 if (info->narHash != nullHash && info->narHash != current.first) {
                     printError("path '%s' was modified! expected hash '%s', got '%s'",
-                        printStorePath(i), info->narHash.to_string(HashFormat::Base32, true), current.first.to_string(HashFormat::Base32, true));
+                        printStorePath(i), info->narHash.to_string(HashFormat::Nix32, true), current.first.to_string(HashFormat::Nix32, true));
                     if (repair) repairPath(i); else errors = true;
                 } else {
 
@@ -1697,20 +1697,20 @@ void LocalStore::queryRealisationUncached(const DrvOutput & id,
 }
 
 ContentAddress LocalStore::hashCAPath(
-    const ContentAddressMethod & method, const HashType & hashType,
+    const ContentAddressMethod & method, const HashAlgorithm & hashAlgo,
     const StorePath & path)
 {
-    return hashCAPath(method, hashType, Store::toRealPath(path), path.hashPart());
+    return hashCAPath(method, hashAlgo, Store::toRealPath(path), path.hashPart());
 }
 
 ContentAddress LocalStore::hashCAPath(
     const ContentAddressMethod & method,
-    const HashType & hashType,
+    const HashAlgorithm & hashAlgo,
     const Path & path,
     const std::string_view pathHash
 )
 {
-    HashModuloSink caSink ( hashType, std::string(pathHash) );
+    HashModuloSink caSink ( hashAlgo, std::string(pathHash) );
     std::visit(overloaded {
         [&](const TextIngestionMethod &) {
             readFile(path, caSink);
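The `addToStoreFromDump` hunk above carries an optimisation through the rename: when content is ingested recursively and hashed with SHA-256, the hash that names the store path already is the NAR hash, so no second dump-and-hash pass is needed. That decision as a tiny stand-alone sketch:

enum class FileIngestionMethod { Flat, Recursive };
enum class HashAlgorithm { SHA1, SHA256, SHA512 };

// True when a separate NAR-hashing pass over the restored path is required,
// mirroring the condition in LocalStore::addToStoreFromDump above.
bool needSeparateNarHash(FileIngestionMethod method, HashAlgorithm hashAlgo)
{
    return method != FileIngestionMethod::Recursive
        || hashAlgo != HashAlgorithm::SHA256;
}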
@@ -178,7 +178,7 @@ public:
         RepairFlag repair, CheckSigsFlag checkSigs) override;
 
     StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override;
+        FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;
 
     StorePath addTextToStore(
         std::string_view name,
@@ -353,12 +353,12 @@ private:
     // XXX: Make a generic `Store` method
     ContentAddress hashCAPath(
         const ContentAddressMethod & method,
-        const HashType & hashType,
+        const HashAlgorithm & hashAlgo,
         const StorePath & path);
 
     ContentAddress hashCAPath(
         const ContentAddressMethod & method,
-        const HashType & hashType,
+        const HashAlgorithm & hashAlgo,
         const Path & path,
         const std::string_view pathHash
     );
@@ -43,7 +43,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
 
         sink.s = rewriteStrings(sink.s, rewrites);
 
-        HashModuloSink hashModuloSink(htSHA256, oldHashPart);
+        HashModuloSink hashModuloSink(HashAlgorithm::SHA256, oldHashPart);
         hashModuloSink(sink.s);
 
         auto narModuloHash = hashModuloSink.finish().first;
@@ -66,7 +66,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
         rsink2(sink.s);
         rsink2.flush();
 
-        info.narHash = hashString(htSHA256, sink2.s);
+        info.narHash = hashString(HashAlgorithm::SHA256, sink2.s);
         info.narSize = sink.s.size();
 
         StringSource source(sink2.s);
@@ -333,9 +333,9 @@ public:
             (std::string(info->path.name()))
             (narInfo ? narInfo->url : "", narInfo != 0)
             (narInfo ? narInfo->compression : "", narInfo != 0)
-            (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Base32, true) : "", narInfo && narInfo->fileHash)
+            (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Nix32, true) : "", narInfo && narInfo->fileHash)
             (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
-            (info->narHash.to_string(HashFormat::Base32, true))
+            (info->narHash.to_string(HashFormat::Nix32, true))
             (info->narSize)
             (concatStringsSep(" ", info->shortRefs()))
             (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
@@ -113,11 +113,11 @@ std::string NarInfo::to_string(const Store & store) const
     res += "URL: " + url + "\n";
     assert(compression != "");
     res += "Compression: " + compression + "\n";
-    assert(fileHash && fileHash->type == htSHA256);
-    res += "FileHash: " + fileHash->to_string(HashFormat::Base32, true) + "\n";
+    assert(fileHash && fileHash->algo == HashAlgorithm::SHA256);
+    res += "FileHash: " + fileHash->to_string(HashFormat::Nix32, true) + "\n";
     res += "FileSize: " + std::to_string(fileSize) + "\n";
-    assert(narHash.type == htSHA256);
-    res += "NarHash: " + narHash.to_string(HashFormat::Base32, true) + "\n";
+    assert(narHash.algo == HashAlgorithm::SHA256);
+    res += "NarHash: " + narHash.to_string(HashFormat::Nix32, true) + "\n";
     res += "NarSize: " + std::to_string(narSize) + "\n";
 
     res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";
@@ -146,17 +146,17 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
        Also note that if `path' is a symlink, then we're hashing the
        contents of the symlink (i.e. the result of readlink()), not
        the contents of the target (which may not even exist). */
-    Hash hash = hashPath(htSHA256, path).first;
-    debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Base32, true));
+    Hash hash = hashPath(HashAlgorithm::SHA256, path).first;
+    debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));
 
     /* Check if this is a known hash. */
-    Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Base32, false);
+    Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Nix32, false);
 
     /* Maybe delete the link, if it has been corrupted. */
     if (pathExists(linkPath)) {
         auto stLink = lstat(linkPath);
         if (st.st_size != stLink.st_size
-            || (repair && hash != hashPath(htSHA256, linkPath).first))
+            || (repair && hash != hashPath(HashAlgorithm::SHA256, linkPath).first))
         {
             // XXX: Consider overwriting linkPath with our valid version.
             warn("removing corrupted link '%s'", linkPath);
@@ -146,7 +146,7 @@ static nlohmann::json pathInfoToJSON(
         auto info = store.queryPathInfo(storePath);
 
         auto & jsonPath = jsonList.emplace_back(
-            info->toJSON(store, false, HashFormat::Base32));
+            info->toJSON(store, false, HashFormat::Nix32));
 
         // Add the path to the object whose metadata we are including.
         jsonPath["path"] = store.printStorePath(storePath);
@@ -32,7 +32,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
             store.printStorePath(path));
     return
         "1;" + store.printStorePath(path) + ";"
-        + narHash.to_string(HashFormat::Base32, true) + ";"
+        + narHash.to_string(HashFormat::Nix32, true) + ";"
         + std::to_string(narSize) + ";"
         + concatStringsSep(",", store.printStorePathSet(references));
 }
@@ -49,7 +49,7 @@ std::pair<StorePathSet, HashResult> scanForReferences(
     const std::string & path,
     const StorePathSet & refs)
 {
-    HashSink hashSink { htSHA256 };
+    HashSink hashSink { HashAlgorithm::SHA256 };
     auto found = scanForReferences(hashSink, path, refs);
     auto hash = hashSink.finish();
     return std::pair<StorePathSet, HashResult>(found, hash);
@@ -35,7 +35,7 @@ StorePath::StorePath(std::string_view _baseName)
 }
 
 StorePath::StorePath(const Hash & hash, std::string_view _name)
-    : baseName((hash.to_string(HashFormat::Base32, false) + "-").append(std::string(_name)))
+    : baseName((hash.to_string(HashFormat::Nix32, false) + "-").append(std::string(_name)))
 {
     checkName(baseName, name());
 }
@@ -49,7 +49,7 @@ StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x");
 
 StorePath StorePath::random(std::string_view name)
 {
-    Hash hash(htSHA1);
+    Hash hash(HashAlgorithm::SHA1);
     randombytes_buf(hash.hash, hash.hashSize);
     return StorePath(hash, name);
 }
@@ -420,7 +420,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
     Source & dump,
     std::string_view name,
     ContentAddressMethod caMethod,
-    HashType hashType,
+    HashAlgorithm hashAlgo,
     const StorePathSet & references,
     RepairFlag repair)
 {
@@ -432,7 +432,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
         conn->to
             << WorkerProto::Op::AddToStore
             << name
-            << caMethod.render(hashType);
+            << caMethod.render(hashAlgo);
         WorkerProto::write(*this, *conn, references);
         conn->to << repair;
 
@@ -453,9 +453,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
 
         std::visit(overloaded {
             [&](const TextIngestionMethod & thm) -> void {
-                if (hashType != htSHA256)
+                if (hashAlgo != HashAlgorithm::SHA256)
                     throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
-                        name, printHashType(hashType));
+                        name, printHashAlgo(hashAlgo));
                 std::string s = dump.drain();
                 conn->to << WorkerProto::Op::AddTextToStore << name << s;
                 WorkerProto::write(*this, *conn, references);
@@ -465,9 +465,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
                 conn->to
                     << WorkerProto::Op::AddToStore
                     << name
-                    << ((hashType == htSHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
+                    << ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
                     << (fim == FileIngestionMethod::Recursive ? 1 : 0)
-                    << printHashType(hashType);
+                    << printHashAlgo(hashAlgo);
 
                 try {
                     conn->to.written = 0;
@@ -503,9 +503,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
 
 
 StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name,
-    FileIngestionMethod method, HashType hashType, RepairFlag repair, const StorePathSet & references)
+    FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
 {
-    return addCAToStore(dump, name, method, hashType, references, repair)->path;
+    return addCAToStore(dump, name, method, hashAlgo, references, repair)->path;
 }
 
 
@@ -610,7 +610,7 @@ StorePath RemoteStore::addTextToStore(
     RepairFlag repair)
 {
     StringSource source(s);
-    return addCAToStore(source, name, TextIngestionMethod {}, htSHA256, references, repair)->path;
+    return addCAToStore(source, name, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair)->path;
 }
 
 void RemoteStore::registerDrvOutput(const Realisation & info)
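The `addCAToStore` hunk preserves a wire-compatibility quirk across the rename: the AddToStore operation first sends a flag that is 0 exactly for the historical default of recursive SHA-256 ingestion, then the recursion flag, then the algorithm name. A stand-alone sketch of that encoding (simplified types; the real code writes to the daemon connection):

#include <iostream>
#include <string>

enum class FileIngestionMethod { Flat, Recursive };
enum class HashAlgorithm { SHA1, SHA256 };

std::string printHashAlgo(HashAlgorithm a)
{
    return a == HashAlgorithm::SHA256 ? "sha256" : "sha1";
}

// 0 only for the recursive + SHA-256 default, per the "backwards
// compatibility hack" comment in the hunk above.
void encodeAddToStore(std::ostream & to, FileIngestionMethod fim, HashAlgorithm hashAlgo)
{
    to << ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1)
       << ' ' << (fim == FileIngestionMethod::Recursive ? 1 : 0)
       << ' ' << printHashAlgo(hashAlgo) << '\n';
}

int main()
{
    encodeAddToStore(std::cout, FileIngestionMethod::Recursive, HashAlgorithm::SHA256); // 0 1 sha256
    encodeAddToStore(std::cout, FileIngestionMethod::Flat, HashAlgorithm::SHA1);        // 1 0 sha1
}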
@@ -77,7 +77,7 @@ public:
         Source & dump,
         std::string_view name,
         ContentAddressMethod caMethod,
-        HashType hashType,
+        HashAlgorithm hashAlgo,
         const StorePathSet & references,
         RepairFlag repair);
 
@@ -85,7 +85,7 @@ public:
      * Add a content-addressable store path. Does not support references. `dump` will be drained.
      */
     StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
+        FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
 
     void addToStore(const ValidPathInfo & info, Source & nar,
         RepairFlag repair, CheckSigsFlag checkSigs) override;
@@ -5,6 +5,7 @@
 #include "serve-protocol.hh"
 #include "serve-protocol-impl.hh"
 #include "archive.hh"
+#include "path-info.hh"

 #include <nlohmann/json.hpp>

@@ -54,4 +55,83 @@ void ServeProto::Serialise<BuildResult>::write(const StoreDirConfig & store, Ser
     }
 }

+
+UnkeyedValidPathInfo ServeProto::Serialise<UnkeyedValidPathInfo>::read(const StoreDirConfig & store, ReadConn conn)
+{
+    /* Hash should be set below unless very old `nix-store --serve`.
+       Caller should assert that it did set it. */
+    UnkeyedValidPathInfo info { Hash::dummy };
+
+    auto deriver = readString(conn.from);
+    if (deriver != "")
+        info.deriver = store.parseStorePath(deriver);
+    info.references = ServeProto::Serialise<StorePathSet>::read(store, conn);
+
+    readLongLong(conn.from); // download size, unused
+    info.narSize = readLongLong(conn.from);
+
+    if (GET_PROTOCOL_MINOR(conn.version) >= 4) {
+        auto s = readString(conn.from);
+        if (!s.empty())
+            info.narHash = Hash::parseAnyPrefixed(s);
+        info.ca = ContentAddress::parseOpt(readString(conn.from));
+        info.sigs = readStrings<StringSet>(conn.from);
+    }
+
+    return info;
+}
+
+void ServeProto::Serialise<UnkeyedValidPathInfo>::write(const StoreDirConfig & store, WriteConn conn, const UnkeyedValidPathInfo & info)
+{
+    conn.to
+        << (info.deriver ? store.printStorePath(*info.deriver) : "");
+
+    ServeProto::write(store, conn, info.references);
+    // !!! Maybe we want compression?
+    conn.to
+        << info.narSize // downloadSize, lie a little
+        << info.narSize;
+    if (GET_PROTOCOL_MINOR(conn.version) >= 4)
+        conn.to
+            << info.narHash.to_string(HashFormat::Nix32, true)
+            << renderContentAddress(info.ca)
+            << info.sigs;
+}
+
+
+ServeProto::BuildOptions ServeProto::Serialise<ServeProto::BuildOptions>::read(const StoreDirConfig & store, ReadConn conn)
+{
+    BuildOptions options;
+    options.maxSilentTime = readInt(conn.from);
+    options.buildTimeout = readInt(conn.from);
+    if (GET_PROTOCOL_MINOR(conn.version) >= 2)
+        options.maxLogSize = readNum<unsigned long>(conn.from);
+    if (GET_PROTOCOL_MINOR(conn.version) >= 3) {
+        options.nrRepeats = readInt(conn.from);
+        options.enforceDeterminism = readInt(conn.from);
+    }
+    if (GET_PROTOCOL_MINOR(conn.version) >= 7) {
+        options.keepFailed = (bool) readInt(conn.from);
+    }
+    return options;
+}
+
+void ServeProto::Serialise<ServeProto::BuildOptions>::write(const StoreDirConfig & store, WriteConn conn, const ServeProto::BuildOptions & options)
+{
+    conn.to
+        << options.maxSilentTime
+        << options.buildTimeout;
+    if (GET_PROTOCOL_MINOR(conn.version) >= 2)
+        conn.to
+            << options.maxLogSize;
+    if (GET_PROTOCOL_MINOR(conn.version) >= 3)
+        conn.to
+            << options.nrRepeats
+            << options.enforceDeterminism;
+
+    if (GET_PROTOCOL_MINOR(conn.version) >= 7) {
+        conn.to << ((int) options.keepFailed);
+    }
+}
+
 }
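The new `BuildOptions` serialisers follow the protocol's usual compatibility pattern: fields introduced in later protocol revisions are read and written only when `GET_PROTOCOL_MINOR(conn.version)` clears the bar for that field, and both sides apply the gates in the same order. A minimal standalone sketch of that pattern, with toy stand-ins (`Conn`, `Options`, and the `0x00ff` minor mask here are illustrative, not the real Nix types):

#include <cstdint>
#include <vector>

// Toy stand-ins for the serialisation machinery; only the version-gating
// pattern is the point here.
struct Conn {
    unsigned version;               // negotiated protocol version
    std::vector<uint64_t> wire;     // stand-in for the byte stream
};

struct Options {
    uint64_t maxSilentTime = 0;
    uint64_t maxLogSize = 0;        // field added in protocol minor >= 2
};

// Emit newer fields only when the negotiated version is high enough,
// mirroring the GET_PROTOCOL_MINOR(conn.version) >= N checks above.
void writeOptions(Conn & conn, const Options & o)
{
    conn.wire.push_back(o.maxSilentTime);
    if ((conn.version & 0x00ff) >= 2)   // cf. GET_PROTOCOL_MINOR
        conn.wire.push_back(o.maxLogSize);
}

// The reader applies the same gates in the same order, so both sides
// agree on the wire layout for any negotiated version.
Options readOptions(const Conn & conn)
{
    Options o;
    size_t i = 0;
    o.maxSilentTime = conn.wire.at(i++);
    if ((conn.version & 0x00ff) >= 2)
        o.maxLogSize = conn.wire.at(i++);
    return o;
}

int main()
{
    Conn conn { .version = 0x205 };     // e.g. major 2, minor 5
    writeOptions(conn, Options { .maxSilentTime = 60, .maxLogSize = 1 << 20 });
    Options o = readOptions(conn);
    return o.maxLogSize == (1 << 20) ? 0 : 1;
}
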
@@ -18,6 +18,7 @@ struct Source;

 // items being serialised
 struct BuildResult;
+struct UnkeyedValidPathInfo;


 /**
@@ -86,6 +87,13 @@ struct ServeProto
     {
         ServeProto::Serialise<T>::write(store, conn, t);
     }
+
+    /**
+     * Options for building shared between
+     * `ServeProto::Command::BuildPaths` and
+     * `ServeProto::Command::BuildDerivation`.
+     */
+    struct BuildOptions;
 };

 enum struct ServeProto::Command : uint64_t
@@ -101,6 +109,22 @@ enum struct ServeProto::Command : uint64_t
     AddToStoreNar = 9,
 };

+
+struct ServeProto::BuildOptions {
+    /**
+     * Default value in this and every other field is so tests pass when
+     * testing older deserialisers which do not set all the fields.
+     */
+    time_t maxSilentTime = -1;
+    time_t buildTimeout = -1;
+    size_t maxLogSize = -1;
+    size_t nrRepeats = -1;
+    bool enforceDeterminism = -1;
+    bool keepFailed = -1;
+
+    bool operator == (const ServeProto::BuildOptions &) const = default;
+};
+
 /**
  * Convenience for sending operation codes.
  *
@@ -141,6 +165,10 @@ inline std::ostream & operator << (std::ostream & s, ServeProto::Command op)

 template<>
 DECLARE_SERVE_SERIALISER(BuildResult);
+template<>
+DECLARE_SERVE_SERIALISER(UnkeyedValidPathInfo);
+template<>
+DECLARE_SERVE_SERIALISER(ServeProto::BuildOptions);

 template<typename T>
 DECLARE_SERVE_SERIALISER(std::vector<T>);

@@ -153,7 +153,7 @@ StorePath StoreDirConfig::makeStorePath(std::string_view type,
     /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
     auto s = std::string(type) + ":" + std::string(hash)
         + ":" + storeDir + ":" + std::string(name);
-    auto h = compressHash(hashString(htSHA256, s), 20);
+    auto h = compressHash(hashString(HashAlgorithm::SHA256, s), 20);
     return StorePath(h, name);
 }

@@ -191,12 +191,12 @@ static std::string makeType(

 StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const
 {
-    if (info.hash.type == htSHA256 && info.method == FileIngestionMethod::Recursive) {
+    if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::Recursive) {
         return makeStorePath(makeType(*this, "source", info.references), info.hash, name);
     } else {
         assert(info.references.size() == 0);
         return makeStorePath("output:out",
-            hashString(htSHA256,
+            hashString(HashAlgorithm::SHA256,
                 "fixed:out:"
                 + makeFileIngestionPrefix(info.method)
                 + info.hash.to_string(HashFormat::Base16, true) + ":"),
@@ -207,7 +207,7 @@ StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const Fixed

 StorePath StoreDirConfig::makeTextPath(std::string_view name, const TextInfo & info) const
 {
-    assert(info.hash.type == htSHA256);
+    assert(info.hash.algo == HashAlgorithm::SHA256);
     return makeStorePath(
         makeType(*this, "text", StoreReferences {
             .others = info.references,
@@ -236,7 +236,7 @@ std::pair<StorePath, Hash> StoreDirConfig::computeStorePathFromDump(
     Source & dump,
     std::string_view name,
     FileIngestionMethod method,
-    HashType hashAlgo,
+    HashAlgorithm hashAlgo,
     const StorePathSet & references) const
 {
     HashSink sink(hashAlgo);
@@ -257,7 +257,7 @@ StorePath StoreDirConfig::computeStorePathForText(
     const StorePathSet & references) const
 {
     return makeTextPath(name, TextInfo {
-        .hash = hashString(htSHA256, s),
+        .hash = hashString(HashAlgorithm::SHA256, s),
         .references = references,
     });
 }
@@ -267,7 +267,7 @@ StorePath Store::addToStore(
     std::string_view name,
     const Path & _srcPath,
     FileIngestionMethod method,
-    HashType hashAlgo,
+    HashAlgorithm hashAlgo,
     PathFilter & filter,
     RepairFlag repair,
     const StorePathSet & references)
@@ -405,10 +405,10 @@ digraph graphname {
 }
 */
 ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
-    FileIngestionMethod method, HashType hashAlgo,
+    FileIngestionMethod method, HashAlgorithm hashAlgo,
     std::optional<Hash> expectedCAHash)
 {
-    HashSink narHashSink { htSHA256 };
+    HashSink narHashSink { HashAlgorithm::SHA256 };
     HashSink caHashSink { hashAlgo };

     /* Note that fileSink and unusualHashTee must be mutually exclusive, since
@@ -417,7 +417,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
     RegularFileSink fileSink { caHashSink };
     TeeSink unusualHashTee { narHashSink, caHashSink };

-    auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256
+    auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != HashAlgorithm::SHA256
         ? static_cast<Sink &>(unusualHashTee)
         : narHashSink;

@@ -445,7 +445,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
        finish. */
     auto [narHash, narSize] = narHashSink.finish();

-    auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256
+    auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashAlgorithm::SHA256
         ? narHash
         : caHashSink.finish().first;

@@ -1205,7 +1205,7 @@ std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istre
     if (!hashGiven) {
         std::string s;
         getline(str, s);
-        auto narHash = Hash::parseAny(s, htSHA256);
+        auto narHash = Hash::parseAny(s, HashAlgorithm::SHA256);
         getline(str, s);
         auto narSize = string2Int<uint64_t>(s);
         if (!narSize) throw Error("number expected");
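`makeStorePath` still derives the path digest the same way as before the rename: hash the fingerprint string with SHA-256, then XOR-fold the 32-byte digest down to 20 bytes. A self-contained sketch of the folding step, mirroring the `compressHash` body that appears in the hash.cc hunk further down (the vector types here are simplifications of the fixed-size `Hash` struct):

#include <cstdint>
#include <vector>

// XOR-fold an arbitrary-length digest down to newSize bytes, mirroring
// compressHash in the hash.cc hunk later in this commit.
std::vector<uint8_t> compressHashDemo(const std::vector<uint8_t> & hash, unsigned newSize)
{
    std::vector<uint8_t> h(newSize, 0);
    for (unsigned i = 0; i < hash.size(); ++i)
        h[i % newSize] ^= hash[i];
    return h;
}

int main()
{
    std::vector<uint8_t> sha256(32, 0x5a);       // stand-in for hashString(SHA256, s)
    auto folded = compressHashDemo(sha256, 20);  // 20 bytes -> 32 nix32 digits
    return folded.size() == 20 ? 0 : 1;
}
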
@@ -430,7 +430,7 @@ public:
         std::string_view name,
         const Path & srcPath,
         FileIngestionMethod method = FileIngestionMethod::Recursive,
-        HashType hashAlgo = htSHA256,
+        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
         PathFilter & filter = defaultPathFilter,
         RepairFlag repair = NoRepair,
         const StorePathSet & references = StorePathSet());
@@ -441,7 +441,7 @@ public:
      * memory.
      */
     ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath,
-        FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
+        FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
         std::optional<Hash> expectedCAHash = {});

     /**
@@ -454,7 +454,7 @@ public:
      * \todo remove?
      */
     virtual StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair,
+        FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair,
         const StorePathSet & references = StorePathSet())
     { unsupported("addToStoreFromDump"); }

@@ -98,7 +98,7 @@ struct StoreDirConfig : public Config
         Source & dump,
         std::string_view name,
         FileIngestionMethod method = FileIngestionMethod::Recursive,
-        HashType hashAlgo = htSHA256,
+        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
         const StorePathSet & references = {}) const;

     /**

@@ -160,7 +160,7 @@ void WorkerProto::Serialise<ValidPathInfo>::write(const StoreDirConfig & store,
 UnkeyedValidPathInfo WorkerProto::Serialise<UnkeyedValidPathInfo>::read(const StoreDirConfig & store, ReadConn conn)
 {
     auto deriver = readString(conn.from);
-    auto narHash = Hash::parseAny(readString(conn.from), htSHA256);
+    auto narHash = Hash::parseAny(readString(conn.from), HashAlgorithm::SHA256);
     UnkeyedValidPathInfo info(narHash);
     if (deriver != "") info.deriver = store.parseStorePath(deriver);
     info.references = WorkerProto::Serialise<StorePathSet>::read(store, conn);

@@ -544,36 +544,70 @@ nlohmann::json Args::toJSON()
     return res;
 }

-static void hashTypeCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
+static void hashFormatCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
 {
-    for (auto & type : hashTypes)
-        if (hasPrefix(type, prefix))
-            completions.add(type);
+    for (auto & format : hashFormats) {
+        if (hasPrefix(format, prefix)) {
+            completions.add(format);
+        }
+    }
 }

-Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht)
-{
-    return Flag {
+Args::Flag Args::Flag::mkHashFormatFlagWithDefault(std::string &&longName, HashFormat * hf) {
+    assert(*hf == nix::HashFormat::SRI);
+    return Flag{
         .longName = std::move(longName),
-        .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')",
-        .labels = {"hash-algo"},
-        .handler = {[ht](std::string s) {
-            *ht = parseHashType(s);
+        .description = "hash format ('base16', 'nix32', 'base64', 'sri'). Default: 'sri'",
+        .labels = {"hash-format"},
+        .handler = {[hf](std::string s) {
+            *hf = parseHashFormat(s);
         }},
-        .completer = hashTypeCompleter,
+        .completer = hashFormatCompleter,
     };
 }

-Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht)
+Args::Flag Args::Flag::mkHashFormatOptFlag(std::string && longName, std::optional<HashFormat> * ohf) {
+    return Flag{
+        .longName = std::move(longName),
+        .description = "hash format ('base16', 'nix32', 'base64', 'sri').",
+        .labels = {"hash-format"},
+        .handler = {[ohf](std::string s) {
+            *ohf = std::optional<HashFormat>{parseHashFormat(s)};
+        }},
+        .completer = hashFormatCompleter,
+    };
+}
+
+static void hashAlgoCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
 {
-    return Flag {
+    for (auto & algo : hashAlgorithms)
+        if (hasPrefix(algo, prefix))
+            completions.add(algo);
+}
+
+Args::Flag Args::Flag::mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha)
+{
+    return Flag{
+        .longName = std::move(longName),
+        .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')",
+        .labels = {"hash-algo"},
+        .handler = {[ha](std::string s) {
+            *ha = parseHashAlgo(s);
+        }},
+        .completer = hashAlgoCompleter,
+    };
+}
+
+Args::Flag Args::Flag::mkHashAlgoOptFlag(std::string && longName, std::optional<HashAlgorithm> * oha)
+{
+    return Flag{
         .longName = std::move(longName),
         .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.",
         .labels = {"hash-algo"},
-        .handler = {[oht](std::string s) {
-            *oht = std::optional<HashType> { parseHashType(s) };
+        .handler = {[oha](std::string s) {
+            *oha = std::optional<HashAlgorithm>{parseHashAlgo(s)};
         }},
-        .completer = hashTypeCompleter,
+        .completer = hashAlgoCompleter,
     };
 }

@@ -622,8 +656,9 @@ std::optional<ExperimentalFeature> Command::experimentalFeature ()
     return { Xp::NixCommand };
 }

-MultiCommand::MultiCommand(const Commands & commands_)
+MultiCommand::MultiCommand(std::string_view commandName, const Commands & commands_)
     : commands(commands_)
+    , commandName(commandName)
 {
     expectArgs({
         .label = "subcommand",
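Both completers added above share one shape: scan a fixed set of known names and offer the ones matching the typed prefix. A standalone sketch of that shape, assuming `addCompletion` as a stand-in for `AddCompletions::add` and a plain string comparison in place of `hasPrefix`:

#include <iostream>
#include <set>
#include <string>
#include <string_view>

const std::set<std::string> hashAlgorithms = {"md5", "sha1", "sha256", "sha512"};

// Stand-in for AddCompletions::add.
void addCompletion(std::string_view s) { std::cout << s << "\n"; }

// Equivalent of the hasPrefix helper used in the hunk above.
bool hasPrefixDemo(std::string_view s, std::string_view prefix)
{
    return s.size() >= prefix.size() && s.compare(0, prefix.size(), prefix) == 0;
}

// The shared completer shape: offer every known name matching the prefix.
void completeByPrefix(const std::set<std::string> & names, std::string_view prefix)
{
    for (auto & name : names)
        if (hasPrefixDemo(name, prefix))
            addCompletion(name);
}

int main()
{
    completeByPrefix(hashAlgorithms, "sha");  // prints sha1, sha256, sha512
}
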
@@ -14,7 +14,8 @@

 namespace nix {

-enum HashType : char;
+enum struct HashAlgorithm : char;
+enum struct HashFormat : int;

 class MultiCommand;

@@ -175,8 +176,10 @@ protected:

     std::optional<ExperimentalFeature> experimentalFeature;

-    static Flag mkHashTypeFlag(std::string && longName, HashType * ht);
-    static Flag mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht);
+    static Flag mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha);
+    static Flag mkHashAlgoOptFlag(std::string && longName, std::optional<HashAlgorithm> * oha);
+    static Flag mkHashFormatFlagWithDefault(std::string && longName, HashFormat * hf);
+    static Flag mkHashFormatOptFlag(std::string && longName, std::optional<HashFormat> * ohf);
 };

 /**
@@ -356,13 +359,16 @@ public:
      */
     std::optional<std::pair<std::string, ref<Command>>> command;

-    MultiCommand(const Commands & commands);
+    MultiCommand(std::string_view commandName, const Commands & commands);

     bool processFlag(Strings::iterator & pos, Strings::iterator end) override;

     bool processArgs(const Strings & args, bool finish) override;

     nlohmann::json toJSON() override;
+
+protected:
+    std::string commandName = "";
 };

 Strings argvToStrings(int argc, char * * argv);

@@ -205,8 +205,19 @@ public:
      * `CanonPath(this.makeRelative(x), this) == path`.
      */
     std::string makeRelative(const CanonPath & path) const;
+
+    friend class std::hash<CanonPath>;
 };

 std::ostream & operator << (std::ostream & stream, const CanonPath & path);

 }
+
+template<>
+struct std::hash<nix::CanonPath>
+{
+    std::size_t operator ()(const nix::CanonPath & s) const noexcept
+    {
+        return std::hash<std::string>{}(s.path);
+    }
+};
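The `std::hash<nix::CanonPath>` specialisation exists so that `CanonPath` can key unordered containers, which the lstat cache added later in this commit relies on. The same pattern applied to a hypothetical wrapper type (`demo::Path` is illustrative only): delegate hashing to the underlying string representation and default the equality operator.

#include <functional>
#include <string>
#include <unordered_map>

// Illustrative wrapper standing in for CanonPath.
namespace demo {
struct Path {
    std::string s;
    bool operator == (const Path &) const = default;
};
}

template<>
struct std::hash<demo::Path>
{
    std::size_t operator ()(const demo::Path & p) const noexcept
    {
        return std::hash<std::string>{}(p.s);
    }
};

int main()
{
    // With the specialisation in place, the type can key unordered
    // containers, which is what the lstat cache below depends on.
    std::unordered_map<demo::Path, int> m;
    m[demo::Path{"/nix/store"}] = 1;
    return m.at(demo::Path{"/nix/store"}) == 1 ? 0 : 1;
}
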
@@ -80,12 +80,11 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
         .description = R"(
             Enable the use of the [`fetchTree`](@docroot@/language/builtins.md#builtins-fetchTree) built-in function in the Nix language.

-            `fetchTree` exposes a large suite of fetching functionality in a more systematic way.
+            `fetchTree` exposes a generic interface for fetching remote file system trees from different types of remote sources.
             The [`flakes`](#xp-feature-flakes) feature flag always enables `fetch-tree`.
+            This built-in was previously guarded by the `flakes` experimental feature because of that overlap.

-            This built-in was previously guarded by the `flakes` experimental feature because of that overlap,
-            but since the plan is to work on stabilizing this first (due 2024 Q1), we are putting it underneath a separate feature.
-            Once we've made the changes we want to make, enabling just this feature will serve as a "release candidate" --- allowing users to try out the functionality we want to stabilize and not any other functionality we don't yet want to, in isolation.
+            Enabling just this feature serves as a "release candidate", allowing users to try it out in isolation.
         )",
     },
     {

@@ -106,7 +106,7 @@ void parse(
     std::string hashs = getString(source, 20);
     left -= 20;

-    Hash hash(htSHA1);
+    Hash hash(HashAlgorithm::SHA1);
     std::copy(hashs.begin(), hashs.end(), hash.hash);

     hook(name, TreeEntry {
@@ -241,12 +241,12 @@ Mode dump(


 TreeEntry dumpHash(
-    HashType ht,
+    HashAlgorithm ha,
     SourceAccessor & accessor, const CanonPath & path, PathFilter & filter)
 {
     std::function<DumpHook> hook;
     hook = [&](const CanonPath & path) -> TreeEntry {
-        auto hashSink = HashSink(ht);
+        auto hashSink = HashSink(ha);
         auto mode = dump(accessor, path, hashSink, hook, filter);
         auto hash = hashSink.finish().first;
         return {

@@ -123,7 +123,7 @@ Mode dump(
  * A smaller wrapper around `dump`.
  */
 TreeEntry dumpHash(
-    HashType ht,
+    HashAlgorithm ha,
     SourceAccessor & accessor, const CanonPath & path,
     PathFilter & filter = defaultPathFilter);

@@ -16,23 +16,24 @@

 namespace nix {

-static size_t regularHashSize(HashType type) {
+static size_t regularHashSize(HashAlgorithm type) {
     switch (type) {
-    case htMD5: return md5HashSize;
-    case htSHA1: return sha1HashSize;
-    case htSHA256: return sha256HashSize;
-    case htSHA512: return sha512HashSize;
+    case HashAlgorithm::MD5: return md5HashSize;
+    case HashAlgorithm::SHA1: return sha1HashSize;
+    case HashAlgorithm::SHA256: return sha256HashSize;
+    case HashAlgorithm::SHA512: return sha512HashSize;
     }
     abort();
 }


-std::set<std::string> hashTypes = { "md5", "sha1", "sha256", "sha512" };
+const std::set<std::string> hashAlgorithms = {"md5", "sha1", "sha256", "sha512" };

+const std::set<std::string> hashFormats = {"base64", "nix32", "base16", "sri" };

-Hash::Hash(HashType type) : type(type)
+Hash::Hash(HashAlgorithm algo) : algo(algo)
 {
-    hashSize = regularHashSize(type);
+    hashSize = regularHashSize(algo);
     assert(hashSize <= maxHashSize);
     memset(hash, 0, maxHashSize);
 }
@@ -81,7 +82,7 @@ static std::string printHash16(const Hash & hash)


 // omitted: E O U T
-const std::string base32Chars = "0123456789abcdfghijklmnpqrsvwxyz";
+const std::string nix32Chars = "0123456789abcdfghijklmnpqrsvwxyz";


 static std::string printHash32(const Hash & hash)
@@ -100,7 +101,7 @@ static std::string printHash32(const Hash & hash)
         unsigned char c =
             (hash.hash[i] >> j)
             | (i >= hash.hashSize - 1 ? 0 : hash.hash[i + 1] << (8 - j));
-        s.push_back(base32Chars[c & 0x1f]);
+        s.push_back(nix32Chars[c & 0x1f]);
     }

     return s;
@@ -109,23 +110,23 @@ static std::string printHash32(const Hash & hash)

 std::string printHash16or32(const Hash & hash)
 {
-    assert(hash.type);
-    return hash.to_string(hash.type == htMD5 ? HashFormat::Base16 : HashFormat::Base32, false);
+    assert(static_cast<char>(hash.algo));
+    return hash.to_string(hash.algo == HashAlgorithm::MD5 ? HashFormat::Base16 : HashFormat::Nix32, false);
 }


-std::string Hash::to_string(HashFormat hashFormat, bool includeType) const
+std::string Hash::to_string(HashFormat hashFormat, bool includeAlgo) const
 {
     std::string s;
-    if (hashFormat == HashFormat::SRI || includeType) {
-        s += printHashType(type);
+    if (hashFormat == HashFormat::SRI || includeAlgo) {
+        s += printHashAlgo(algo);
         s += hashFormat == HashFormat::SRI ? '-' : ':';
     }
     switch (hashFormat) {
     case HashFormat::Base16:
         s += printHash16(*this);
         break;
-    case HashFormat::Base32:
+    case HashFormat::Nix32:
         s += printHash32(*this);
         break;
     case HashFormat::Base64:
@@ -136,7 +137,7 @@ std::string Hash::to_string(HashFormat hashFormat, bool includeType) const
     return s;
 }

-Hash Hash::dummy(htSHA256);
+Hash Hash::dummy(HashAlgorithm::SHA256);

 Hash Hash::parseSRI(std::string_view original) {
     auto rest = original;
@@ -145,18 +146,18 @@ Hash Hash::parseSRI(std::string_view original) {
     auto hashRaw = splitPrefixTo(rest, '-');
     if (!hashRaw)
         throw BadHash("hash '%s' is not SRI", original);
-    HashType parsedType = parseHashType(*hashRaw);
+    HashAlgorithm parsedType = parseHashAlgo(*hashRaw);

     return Hash(rest, parsedType, true);
 }

 // Mutates the string to eliminate the prefixes when found
-static std::pair<std::optional<HashType>, bool> getParsedTypeAndSRI(std::string_view & rest)
+static std::pair<std::optional<HashAlgorithm>, bool> getParsedTypeAndSRI(std::string_view & rest)
 {
     bool isSRI = false;

     // Parse the hash type before the separator, if there was one.
-    std::optional<HashType> optParsedType;
+    std::optional<HashAlgorithm> optParsedType;
     {
         auto hashRaw = splitPrefixTo(rest, ':');

@@ -166,7 +167,7 @@ static std::pair<std::optional<HashType>, bool> getParsedTypeAndSRI(std::string_
             isSRI = true;
         }
         if (hashRaw)
-            optParsedType = parseHashType(*hashRaw);
+            optParsedType = parseHashAlgo(*hashRaw);
     }

     return {optParsedType, isSRI};
@@ -185,29 +186,29 @@ Hash Hash::parseAnyPrefixed(std::string_view original)
     return Hash(rest, *optParsedType, isSRI);
 }

-Hash Hash::parseAny(std::string_view original, std::optional<HashType> optType)
+Hash Hash::parseAny(std::string_view original, std::optional<HashAlgorithm> optAlgo)
 {
     auto rest = original;
     auto [optParsedType, isSRI] = getParsedTypeAndSRI(rest);

     // Either the string or user must provide the type, if they both do they
     // must agree.
-    if (!optParsedType && !optType)
+    if (!optParsedType && !optAlgo)
         throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context", rest);
-    else if (optParsedType && optType && *optParsedType != *optType)
-        throw BadHash("hash '%s' should have type '%s'", original, printHashType(*optType));
+    else if (optParsedType && optAlgo && *optParsedType != *optAlgo)
+        throw BadHash("hash '%s' should have type '%s'", original, printHashAlgo(*optAlgo));

-    HashType hashType = optParsedType ? *optParsedType : *optType;
-    return Hash(rest, hashType, isSRI);
+    HashAlgorithm hashAlgo = optParsedType ? *optParsedType : *optAlgo;
+    return Hash(rest, hashAlgo, isSRI);
 }

-Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashType type)
+Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo)
 {
-    return Hash(s, type, false);
+    return Hash(s, algo, false);
 }

-Hash::Hash(std::string_view rest, HashType type, bool isSRI)
-    : Hash(type)
+Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI)
+    : Hash(algo)
 {
     if (!isSRI && rest.size() == base16Len()) {

@@ -230,8 +231,8 @@ Hash::Hash(std::string_view rest, HashType type, bool isSRI)
     for (unsigned int n = 0; n < rest.size(); ++n) {
         char c = rest[rest.size() - n - 1];
         unsigned char digit;
-        for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! slow */
-            if (base32Chars[digit] == c) break;
+        for (digit = 0; digit < nix32Chars.size(); ++digit) /* !!! slow */
+            if (nix32Chars[digit] == c) break;
         if (digit >= 32)
             throw BadHash("invalid base-32 hash '%s'", rest);
         unsigned int b = n * 5;
@@ -257,19 +258,19 @@ Hash::Hash(std::string_view rest, HashType type, bool isSRI)
     }

     else
-        throw BadHash("hash '%s' has wrong length for hash type '%s'", rest, printHashType(this->type));
+        throw BadHash("hash '%s' has wrong length for hash algorithm '%s'", rest, printHashAlgo(this->algo));
 }

-Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht)
+Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashAlgorithm> ha)
 {
     if (hashStr.empty()) {
-        if (!ht)
+        if (!ha)
             throw BadHash("empty hash requires explicit hash type");
-        Hash h(*ht);
+        Hash h(*ha);
         warn("found empty hash, assuming '%s'", h.to_string(HashFormat::SRI, true));
         return h;
     } else
-        return Hash::parseAny(hashStr, ht);
+        return Hash::parseAny(hashStr, ha);
 }


@@ -282,58 +283,58 @@ union Ctx
 };


-static void start(HashType ht, Ctx & ctx)
+static void start(HashAlgorithm ha, Ctx & ctx)
 {
-    if (ht == htMD5) MD5_Init(&ctx.md5);
-    else if (ht == htSHA1) SHA1_Init(&ctx.sha1);
-    else if (ht == htSHA256) SHA256_Init(&ctx.sha256);
-    else if (ht == htSHA512) SHA512_Init(&ctx.sha512);
+    if (ha == HashAlgorithm::MD5) MD5_Init(&ctx.md5);
+    else if (ha == HashAlgorithm::SHA1) SHA1_Init(&ctx.sha1);
+    else if (ha == HashAlgorithm::SHA256) SHA256_Init(&ctx.sha256);
+    else if (ha == HashAlgorithm::SHA512) SHA512_Init(&ctx.sha512);
 }


-static void update(HashType ht, Ctx & ctx,
+static void update(HashAlgorithm ha, Ctx & ctx,
     std::string_view data)
 {
-    if (ht == htMD5) MD5_Update(&ctx.md5, data.data(), data.size());
-    else if (ht == htSHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
-    else if (ht == htSHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
-    else if (ht == htSHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());
+    if (ha == HashAlgorithm::MD5) MD5_Update(&ctx.md5, data.data(), data.size());
+    else if (ha == HashAlgorithm::SHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
+    else if (ha == HashAlgorithm::SHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
+    else if (ha == HashAlgorithm::SHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());
 }


-static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
+static void finish(HashAlgorithm ha, Ctx & ctx, unsigned char * hash)
 {
-    if (ht == htMD5) MD5_Final(hash, &ctx.md5);
-    else if (ht == htSHA1) SHA1_Final(hash, &ctx.sha1);
-    else if (ht == htSHA256) SHA256_Final(hash, &ctx.sha256);
-    else if (ht == htSHA512) SHA512_Final(hash, &ctx.sha512);
+    if (ha == HashAlgorithm::MD5) MD5_Final(hash, &ctx.md5);
+    else if (ha == HashAlgorithm::SHA1) SHA1_Final(hash, &ctx.sha1);
+    else if (ha == HashAlgorithm::SHA256) SHA256_Final(hash, &ctx.sha256);
+    else if (ha == HashAlgorithm::SHA512) SHA512_Final(hash, &ctx.sha512);
 }


-Hash hashString(HashType ht, std::string_view s)
+Hash hashString(HashAlgorithm ha, std::string_view s)
 {
     Ctx ctx;
-    Hash hash(ht);
-    start(ht, ctx);
-    update(ht, ctx, s);
-    finish(ht, ctx, hash.hash);
+    Hash hash(ha);
+    start(ha, ctx);
+    update(ha, ctx, s);
+    finish(ha, ctx, hash.hash);
     return hash;
 }


-Hash hashFile(HashType ht, const Path & path)
+Hash hashFile(HashAlgorithm ha, const Path & path)
 {
-    HashSink sink(ht);
+    HashSink sink(ha);
     readFile(path, sink);
     return sink.finish().first;
 }


-HashSink::HashSink(HashType ht) : ht(ht)
+HashSink::HashSink(HashAlgorithm ha) : ha(ha)
 {
     ctx = new Ctx;
     bytes = 0;
-    start(ht, *ctx);
+    start(ha, *ctx);
 }

 HashSink::~HashSink()
@@ -345,14 +346,14 @@ HashSink::~HashSink()
 void HashSink::writeUnbuffered(std::string_view data)
 {
     bytes += data.size();
-    update(ht, *ctx, data);
+    update(ha, *ctx, data);
 }

 HashResult HashSink::finish()
 {
     flush();
-    Hash hash(ht);
-    nix::finish(ht, *ctx, hash.hash);
+    Hash hash(ha);
+    nix::finish(ha, *ctx, hash.hash);
     return HashResult(hash, bytes);
 }

@@ -360,16 +361,16 @@ HashResult HashSink::currentHash()
 {
     flush();
     Ctx ctx2 = *ctx;
-    Hash hash(ht);
-    nix::finish(ht, ctx2, hash.hash);
+    Hash hash(ha);
+    nix::finish(ha, ctx2, hash.hash);
     return HashResult(hash, bytes);
 }


 HashResult hashPath(
-    HashType ht, const Path & path, PathFilter & filter)
+    HashAlgorithm ha, const Path & path, PathFilter & filter)
 {
-    HashSink sink(ht);
+    HashSink sink(ha);
     dumpPath(path, sink, filter);
     return sink.finish();
 }
@@ -377,7 +378,7 @@ HashResult hashPath(

 Hash compressHash(const Hash & hash, unsigned int newSize)
 {
-    Hash h(hash.type);
+    Hash h(hash.algo);
     h.hashSize = newSize;
     for (unsigned int i = 0; i < hash.hashSize; ++i)
         h.hash[i % newSize] ^= hash.hash[i];
@@ -388,7 +389,11 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
 std::optional<HashFormat> parseHashFormatOpt(std::string_view hashFormatName)
 {
     if (hashFormatName == "base16") return HashFormat::Base16;
-    if (hashFormatName == "base32") return HashFormat::Base32;
+    if (hashFormatName == "nix32") return HashFormat::Nix32;
+    if (hashFormatName == "base32") {
+        warn(R"("base32" is a deprecated alias for hash format "nix32".)");
+        return HashFormat::Nix32;
+    }
     if (hashFormatName == "base64") return HashFormat::Base64;
     if (hashFormatName == "sri") return HashFormat::SRI;
     return std::nullopt;
@@ -407,8 +412,8 @@ std::string_view printHashFormat(HashFormat HashFormat)
     switch (HashFormat) {
     case HashFormat::Base64:
         return "base64";
-    case HashFormat::Base32:
-        return "base32";
+    case HashFormat::Nix32:
+        return "nix32";
     case HashFormat::Base16:
         return "base16";
     case HashFormat::SRI:
@@ -420,31 +425,31 @@ std::string_view printHashFormat(HashFormat HashFormat)
     }
 }

-std::optional<HashType> parseHashTypeOpt(std::string_view s)
+std::optional<HashAlgorithm> parseHashAlgoOpt(std::string_view s)
 {
-    if (s == "md5") return htMD5;
-    if (s == "sha1") return htSHA1;
-    if (s == "sha256") return htSHA256;
-    if (s == "sha512") return htSHA512;
+    if (s == "md5") return HashAlgorithm::MD5;
+    if (s == "sha1") return HashAlgorithm::SHA1;
+    if (s == "sha256") return HashAlgorithm::SHA256;
+    if (s == "sha512") return HashAlgorithm::SHA512;
     return std::nullopt;
 }

-HashType parseHashType(std::string_view s)
+HashAlgorithm parseHashAlgo(std::string_view s)
 {
-    auto opt_h = parseHashTypeOpt(s);
+    auto opt_h = parseHashAlgoOpt(s);
     if (opt_h)
         return *opt_h;
     else
         throw UsageError("unknown hash algorithm '%1%', expect 'md5', 'sha1', 'sha256', or 'sha512'", s);
 }

-std::string_view printHashType(HashType ht)
+std::string_view printHashAlgo(HashAlgorithm ha)
 {
-    switch (ht) {
-    case htMD5: return "md5";
-    case htSHA1: return "sha1";
-    case htSHA256: return "sha256";
-    case htSHA512: return "sha512";
+    switch (ha) {
+    case HashAlgorithm::MD5: return "md5";
+    case HashAlgorithm::SHA1: return "sha1";
+    case HashAlgorithm::SHA256: return "sha256";
+    case HashAlgorithm::SHA512: return "sha512";
     default:
         // illegal hash type enum value internally, as opposed to external input
         // which should be validated with nice error message.
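The rename from `base32Chars` to `nix32Chars` does not change the encoding itself: it is still the 32-character alphabet with E, O, U, and T omitted, consumed five bits at a time across byte boundaries. A self-contained sketch of the encoding loop shown in the `printHash32` hunk above (`encodeNix32` is a hypothetical helper for illustration, not part of this patch):

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Alphabet from the diff: base-32 digits with E, O, U, T omitted.
const std::string nix32Chars = "0123456789abcdfghijklmnpqrsvwxyz";

// Emit ceil(len*8/5) digits, most significant first, reading 5-bit
// groups across byte boundaries exactly as printHash32 does.
std::string encodeNix32(const std::vector<uint8_t> & bytes)
{
    size_t len = (bytes.size() * 8 - 1) / 5 + 1;
    std::string s;
    s.reserve(len);
    for (size_t n = len; n-- > 0; ) {
        size_t b = n * 5;
        size_t i = b / 8;
        size_t j = b % 8;
        unsigned char c =
            (bytes[i] >> j)
            | (i >= bytes.size() - 1 ? 0 : bytes[i + 1] << (8 - j));
        s.push_back(nix32Chars[c & 0x1f]);
    }
    return s;
}

int main()
{
    std::vector<uint8_t> h(20, 0xab);  // e.g. a 20-byte folded store-path digest
    std::cout << encodeNix32(h) << "\n";
}
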
@@ -12,7 +12,7 @@ namespace nix {
 MakeError(BadHash, Error);


-enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 };
+enum struct HashAlgorithm : char { MD5 = 42, SHA1, SHA256, SHA512 };


 const int md5HashSize = 16;
@@ -20,9 +20,9 @@ const int sha1HashSize = 20;
 const int sha256HashSize = 32;
 const int sha512HashSize = 64;

-extern std::set<std::string> hashTypes;
+extern const std::set<std::string> hashAlgorithms;

-extern const std::string base32Chars;
+extern const std::string nix32Chars;

 /**
  * @brief Enumeration representing the hash formats.
@@ -31,8 +31,8 @@ enum struct HashFormat : int {
     /// @brief Base 64 encoding.
     /// @see [IETF RFC 4648, section 4](https://datatracker.ietf.org/doc/html/rfc4648#section-4).
     Base64,
-    /// @brief Nix-specific base-32 encoding. @see base32Chars
-    Base32,
+    /// @brief Nix-specific base-32 encoding. @see nix32Chars
+    Nix32,
     /// @brief Lowercase hexadecimal encoding. @see base16Chars
     Base16,
     /// @brief "<hash algo>:<Base 64 hash>", format of the SRI integrity attribute.
@@ -40,6 +40,7 @@ enum struct HashFormat : int {
     SRI
 };

+extern const std::set<std::string> hashFormats;

 struct Hash
 {
@@ -47,12 +48,12 @@ struct Hash
     size_t hashSize = 0;
     uint8_t hash[maxHashSize] = {};

-    HashType type;
+    HashAlgorithm algo;

     /**
      * Create a zero-filled hash object.
      */
-    Hash(HashType type);
+    explicit Hash(HashAlgorithm algo);

     /**
      * Parse the hash from a string representation in the format
@@ -61,7 +62,7 @@ struct Hash
      * is not present, then the hash type must be specified in the
      * string.
      */
-    static Hash parseAny(std::string_view s, std::optional<HashType> type);
+    static Hash parseAny(std::string_view s, std::optional<HashAlgorithm> optAlgo);

     /**
      * Parse a hash from a string representation like the above, except the
@@ -73,7 +74,7 @@ struct Hash
      * Parse a plain hash that musst not have any prefix indicating the type.
      * The type is passed in to disambiguate.
      */
-    static Hash parseNonSRIUnprefixed(std::string_view s, HashType type);
+    static Hash parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo);

     static Hash parseSRI(std::string_view original);

@@ -82,7 +83,7 @@ private:
      * The type must be provided, the string view must not include <type>
      * prefix. `isSRI` helps disambigate the various base-* encodings.
      */
-    Hash(std::string_view s, HashType type, bool isSRI);
+    Hash(std::string_view s, HashAlgorithm algo, bool isSRI);

 public:
     /**
@@ -103,7 +104,7 @@ public:
     /**
      * Returns the length of a base-16 representation of this hash.
      */
-    size_t base16Len() const
+    [[nodiscard]] size_t base16Len() const
     {
         return hashSize * 2;
     }
@@ -111,7 +112,7 @@ public:
     /**
      * Returns the length of a base-32 representation of this hash.
      */
-    size_t base32Len() const
+    [[nodiscard]] size_t base32Len() const
     {
         return (hashSize * 8 - 1) / 5 + 1;
     }
@@ -119,24 +120,24 @@ public:
     /**
      * Returns the length of a base-64 representation of this hash.
      */
-    size_t base64Len() const
+    [[nodiscard]] size_t base64Len() const
     {
         return ((4 * hashSize / 3) + 3) & ~3;
     }

     /**
      * Return a string representation of the hash, in base-16, base-32
-     * or base-64. By default, this is prefixed by the hash type
+     * or base-64. By default, this is prefixed by the hash algo
      * (e.g. "sha256:").
      */
-    std::string to_string(HashFormat hashFormat, bool includeType) const;
+    [[nodiscard]] std::string to_string(HashFormat hashFormat, bool includeAlgo) const;

-    std::string gitRev() const
+    [[nodiscard]] std::string gitRev() const
     {
         return to_string(HashFormat::Base16, false);
     }

-    std::string gitShortRev() const
+    [[nodiscard]] std::string gitShortRev() const
     {
         return std::string(to_string(HashFormat::Base16, false), 0, 7);
     }
@@ -147,7 +148,7 @@ public:
 /**
  * Helper that defaults empty hashes to the 0 hash.
  */
-Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht);
+Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashAlgorithm> ha);

 /**
  * Print a hash in base-16 if it's MD5, or base-32 otherwise.
@@ -157,14 +158,14 @@ std::string printHash16or32(const Hash & hash);
 /**
  * Compute the hash of the given string.
  */
-Hash hashString(HashType ht, std::string_view s);
+Hash hashString(HashAlgorithm ha, std::string_view s);

 /**
  * Compute the hash of the given file, hashing its contents directly.
  *
  * (Metadata, such as the executable permission bit, is ignored.)
  */
-Hash hashFile(HashType ht, const Path & path);
+Hash hashFile(HashAlgorithm ha, const Path & path);

 /**
  * Compute the hash of the given path, serializing as a Nix Archive and
@@ -173,7 +174,7 @@ Hash hashFile(HashType ht, const Path & path);
  * The hash is defined as (essentially) hashString(ht, dumpPath(path)).
  */
 typedef std::pair<Hash, uint64_t> HashResult;
-HashResult hashPath(HashType ht, const Path & path,
+HashResult hashPath(HashAlgorithm ha, const Path & path,
     PathFilter & filter = defaultPathFilter);

 /**
@@ -200,17 +201,17 @@ std::string_view printHashFormat(HashFormat hashFormat);
 /**
  * Parse a string representing a hash type.
  */
-HashType parseHashType(std::string_view s);
+HashAlgorithm parseHashAlgo(std::string_view s);

 /**
  * Will return nothing on parse error
  */
-std::optional<HashType> parseHashTypeOpt(std::string_view s);
+std::optional<HashAlgorithm> parseHashAlgoOpt(std::string_view s);

 /**
  * And the reverse.
  */
-std::string_view printHashType(HashType ht);
+std::string_view printHashAlgo(HashAlgorithm ha);


 union Ctx;
@@ -223,12 +224,12 @@ struct AbstractHashSink : virtual Sink
 class HashSink : public BufferedSink, public AbstractHashSink
 {
 private:
-    HashType ht;
+    HashAlgorithm ha;
     Ctx * ctx;
     uint64_t bytes;

 public:
-    HashSink(HashType ht);
+    HashSink(HashAlgorithm ha);
     HashSink(const HashSink & h);
     ~HashSink();
     void writeUnbuffered(std::string_view data) override;
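Worth noting about the header change: moving from `enum HashType` with `ht*` constants to `enum struct HashAlgorithm` is more than a spelling update, since a scoped enum neither leaks its enumerators into the surrounding namespace nor converts implicitly to integers. A compressed illustration with toy enums (not the real declarations):

#include <cstdio>

// Old style: unscoped enum, constants leak into the namespace and
// silently convert to integers.
enum OldAlgo : char { oldMD5 = 42, oldSHA1 };

// New style: scoped enum, as in the diff above.
enum struct Algo : char { MD5 = 42, SHA1 };

int main()
{
    int n = oldMD5;                       // compiles: implicit conversion
    // int m = Algo::MD5;                 // error: no implicit conversion
    int m = static_cast<int>(Algo::MD5);  // conversions must be spelled out
    std::printf("%d %d\n", n, m);
}
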
@ -1,5 +1,8 @@
|
||||||
#include "posix-source-accessor.hh"
|
#include "posix-source-accessor.hh"
|
||||||
#include "signals.hh"
|
#include "signals.hh"
|
||||||
|
#include "sync.hh"
|
||||||
|
|
||||||
|
#include <unordered_map>
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
@ -8,9 +11,9 @@ void PosixSourceAccessor::readFile(
|
||||||
Sink & sink,
|
Sink & sink,
|
||||||
std::function<void(uint64_t)> sizeCallback)
|
std::function<void(uint64_t)> sizeCallback)
|
||||||
{
|
{
|
||||||
// FIXME: add O_NOFOLLOW since symlinks should be resolved by the
|
assertNoSymlinks(path);
|
||||||
// caller?
|
|
||||||
AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC);
|
AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC | O_NOFOLLOW);
|
||||||
if (!fd)
|
if (!fd)
|
||||||
throw SysError("opening file '%1%'", path);
|
throw SysError("opening file '%1%'", path);
|
||||||
|
|
||||||
@@ -42,30 +45,55 @@ void PosixSourceAccessor::readFile(
 
 bool PosixSourceAccessor::pathExists(const CanonPath & path)
 {
+    if (auto parent = path.parent()) assertNoSymlinks(*parent);
     return nix::pathExists(path.abs());
 }
 
+std::optional<struct stat> PosixSourceAccessor::cachedLstat(const CanonPath & path)
+{
+    static Sync<std::unordered_map<CanonPath, std::optional<struct stat>>> _cache;
+
+    {
+        auto cache(_cache.lock());
+        auto i = cache->find(path);
+        if (i != cache->end()) return i->second;
+    }
+
+    std::optional<struct stat> st{std::in_place};
+    if (::lstat(path.c_str(), &*st)) {
+        if (errno == ENOENT || errno == ENOTDIR)
+            st.reset();
+        else
+            throw SysError("getting status of '%s'", showPath(path));
+    }
+
+    auto cache(_cache.lock());
+    if (cache->size() >= 16384) cache->clear();
+    cache->emplace(path, st);
+
+    return st;
+}
+
 std::optional<SourceAccessor::Stat> PosixSourceAccessor::maybeLstat(const CanonPath & path)
 {
-    struct stat st;
-    if (::lstat(path.c_str(), &st)) {
-        if (errno == ENOENT) return std::nullopt;
-        throw SysError("getting status of '%s'", showPath(path));
-    }
-    mtime = std::max(mtime, st.st_mtime);
+    if (auto parent = path.parent()) assertNoSymlinks(*parent);
+    auto st = cachedLstat(path);
+    if (!st) return std::nullopt;
+    mtime = std::max(mtime, st->st_mtime);
     return Stat {
         .type =
-            S_ISREG(st.st_mode) ? tRegular :
-            S_ISDIR(st.st_mode) ? tDirectory :
-            S_ISLNK(st.st_mode) ? tSymlink :
+            S_ISREG(st->st_mode) ? tRegular :
+            S_ISDIR(st->st_mode) ? tDirectory :
+            S_ISLNK(st->st_mode) ? tSymlink :
             tMisc,
-        .fileSize = S_ISREG(st.st_mode) ? std::optional<uint64_t>(st.st_size) : std::nullopt,
-        .isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR,
+        .fileSize = S_ISREG(st->st_mode) ? std::optional<uint64_t>(st->st_size) : std::nullopt,
+        .isExecutable = S_ISREG(st->st_mode) && st->st_mode & S_IXUSR,
     };
 }
 
 SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & path)
 {
+    assertNoSymlinks(path);
     DirEntries res;
     for (auto & entry : nix::readDirectory(path.abs())) {
         std::optional<Type> type;
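Note that `cachedLstat` caches negative results as well — the `std::optional<struct stat>` is stored empty on `ENOENT`/`ENOTDIR` — and bounds memory by simply clearing the whole map once it holds 16384 entries. A minimal sketch of the same pattern in portable C++, with Nix's `Sync` wrapper replaced by a plain mutex and an illustrative `expensiveLookup` standing in for the syscall:

```cpp
#include <mutex>
#include <optional>
#include <string>
#include <unordered_map>

// Stand-in for the expensive operation; nullopt means "not found".
static std::optional<int> expensiveLookup(const std::string & key)
{
    return key.empty() ? std::nullopt : std::optional<int>((int) key.size());
}

std::optional<int> cachedLookup(const std::string & key)
{
    static std::mutex m;
    static std::unordered_map<std::string, std::optional<int>> cache;
    {
        std::lock_guard<std::mutex> lock(m);
        if (auto i = cache.find(key); i != cache.end())
            return i->second; // hits include cached misses
    }
    auto v = expensiveLookup(key); // done outside the lock, as in the hunk
    std::lock_guard<std::mutex> lock(m);
    if (cache.size() >= 16384) cache.clear(); // crude size bound
    cache.emplace(key, v);
    return v;
}
```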
@@ -81,6 +109,7 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath &
 
 std::string PosixSourceAccessor::readLink(const CanonPath & path)
 {
+    if (auto parent = path.parent()) assertNoSymlinks(*parent);
     return nix::readLink(path.abs());
 }
 
@@ -89,4 +118,14 @@ std::optional<CanonPath> PosixSourceAccessor::getPhysicalPath(const CanonPath &
     return path;
 }
 
+void PosixSourceAccessor::assertNoSymlinks(CanonPath path)
+{
+    while (!path.isRoot()) {
+        auto st = cachedLstat(path);
+        if (st && S_ISLNK(st->st_mode))
+            throw Error("path '%s' is a symlink", showPath(path));
+        path.pop();
+    }
+}
+
 }
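`assertNoSymlinks` takes its `CanonPath` by value and pops one component per iteration, so every ancestor gets a single (cached) `lstat`. A sketch of the same walk using `std::filesystem` in place of Nix's `CanonPath`/`cachedLstat` helpers; it assumes an absolute POSIX path:

```cpp
#include <sys/stat.h>
#include <filesystem>
#include <stdexcept>

// Throw if `p` or any of its ancestors is a symlink; `p` must be absolute.
void assertNoSymlinks(std::filesystem::path p)
{
    while (p.has_relative_path()) { // stops once only "/" remains
        struct stat st;
        if (::lstat(p.c_str(), &st) == 0 && S_ISLNK(st.st_mode))
            throw std::runtime_error("path '" + p.string() + "' is a symlink");
        p = p.parent_path();
    }
}
```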
@@ -29,6 +29,15 @@ struct PosixSourceAccessor : virtual SourceAccessor
     std::string readLink(const CanonPath & path) override;
 
     std::optional<CanonPath> getPhysicalPath(const CanonPath & path) override;
 
+private:
+
+    /**
+     * Throw an error if `path` or any of its ancestors are symlinks.
+     */
+    void assertNoSymlinks(CanonPath path);
+
+    std::optional<struct stat> cachedLstat(const CanonPath & path);
 };
 
 }
@@ -23,8 +23,8 @@ static void search(
     static bool isBase32[256];
     std::call_once(initialised, [](){
        for (unsigned int i = 0; i < 256; ++i) isBase32[i] = false;
-       for (unsigned int i = 0; i < base32Chars.size(); ++i)
-           isBase32[(unsigned char) base32Chars[i]] = true;
+       for (unsigned int i = 0; i < nix32Chars.size(); ++i)
+           isBase32[(unsigned char) nix32Chars[i]] = true;
     });
 
     for (size_t i = 0; i + refLength <= s.size(); ) {
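This hunk is just the `base32Chars` → `nix32Chars` rename rippling through; the once-initialised membership table used to scan for store-path hash references is unchanged. A self-contained sketch of the pattern — the alphabet below is Nix's base-32 character set as commonly documented (digits plus lowercase letters minus `e`, `o`, `u`, `t`), so treat the constant as an assumption:

```cpp
#include <array>
#include <mutex>
#include <string_view>

constexpr std::string_view nix32Chars = "0123456789abcdfghijklmnpqrsvwxyz";

bool isNix32(unsigned char c)
{
    static std::once_flag initialised;
    static std::array<bool, 256> table{}; // zero-initialised: all false
    std::call_once(initialised, [] {
        for (char ch : nix32Chars)
            table[(unsigned char) ch] = true;
    });
    return table[c];
}
```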
@@ -110,8 +110,8 @@ void RewritingSink::flush()
     prev.clear();
 }
 
-HashModuloSink::HashModuloSink(HashType ht, const std::string & modulus)
-    : hashSink(ht)
+HashModuloSink::HashModuloSink(HashAlgorithm ha, const std::string & modulus)
+    : hashSink(ha)
     , rewritingSink(modulus, std::string(modulus.size(), 0), hashSink)
 {
 }
@@ -46,7 +46,7 @@ struct HashModuloSink : AbstractHashSink
     HashSink hashSink;
     RewritingSink rewritingSink;
 
-    HashModuloSink(HashType ht, const std::string & modulus);
+    HashModuloSink(HashAlgorithm ha, const std::string & modulus);
 
     void operator () (std::string_view data) override;
 
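Again only the rename. Per the constructor above, `HashModuloSink` routes data through a `RewritingSink` that replaces every occurrence of `modulus` with zero bytes before hashing — this is how Nix hashes contents "modulo" self-references. A hedged usage sketch, assuming `finish()` is inherited from `AbstractHashSink` and returns a `HashResult` as elsewhere in this diff:

```cpp
#include "references.hh" // Nix header from this diff

// Hash `data` as if every embedded copy of `modulus` were zeroed out.
nix::Hash hashModulo(std::string_view data, const std::string & modulus)
{
    nix::HashModuloSink sink(nix::HashAlgorithm::SHA256, modulus);
    sink(data);
    return sink.finish().first;
}
```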
@@ -41,9 +41,9 @@ void SourceAccessor::readFile(
 Hash SourceAccessor::hashPath(
     const CanonPath & path,
     PathFilter & filter,
-    HashType ht)
+    HashAlgorithm ha)
 {
-    HashSink sink(ht);
+    HashSink sink(ha);
     dumpPath(path, sink, filter);
     return sink.finish().first;
 }
@@ -26,6 +26,13 @@ struct SourceAccessor
 
     /**
      * Return the contents of a file as a string.
+     *
+     * @note Unlike Unix, this method should *not* follow symlinks. Nix
+     * by default wants to manipulate symlinks explicitly, and not
+     * implicitly follow them, as they are frequently untrusted user data
+     * and thus may point to arbitrary locations. Acting on the
+     * targets of symlinks should only occasionally be done, and only
+     * with care.
      */
     virtual std::string readFile(const CanonPath & path);
 
@@ -34,7 +41,10 @@ struct SourceAccessor
      * called with the size of the file before any data is written to
      * the sink.
      *
-     * Note: subclasses of `SourceAccessor` need to implement at least
+     * @note Like the other `readFile`, this method should *not* follow
+     * symlinks.
+     *
+     * @note subclasses of `SourceAccessor` need to implement at least
      * one of the `readFile()` variants.
      */
     virtual void readFile(
@@ -87,6 +97,9 @@ struct SourceAccessor
 
     typedef std::map<std::string, DirEntry> DirEntries;
 
+    /**
+     * @note Like `readFile`, this method should *not* follow symlinks.
+     */
     virtual DirEntries readDirectory(const CanonPath & path) = 0;
 
     virtual std::string readLink(const CanonPath & path) = 0;
 
@@ -99,7 +112,7 @@ struct SourceAccessor
     Hash hashPath(
         const CanonPath & path,
         PathFilter & filter = defaultPathFilter,
-        HashType ht = htSHA256);
+        HashAlgorithm ha = HashAlgorithm::SHA256);
 
     /**
      * Return a corresponding path in the root filesystem, if
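Callers relying on the old `htSHA256` default are unaffected; only the spelling of the default moves to the scoped enum. A hedged sketch of the updated `hashPath` call (the accessor and paths are illustrative):

```cpp
#include "source-accessor.hh" // Nix header from this diff

nix::Hash hashExamples(nix::SourceAccessor & accessor)
{
    // Defaulted arguments: no filter, SHA-256.
    auto h1 = accessor.hashPath(nix::CanonPath("/etc/hosts"));
    // Explicit algorithm, default filter.
    auto h2 = accessor.hashPath(
        nix::CanonPath("/etc"), nix::defaultPathFilter, nix::HashAlgorithm::SHA512);
    return h1 == h2 ? h1 : h2; // contrived use of both results
}
```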
@@ -53,6 +53,7 @@ TarArchive::TarArchive(Source & source, bool raw) : buffer(65536)
         archive_read_support_format_raw(archive);
         archive_read_support_format_empty(archive);
     }
+    archive_read_set_option(archive, NULL, "mac-ext", NULL);
     check(archive_read_open(archive, (void *)this, callback_open, callback_read, callback_close), "Failed to open archive (%s)");
 }
 
@@ -63,6 +64,7 @@ TarArchive::TarArchive(const Path & path)
 
     archive_read_support_filter_all(archive);
     archive_read_support_format_all(archive);
+    archive_read_set_option(archive, NULL, "mac-ext", NULL);
     check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s");
 }
 
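In libarchive's option API a NULL value disables a boolean option, so both constructors now turn off the tar reader's `mac-ext` handling; as I read it, AppleDouble `._*` entries are then surfaced as ordinary archive members rather than being folded into extended metadata, keeping extraction uniform across platforms. A minimal standalone sketch (the archive file name is illustrative):

```cpp
#include <archive.h>

int main()
{
    struct archive * a = archive_read_new();
    archive_read_support_filter_all(a);
    archive_read_support_format_all(a);
    // NULL value => disable the tar module's "mac-ext" option.
    archive_read_set_option(a, NULL, "mac-ext", NULL);
    int r = archive_read_open_filename(a, "example.tar", 16384);
    // ... iterate entries with archive_read_next_header() as usual ...
    archive_read_free(a);
    return r == ARCHIVE_OK ? 0 : 1;
}
```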
Some files were not shown because too many files have changed in this diff.