Mirror of https://github.com/privatevoid-net/nix-super.git (synced 2024-11-22 05:56:15 +02:00)

Merge remote-tracking branch 'upstream/master' into overlayfs-store

commit 5c1cb0b696
941 changed files with 10981 additions and 4439 deletions
.github/PULL_REQUEST_TEMPLATE.md (vendored): 18 changes

@@ -10,24 +10,6 @@
<!-- Large change: Provide instructions to reviewers how to read the diff. -->

# Checklist for maintainers

<!-- Contributors: please leave this as is -->

Maintainers: tick if completed or explain if not relevant

- [ ] agreed on idea
- [ ] agreed on implementation strategy
- [ ] tests, as appropriate
  - functional tests - `tests/**.sh`
  - unit tests - `src/*/tests`
  - integration tests - `tests/nixos/*`
- [ ] documentation in the manual
- [ ] documentation in the internal API docs
- [ ] code and comments are self-explanatory
- [ ] commit message explains why the change was made
- [ ] new feature or incompatible change: updated release notes

# Priorities

Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc).
.github/labeler.yml (vendored): 2 changes

@@ -20,4 +20,4 @@
# Unit tests
- src/*/tests/**/*
# Functional and integration tests
- tests/**/*
- tests/functional/**/*
.github/workflows/backport.yml (vendored): 4 changes

@@ -14,14 +14,14 @@ jobs:
if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name))
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
# required to find all branches
fetch-depth: 0
- name: Create backport PRs
# should be kept in sync with `version`
uses: zeebe-io/backport-action@v1.3.1
uses: zeebe-io/backport-action@v1.4.0
with:
# Config README: https://github.com/zeebe-io/backport-action#backport-action
github_token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/ci.yml (vendored): 41 changes

@@ -17,10 +17,10 @@ jobs:
runs-on: ${{ matrix.os }}
timeout-minutes: 60
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: cachix/install-nix-action@v22
- uses: cachix/install-nix-action@v23
with:
# The sandbox would otherwise be disabled by default on Darwin
extra_nix_config: "sandbox = true"

@@ -58,11 +58,11 @@ jobs:
outputs:
installerURL: ${{ steps.prepare-installer.outputs.installerURL }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- uses: cachix/install-nix-action@v22
- uses: cachix/install-nix-action@v23
with:
install_url: https://releases.nixos.org/nix/nix-2.13.3/install
- uses: cachix/cachix-action@v12

@@ -82,9 +82,9 @@ jobs:
os: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- uses: cachix/install-nix-action@v22
- uses: cachix/install-nix-action@v23
with:
install_url: '${{needs.installer.outputs.installerURL}}'
install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"

@@ -101,6 +101,9 @@ jobs:
docker_push_image:
needs: [check_secrets, tests]
permissions:
contents: read
packages: write
if: >-
github.event_name == 'push' &&
github.ref_name == 'master' &&

@@ -108,10 +111,10 @@ jobs:
needs.check_secrets.outputs.docker == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: cachix/install-nix-action@v22
- uses: cachix/install-nix-action@v23
with:
install_url: https://releases.nixos.org/nix/nix-2.13.3/install
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV

@@ -126,10 +129,30 @@ jobs:
- run: docker load -i ./result/image.tar.gz
- run: docker tag nix:$NIX_VERSION nixos/nix:$NIX_VERSION
- run: docker tag nix:$NIX_VERSION nixos/nix:master
# We'll deploy the newly built image to both Docker Hub and Github Container Registry.
#
# Push to Docker Hub first
- name: Login to Docker Hub
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- run: docker push nixos/nix:$NIX_VERSION
- run: docker push nixos/nix:master
# Push to GitHub Container Registry as well
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Push image
run: |
IMAGE_ID=ghcr.io/${{ github.repository_owner }}/nix
# Change all uppercase to lowercase
IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
docker tag nix:$NIX_VERSION $IMAGE_ID:$NIX_VERSION
docker tag nix:$NIX_VERSION $IMAGE_ID:master
docker push $IMAGE_ID:$NIX_VERSION
docker push $IMAGE_ID:master
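The new `docker_push_image` job publishes the same image to both Docker Hub and the GitHub Container Registry. Assuming a successful run on `master`, the image could then be pulled from either registry under the tags set above (a hedged sketch, not part of the diff):

```console
$ docker pull nixos/nix:master
$ docker pull ghcr.io/nixos/nix:master
```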
.github/workflows/hydra_status.yml (vendored): 2 changes

@@ -13,7 +13,7 @@ jobs:
if: github.repository_owner == 'NixOS'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0
- run: bash scripts/check-hydra-status.sh
.gitignore (vendored): 39 changes

@@ -79,24 +79,24 @@ perl/Makefile.config
/src/build-remote/build-remote

# /tests/
/tests/test-tmp
/tests/common/vars-and-functions.sh
/tests/result*
/tests/restricted-innocent
/tests/shell
/tests/shell.drv
/tests/config.nix
/tests/ca/config.nix
/tests/dyn-drv/config.nix
/tests/repl-result-out
/tests/test-libstoreconsumer/test-libstoreconsumer
# /tests/functional/
/tests/functional/test-tmp
/tests/functional/common/vars-and-functions.sh
/tests/functional/result*
/tests/functional/restricted-innocent
/tests/functional/shell
/tests/functional/shell.drv
/tests/functional/config.nix
/tests/functional/ca/config.nix
/tests/functional/dyn-drv/config.nix
/tests/functional/repl-result-out
/tests/functional/test-libstoreconsumer/test-libstoreconsumer

# /tests/lang/
/tests/lang/*.out
/tests/lang/*.out.xml
/tests/lang/*.err
/tests/lang/*.ast
# /tests/functional/lang/
/tests/functional/lang/*.out
/tests/functional/lang/*.out.xml
/tests/functional/lang/*.err
/tests/functional/lang/*.ast

/perl/lib/Nix/Config.pm
/perl/lib/Nix/Store.cc

@@ -138,4 +138,9 @@ nix-rust/target
result

# IDE
.vscode/
.idea/

# clangd and possibly more
.cache/
.version: 2 changes

@@ -1 +1 @@
2.18.0
2.19.0
@@ -24,30 +24,51 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy).

## Making changes to Nix

1. Check for [pull requests](https://github.com/NixOS/nix/pulls) that might already cover the contribution you are about to make.
   There are many open pull requests that might already do what you intent to work on.
1. Search for related issues that cover what you're going to work on.
   It could help to mention there that you will work on the issue.

   Issues labeled [good first issue](https://github.com/NixOS/nix/labels/good%20first%20issue) should be relatively easy to fix and are likely to get merged quickly.
   Pull requests addressing issues labeled [idea approved](https://github.com/NixOS/nix/labels/idea%20approved) or [RFC](https://github.com/NixOS/nix/labels/RFC) are especially welcomed by maintainers and will receive prioritised review.

   If you are proficient with C++, addressing one of the [popular issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc) will be highly appreciated by maintainers and Nix users all over the world.
   For far-reaching changes, please investigate possible blockers and design implications, and coordinate with maintainers before investing too much time in writing code that may not end up getting merged.

   If there is no relevant issue yet and you're not sure whether your change is likely to be accepted, [open an issue](https://github.com/NixOS/nix/issues/new/choose) yourself.

2. Check for [pull requests](https://github.com/NixOS/nix/pulls) that might already cover the contribution you are about to make.
   There are many open pull requests that might already do what you intend to work on.
   You can use [labels](https://github.com/NixOS/nix/labels) to filter for relevant topics.

2. Search for related issues that cover what you're going to work on. It could help to mention there that you will work on the issue.

   Issues labeled ["good first issue"](https://github.com/NixOS/nix/labels/good-first-issue) should be relatively easy to fix and are likely to get merged quickly.
   Pull requests addressing issues labeled ["idea approved"](https://github.com/NixOS/nix/labels/idea%20approved) are especially welcomed by maintainers and will receive prioritised review.

3. Check the [Nix reference manual](https://nixos.org/manual/nix/unstable/contributing/hacking.html) for information on building Nix and running its tests.

   For contributions to the command line interface, please check the [CLI guidelines](https://nixos.org/manual/nix/unstable/contributing/cli-guideline.html).

4. Make your changes!
4. Make your change!

5. [Create a pull request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request) for your changes.
   * [Mark the pull request as draft](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request) if you're not done with the changes.
   * Clearly explain the problem that you're solving.

     Link related issues to inform interested parties and future contributors about your change.
     If your pull request closes one or multiple issues, mention that in the description using `Closes: #<number>`, as it will then happen automatically when your change is merged.
   * Make sure to have [a clean history of commits on your branch by using rebase](https://www.digitalocean.com/community/tutorials/how-to-rebase-and-update-a-pull-request).
   * Link related issues in your pull request to inform interested parties and future contributors about your change.
     If your pull request closes one or multiple issues, note that in the description using `Closes: #<number>`, as it will then happen automatically when your change is merged.
   * [Mark the pull request as draft](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request) if you're not done with the changes.

6. Do not expect your pull request to be reviewed immediately.
   Nix maintainers follow a [structured process for reviews and design decisions](https://github.com/NixOS/nix/tree/master/maintainers#project-board-protocol), which may or may not prioritise your work.

   Following this checklist will make the process smoother for everyone:

   - [ ] Fixes an [idea approved](https://github.com/NixOS/nix/labels/idea%20approved) issue
   - [ ] Tests, as appropriate:
     - Functional tests – [`tests/functional/**.sh`](./tests/functional)
     - Unit tests – [`src/*/tests`](./src/)
     - Integration tests – [`tests/nixos/*`](./tests/nixos)
   - [ ] User documentation in the [manual](..doc/manual/src)
   - [ ] API documentation in header files
   - [ ] Code and comments are self-explanatory
   - [ ] Commit message explains **why** the change was made
   - [ ] New feature or incompatible change: updated [release notes](./doc/manual/src/release-notes/rl-next.md)

7. If you need additional feedback or help to getting pull request into shape, ask other contributors using [@mentions](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#mentioning-people-and-teams).

## Making changes to the Nix manual
Makefile: 28 changes

@@ -1,3 +1,7 @@
-include Makefile.config
clean-files += Makefile.config

ifeq ($(ENABLE_BUILD), yes)
makefiles = \
mk/precompiled-headers.mk \
local.mk \

@@ -18,20 +22,24 @@ makefiles = \
misc/upstart/local.mk \
doc/manual/local.mk \
doc/internal-api/local.mk
endif

-include Makefile.config

ifeq ($(tests), yes)
ifeq ($(ENABLE_BUILD)_$(ENABLE_TESTS), yes_yes)
UNIT_TEST_ENV = _NIX_TEST_UNIT_DATA=unit-test-data
makefiles += \
src/libutil/tests/local.mk \
src/libstore/tests/local.mk \
src/libexpr/tests/local.mk \
tests/local.mk \
tests/ca/local.mk \
tests/dyn-drv/local.mk \
tests/local-overlay-store/local.mk \
tests/test-libstoreconsumer/local.mk \
tests/plugins/local.mk
src/libexpr/tests/local.mk
endif

ifeq ($(ENABLE_TESTS), yes)
makefiles += \
tests/functional/local.mk \
tests/functional/ca/local.mk \
tests/functional/dyn-drv/local.mk \
tests/functional/local-overlay-store/local.mk \
tests/functional/test-libstoreconsumer/local.mk \
tests/functional/plugins/local.mk
else
makefiles += \
mk/disable-tests.mk
@@ -46,5 +46,6 @@ sandbox_shell = @sandbox_shell@
storedir = @storedir@
sysconfdir = @sysconfdir@
system = @system@
tests = @tests@
ENABLE_BUILD = @ENABLE_BUILD@
ENABLE_TESTS = @ENABLE_TESTS@
internal_api_docs = @internal_api_docs@
README.md: 16 changes

@@ -7,29 +7,27 @@ Nix is a powerful package manager for Linux and other Unix systems that makes pa
management reliable and reproducible. Please refer to the [Nix manual](https://nixos.org/nix/manual)
for more details.

## Installation
## Installation and first steps

On Linux and macOS the easiest way to install Nix is to run the following shell command
(as a user other than root):
Visit [nix.dev](https://nix.dev) for [installation instructions](https://nix.dev/tutorials/install-nix) and [beginner tutorials](https://nix.dev/tutorials/first-steps).

```console
$ curl -L https://nixos.org/nix/install | sh
```

Information on additional installation methods is available on the [Nix download page](https://nixos.org/download.html).
Full reference documentation can be found in the [Nix manual](https://nixos.org/nix/manual).

## Building And Developing

See our [Hacking guide](https://nixos.org/manual/nix/unstable/contributing/hacking.html) in our manual for instruction on how to
to set up a development environment and build Nix from source.

## Contributing

Check the [contributing guide](./CONTRIBUTING.md) if you want to get involved with developing Nix.

## Additional Resources

- [Nix manual](https://nixos.org/nix/manual)
- [Nix jobsets on hydra.nixos.org](https://hydra.nixos.org/project/nix)
- [NixOS Discourse](https://discourse.nixos.org/)
- [Matrix - #nix:nixos.org](https://matrix.to/#/#nix:nixos.org)
- [IRC - #nixos on libera.chat](irc://irc.libera.chat/#nixos)

## License
@@ -59,12 +59,18 @@ index b5d71e62..aed7b0bf 100644
GC_bool found_me = FALSE;
size_t nthreads = 0;
int i;
@@ -851,6 +853,31 @@ GC_INNER void GC_push_all_stacks(void)
@@ -851,6 +853,37 @@ GC_INNER void GC_push_all_stacks(void)
hi = p->altstack + p->altstack_size;
/* FIXME: Need to scan the normal stack too, but how ? */
/* FIXME: Assume stack grows down */
+ } else {
+ if (pthread_getattr_np(p->id, &pattr)) {
+#ifdef HAVE_PTHREAD_ATTR_GET_NP
+ if (!pthread_attr_init(&pattr)
+ || !pthread_attr_get_np(p->id, &pattr))
+#else /* HAVE_PTHREAD_GETATTR_NP */
+ if (pthread_getattr_np(p->id, &pattr))
+#endif
+ {
+ ABORT("GC_push_all_stacks: pthread_getattr_np failed!");
+ }
+ if (pthread_attr_getstacksize(&pattr, &stack_limit)) {
@@ -1,4 +0,0 @@
#! /bin/sh -e
rm -f aclocal.m4
mkdir -p config
exec autoreconf -vfi
configure.ac: 11 changes

@@ -152,12 +152,17 @@ if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then
LDFLAGS="-latomic $LDFLAGS"
fi

# Running the functional tests without building Nix is useful for testing
# different pre-built versions of Nix against each other.
AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]),
ENABLE_BUILD=$enableval, ENABLE_BUILD=yes)
AC_SUBST(ENABLE_BUILD)
# Building without tests is useful for bootstrapping with a smaller footprint
# or running the tests in a separate derivation. Otherwise, we do compile and
# run them.
AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]),
tests=$enableval, tests=yes)
AC_SUBST(tests)
ENABLE_TESTS=$enableval, ENABLE_TESTS=yes)
AC_SUBST(ENABLE_TESTS)

# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),

@@ -289,7 +294,7 @@ if test "$gc" = yes; then
fi

if test "$tests" = yes; then
if test "$ENABLE_TESTS" = yes; then

# Look for gtest.
PKG_CHECK_MODULES([GTEST], [gtest_main])
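A minimal sketch of how the renamed switches would typically be driven from the command line, assuming the stock `AC_ARG_ENABLE` behaviour and the flag names given in the `AS_HELP_STRING` texts above:

```console
# Build Nix but skip compiling and running the test suites
$ ./configure --disable-tests

# Conversely, skip building Nix itself, e.g. to run the functional tests
# against a pre-built Nix as described in the comment above
$ ./configure --disable-build
```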
@@ -54,6 +54,23 @@ INPUT = \
src/nix-env \
src/nix-store

# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names
# in the source code. If set to NO, only conditional compilation will be
# performed. Macro expansion can be done in a controlled way by setting
# EXPAND_ONLY_PREDEF to YES.
# The default value is: NO.
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.

MACRO_EXPANSION = YES

# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
# the macro expansion is limited to the macros specified with the PREDEFINED and
# EXPAND_AS_DEFINED tags.
# The default value is: NO.
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.

EXPAND_ONLY_PREDEF = YES

# The INCLUDE_PATH tag can be used to specify one or more directories that
# contain include files that are not input files but should be processed by the
# preprocessor. Note that the INCLUDE_PATH is not recursive, so the setting of

@@ -61,3 +78,16 @@ INPUT = \
# This tag requires that the tag SEARCH_INCLUDES is set to YES.

INCLUDE_PATH = @RAPIDCHECK_HEADERS@

# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
# tag can be used to specify a list of macro names that should be expanded. The
# macro definition that is found in the sources will be used. Use the PREDEFINED
# tag if you want to use a different macro definition that overrules the
# definition found in the source code.
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.

EXPAND_AS_DEFINED = \
DECLARE_COMMON_SERIALISER \
DECLARE_WORKER_SERIALISER \
DECLARE_SERVE_SERIALISER \
LENGTH_PREFIXED_PROTO_HELPER
@@ -1,11 +1,12 @@
let
inherit (builtins)
attrNames attrValues fromJSON listToAttrs mapAttrs
attrNames attrValues fromJSON listToAttrs mapAttrs groupBy
concatStringsSep concatMap length lessThan replaceStrings sort;
inherit (import ./utils.nix) concatStrings optionalString filterAttrs trim squash unique showSettings;
inherit (import <nix/utils.nix>) attrsToList concatStrings optionalString filterAttrs trim squash unique;
showStoreDocs = import ./generate-store-info.nix;
in

commandDump:
inlineHTML: commandDump:

let

@@ -30,7 +31,7 @@ let
${maybeSubcommands}

${maybeDocumentation}
${maybeStoreDocs}

${maybeOptions}
'';

@@ -40,15 +41,15 @@ let
showArgument = arg: "*${arg.label}*" + optionalString (! arg ? arity) "...";
arguments = concatStringsSep " " (map showArgument args);
in ''
`${command}` [*option*...] ${arguments}
`${command}` [*option*...] ${arguments}
'';

maybeSubcommands = optionalString (details ? commands && details.commands != {})
''
where *subcommand* is one of the following:
''
where *subcommand* is one of the following:

${subcommands}
'';
${subcommands}
'';

subcommands = if length categories > 1
then listCategories

@@ -70,40 +71,57 @@ let
* [`${command} ${name}`](./${appendName filename name}.md) - ${subcmd.description}
'';

maybeDocumentation = optionalString
(details ? doc)
(replaceStrings ["@stores@"] [storeDocs] details.doc);
# FIXME: this is a hack.
# store parameters should not be part of command documentation to begin
# with, but instead be rendered on separate pages.
maybeStoreDocs = optionalString (details ? doc)
(replaceStrings [ "@stores@" ] [ (showStoreDocs inlineHTML commandInfo.stores) ] details.doc);

maybeOptions = optionalString (details.flags != {}) ''
maybeOptions = let
allVisibleOptions = filterAttrs
(_: o: ! o.hiddenCategory)
(details.flags // toplevel.flags);
in optionalString (allVisibleOptions != {}) ''
# Options

${showOptions details.flags toplevel.flags}
${showOptions inlineHTML allVisibleOptions}

> **Note**
>
> See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags.
'';

showOptions = options: commonOptions:
showOptions = inlineHTML: allOptions:
let
allOptions = options // commonOptions;
showCategory = cat: ''
${optionalString (cat != "") "**${cat}:**"}
showCategory = cat: opts: ''
${optionalString (cat != "") "## ${cat}"}

${listOptions (filterAttrs (n: v: v.category == cat) allOptions)}
${concatStringsSep "\n" (attrValues (mapAttrs showOption opts))}
'';
listOptions = opts: concatStringsSep "\n" (attrValues (mapAttrs showOption opts));
showOption = name: option:
let
result = trim ''
- ${item}

${option.description}
'';
item = if inlineHTML
then ''<span id="opt-${name}">[`--${name}`](#opt-${name})</span> ${shortName} ${labels}''
else "`--${name}` ${shortName} ${labels}";
shortName = optionalString
(option ? shortName)
("/ `-${option.shortName}`");
labels = optionalString
(option ? labels)
(concatStringsSep " " (map (s: "*${s}*") option.labels));
in trim ''
- `--${name}` ${shortName} ${labels}

${option.description}
'';
categories = sort lessThan (unique (map (cmd: cmd.category) (attrValues allOptions)));
in concatStrings (map showCategory categories);
in result;
categories = mapAttrs
# Convert each group from a list of key-value pairs back to an attrset
(_: listToAttrs)
(groupBy
(cmd: cmd.value.category)
(attrsToList allOptions));
in concatStrings (attrValues (mapAttrs showCategory categories));
in squash result;

appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name;

@@ -135,35 +153,4 @@ let
" - [${page.command}](command-ref/new-cli/${page.name})";
in concatStringsSep "\n" (map showEntry manpages) + "\n";

storeDocs =
let
showStore = name: { settings, doc, experimentalFeature }:
let
experimentalFeatureNote = optionalString (experimentalFeature != null) ''
> **Warning**
> This store is part of an
> [experimental feature](@docroot@/contributing/experimental-features.md).

To use this store, you need to make sure the corresponding experimental feature,
[`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}),
is enabled.
For example, include the following in [`nix.conf`](#):

```
extra-experimental-features = ${experimentalFeature}
```
'';
in ''
## ${name}

${doc}

${experimentalFeatureNote}

**Settings**:

${showSettings { useAnchors = false; } settings}
'';
in concatStrings (attrValues (mapAttrs showStore commandInfo.stores));

in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }
doc/manual/generate-settings.nix: 66 lines (new file)

@@ -0,0 +1,66 @@
let
inherit (builtins) attrValues concatStringsSep isAttrs isBool mapAttrs;
inherit (import ./utils.nix) concatStrings indent optionalString squash;
in

# `inlineHTML` is a hack to accommodate inconsistent output from `lowdown`
{ prefix, inlineHTML ? true }: settingsInfo:

let

showSetting = prefix: setting: { description, documentDefault, defaultValue, aliases, value, experimentalFeature }:
let
result = squash ''
- ${item}

${indent " " body}
'';
item = if inlineHTML
then ''<span id="${prefix}-${setting}">[`${setting}`](#${prefix}-${setting})</span>''
else "`${setting}`";
# separate body to cleanly handle indentation
body = ''
${description}

${experimentalFeatureNote}

**Default:** ${showDefault documentDefault defaultValue}

${showAliases aliases}
'';

experimentalFeatureNote = optionalString (experimentalFeature != null) ''
> **Warning**
> This setting is part of an
> [experimental feature](@docroot@/contributing/experimental-features.md).

To change this setting, you need to make sure the corresponding experimental feature,
[`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}),
is enabled.
For example, include the following in [`nix.conf`](#):

```
extra-experimental-features = ${experimentalFeature}
${setting} = ...
```
'';

showDefault = documentDefault: defaultValue:
if documentDefault then
# a StringMap value type is specified as a string, but
# this shows the value type. The empty stringmap is `null` in
# JSON, but that converts to `{ }` here.
if defaultValue == "" || defaultValue == [] || isAttrs defaultValue
then "*empty*"
else if isBool defaultValue then
if defaultValue then "`true`" else "`false`"
else "`${toString defaultValue}`"
else "*machine-specific*";

showAliases = aliases:
optionalString (aliases != [])
"**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";

in result;

in concatStrings (attrValues (mapAttrs (showSetting prefix) settingsInfo))
doc/manual/generate-store-info.nix: 45 lines (new file)

@@ -0,0 +1,45 @@
let
inherit (builtins) attrValues mapAttrs;
inherit (import ./utils.nix) concatStrings optionalString;
showSettings = import ./generate-settings.nix;
in

inlineHTML: storesInfo:

let

showStore = name: { settings, doc, experimentalFeature }:
let

result = ''
## ${name}

${doc}

${experimentalFeatureNote}

### Settings

${showSettings { prefix = "store-${slug}"; inherit inlineHTML; } settings}
'';

# markdown doesn't like spaces in URLs
slug = builtins.replaceStrings [ " " ] [ "-" ] name;

experimentalFeatureNote = optionalString (experimentalFeature != null) ''
> **Warning**
> This store is part of an
> [experimental feature](@docroot@/contributing/experimental-features.md).

To use this store, you need to make sure the corresponding experimental feature,
[`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}),
is enabled.
For example, include the following in [`nix.conf`](#):

```
extra-experimental-features = ${experimentalFeature}
```
'';
in result;

in concatStrings (attrValues (mapAttrs showStore storesInfo))
@@ -32,7 +32,7 @@ dummy-env = env -i \
NIX_STATE_DIR=/dummy \
NIX_CONFIG='cores = 0'

nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -I nix/corepkgs=corepkgs --store dummy:// --impure --raw
nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -I nix=doc/manual --store dummy:// --impure --raw

# re-implement mdBook's include directive to make it usable for terminal output and for proper @docroot@ substitution
define process-includes

@@ -96,14 +96,14 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli $(d)/sr
@cp $< $@
@$(call process-includes,$@,$@)

$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(bindir)/nix
$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(d)/generate-settings.nix $(d)/generate-store-info.nix $(bindir)/nix
@rm -rf $@ $@.tmp
$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix (builtins.readFile $<)'
$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix true (builtins.readFile $<)'
@mv $@.tmp $@

$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(bindir)/nix
$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/utils.nix $(d)/generate-settings.nix $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/experimental-features-shortlist.md $(bindir)/nix
@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
$(trace-gen) $(nix-eval) --expr '(import doc/manual/utils.nix).showSettings { useAnchors = true; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp;
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-settings.nix { prefix = "opt-"; } (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp;
@mv $@.tmp $@

$(d)/nix.json: $(bindir)/nix

@@ -125,7 +125,7 @@ $(d)/src/command-ref/experimental-features-shortlist.md: $(d)/xp-features.json $
@mv $@.tmp $@

$(d)/xp-features.json: $(bindir)/nix
$(trace-gen) $(dummy-env) NIX_PATH=nix/corepkgs=corepkgs $(bindir)/nix __dump-xp-features > $@.tmp
$(trace-gen) $(dummy-env) $(bindir)/nix __dump-xp-features > $@.tmp
@mv $@.tmp $@

$(d)/src/language/builtins.md: $(d)/language.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix

@@ -141,7 +141,7 @@ $(d)/src/language/builtin-constants.md: $(d)/language.json $(d)/generate-builtin
@mv $@.tmp $@

$(d)/language.json: $(bindir)/nix
$(trace-gen) $(dummy-env) NIX_PATH=nix/corepkgs=corepkgs $(bindir)/nix __dump-language > $@.tmp
$(trace-gen) $(dummy-env) $(bindir)/nix __dump-language > $@.tmp
@mv $@.tmp $@

# Generate the HTML manual.

@@ -173,6 +173,10 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
done
@touch $@

# the `! -name 'contributing.md'` filter excludes the one place where
# `@docroot@` is to be preserved for documenting the mechanism
# FIXME: maybe contributing guides should live right next to the code
# instead of in the manual
$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md $(d)/src/language/builtin-constants.md
$(trace-gen) \
tmp="$$(mktemp -d)"; \

@@ -180,7 +184,7 @@ $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/
find "$$tmp" -name '*.md' | while read -r file; do \
$(call process-includes,$$file,$$file); \
done; \
find "$$tmp" -name '*.md' | while read -r file; do \
find "$$tmp" -name '*.md' ! -name 'documentation.md' | while read -r file; do \
docroot="$$(realpath --relative-to="$$(dirname "$$file")" $$tmp/manual/src)"; \
sed -i "s,@docroot@,$$docroot,g" "$$file"; \
done; \
@@ -336,14 +336,13 @@ const redirects = {
"simple-values": "#primitives",
"lists": "#list",
"strings": "#string",
"lists": "#list",
"attribute-sets": "#attribute-set",
},
"installation/installing-binary.html": {
"linux": "uninstall.html#linux",
"macos": "uninstall.html#macos",
"uninstalling": "uninstall.html",
}
},
"contributing/hacking.html": {
"nix-with-flakes": "#building-nix-with-flakes",
"classic-nix": "#building-nix",

@@ -355,6 +354,7 @@ const redirects = {
"installer-tests": "testing.html#installer-tests",
"one-time-setup": "testing.html#one-time-setup",
"using-the-ci-generated-installer-for-manual-testing": "testing.html#using-the-ci-generated-installer-for-manual-testing",
"characterization-testing": "#characterisation-testing-unit",
}
};
@@ -17,7 +17,6 @@
- [Upgrading Nix](installation/upgrading.md)
- [Uninstalling Nix](installation/uninstall.md)
- [Package Management](package-management/package-management.md)
- [Basic Package Management](package-management/basic-package-mgmt.md)
- [Profiles](package-management/profiles.md)
- [Garbage Collection](package-management/garbage-collection.md)
- [Garbage Collector Roots](package-management/garbage-collector-roots.md)

@@ -30,9 +29,11 @@
- [Data Types](language/values.md)
- [Language Constructs](language/constructs.md)
- [String interpolation](language/string-interpolation.md)
- [Lookup path](language/constructs/lookup-path.md)
- [Operators](language/operators.md)
- [Derivations](language/derivations.md)
- [Advanced Attributes](language/advanced-attributes.md)
- [Import From Derivation](language/import-from-derivation.md)
- [Built-in Constants](language/builtin-constants.md)
- [Built-in Functions](language/builtins.md)
- [Advanced Topics](advanced-topics/advanced-topics.md)

@@ -100,15 +101,18 @@
- [File System Object](architecture/file-system-object.md)
- [Protocols](protocols/protocols.md)
- [Serving Tarball Flakes](protocols/tarball-fetcher.md)
- [Derivation "ATerm" file format](protocols/derivation-aterm.md)
- [Glossary](glossary.md)
- [Contributing](contributing/contributing.md)
- [Hacking](contributing/hacking.md)
- [Testing](contributing/testing.md)
- [Documentation](contributing/documentation.md)
- [Experimental Features](contributing/experimental-features.md)
- [CLI guideline](contributing/cli-guideline.md)
- [C++ style guide](contributing/cxx.md)
- [Release Notes](release-notes/release-notes.md)
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
- [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md)
- [Release 2.17 (2023-07-24)](release-notes/rl-2.17.md)
- [Release 2.16 (2023-05-31)](release-notes/rl-2.16.md)
- [Release 2.15 (2023-04-11)](release-notes/rl-2.15.md)
@@ -1 +1 @@

This section lists advanced topics related to builds and builds performance
@@ -12,14 +12,14 @@ machine is accessible via SSH and that it has Nix installed. You can
test whether connecting to the remote Nix instance works, e.g.

```console
$ nix store ping --store ssh://mac
$ nix store info --store ssh://mac
```

will try to connect to the machine named `mac`. It is possible to
specify an SSH identity file as part of the remote store URI, e.g.

```console
$ nix store ping --store ssh://mac?ssh-key=/home/alice/my-key
$ nix store info --store ssh://mac?ssh-key=/home/alice/my-key
```

Since builds should be non-interactive, the key should not have a
@@ -69,6 +69,8 @@ exec nix copy --to "s3://example-nix-cache" $OUT_PATHS
> store sign`. Nix guarantees the paths will not contain any spaces,
> however a store path might contain glob characters. The `set -f`
> disables globbing in the shell.
> If you want to upload the `.drv` file too, the `$DRV_PATH` variable
> is also defined for the script and works just like `$OUT_PATHS`.

Then make sure the hook program is executable by the `root` user:
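A hypothetical variant of the hook quoted in the hunk header, extended per the added note to also upload the `.drv` file (the cache URL is a placeholder, as in the quoted example):

```sh
#!/bin/sh
set -eu
set -f   # disable globbing; store paths may contain glob characters
# $OUT_PATHS and $DRV_PATH are provided to the hook by the Nix daemon
exec nix copy --to "s3://example-nix-cache" $OUT_PATHS $DRV_PATH
```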
@@ -2,109 +2,124 @@

Most Nix commands interpret the following environment variables:

- <span id="env-IN_NIX_SHELL">[`IN_NIX_SHELL`](#env-IN_NIX_SHELL)</span>\
  Indicator that tells if the current environment was set up by
  `nix-shell`. It can have the values `pure` or `impure`.
- <span id="env-IN_NIX_SHELL">[`IN_NIX_SHELL`](#env-IN_NIX_SHELL)</span>

- <span id="env-NIX_PATH">[`NIX_PATH`](#env-NIX_PATH)</span>\
  A colon-separated list of directories used to look up the location of Nix
  expressions using [paths](@docroot@/language/values.md#type-path)
  enclosed in angle brackets (i.e., `<path>`),
  e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the
  [`-I` option](@docroot@/command-ref/opt-common.md#opt-I).
  Indicator that tells if the current environment was set up by
  `nix-shell`. It can have the values `pure` or `impure`.

  If `NIX_PATH` is not set at all, Nix will fall back to the following list in [impure](@docroot@/command-ref/conf-file.md#conf-pure-eval) and [unrestricted](@docroot@/command-ref/conf-file.md#conf-restrict-eval) evaluation mode:
- <span id="env-NIX_PATH">[`NIX_PATH`](#env-NIX_PATH)</span>

  1. `$HOME/.nix-defexpr/channels`
  2. `nixpkgs=/nix/var/nix/profiles/per-user/root/channels/nixpkgs`
  3. `/nix/var/nix/profiles/per-user/root/channels`
  A colon-separated list of directories used to look up the location of Nix
  expressions using [paths](@docroot@/language/values.md#type-path)
  enclosed in angle brackets (i.e., `<path>`),
  e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the
  [`-I` option](@docroot@/command-ref/opt-common.md#opt-I).

  If `NIX_PATH` is set to an empty string, resolving search paths will always fail.
  For example, attempting to use `<nixpkgs>` will produce:
  If `NIX_PATH` is not set at all, Nix will fall back to the following list in [impure](@docroot@/command-ref/conf-file.md#conf-pure-eval) and [unrestricted](@docroot@/command-ref/conf-file.md#conf-restrict-eval) evaluation mode:

  error: file 'nixpkgs' was not found in the Nix search path
  1. `$HOME/.nix-defexpr/channels`
  2. `nixpkgs=/nix/var/nix/profiles/per-user/root/channels/nixpkgs`
  3. `/nix/var/nix/profiles/per-user/root/channels`

- <span id="env-NIX_IGNORE_SYMLINK_STORE">[`NIX_IGNORE_SYMLINK_STORE`](#env-NIX_IGNORE_SYMLINK_STORE)</span>\
  Normally, the Nix store directory (typically `/nix/store`) is not
  allowed to contain any symlink components. This is to prevent
  “impure” builds. Builders sometimes “canonicalise” paths by
  resolving all symlink components. Thus, builds on different machines
  (with `/nix/store` resolving to different locations) could yield
  different results. This is generally not a problem, except when
  builds are deployed to machines where `/nix/store` resolves
  differently. If you are sure that you’re not going to do that, you
  can set `NIX_IGNORE_SYMLINK_STORE` to `1`.
  If `NIX_PATH` is set to an empty string, resolving search paths will always fail.
  For example, attempting to use `<nixpkgs>` will produce:

  Note that if you’re symlinking the Nix store so that you can put it
  on another file system than the root file system, on Linux you’re
  better off using `bind` mount points, e.g.,
  error: file 'nixpkgs' was not found in the Nix search path

  ```console
  $ mkdir /nix
  $ mount -o bind /mnt/otherdisk/nix /nix
  ```
- <span id="env-NIX_IGNORE_SYMLINK_STORE">[`NIX_IGNORE_SYMLINK_STORE`](#env-NIX_IGNORE_SYMLINK_STORE)</span>

  Consult the mount 8 manual page for details.
  Normally, the Nix store directory (typically `/nix/store`) is not
  allowed to contain any symlink components. This is to prevent
  “impure” builds. Builders sometimes “canonicalise” paths by
  resolving all symlink components. Thus, builds on different machines
  (with `/nix/store` resolving to different locations) could yield
  different results. This is generally not a problem, except when
  builds are deployed to machines where `/nix/store` resolves
  differently. If you are sure that you’re not going to do that, you
  can set `NIX_IGNORE_SYMLINK_STORE` to `1`.

- <span id="env-NIX_STORE_DIR">[`NIX_STORE_DIR`](#env-NIX_STORE_DIR)</span>\
  Overrides the location of the Nix store (default `prefix/store`).
  Note that if you’re symlinking the Nix store so that you can put it
  on another file system than the root file system, on Linux you’re
  better off using `bind` mount points, e.g.,

- <span id="env-NIX_DATA_DIR">[`NIX_DATA_DIR`](#env-NIX_DATA_DIR)</span>\
  Overrides the location of the Nix static data directory (default
  `prefix/share`).
  ```console
  $ mkdir /nix
  $ mount -o bind /mnt/otherdisk/nix /nix
  ```

- <span id="env-NIX_LOG_DIR">[`NIX_LOG_DIR`](#env-NIX_LOG_DIR)</span>\
  Overrides the location of the Nix log directory (default
  `prefix/var/log/nix`).
  Consult the mount 8 manual page for details.

- <span id="env-NIX_STATE_DIR">[`NIX_STATE_DIR`](#env-NIX_STATE_DIR)</span>\
  Overrides the location of the Nix state directory (default
  `prefix/var/nix`).
- <span id="env-NIX_STORE_DIR">[`NIX_STORE_DIR`](#env-NIX_STORE_DIR)</span>

- <span id="env-NIX_CONF_DIR">[`NIX_CONF_DIR`](#env-NIX_CONF_DIR)</span>\
  Overrides the location of the system Nix configuration directory
  (default `prefix/etc/nix`).
  Overrides the location of the Nix store (default `prefix/store`).

- <span id="env-NIX_CONFIG">[`NIX_CONFIG`](#env-NIX_CONFIG)</span>\
  Applies settings from Nix configuration from the environment.
  The content is treated as if it was read from a Nix configuration file.
  Settings are separated by the newline character.
- <span id="env-NIX_DATA_DIR">[`NIX_DATA_DIR`](#env-NIX_DATA_DIR)</span>

- <span id="env-NIX_USER_CONF_FILES">[`NIX_USER_CONF_FILES`](#env-NIX_USER_CONF_FILES)</span>\
  Overrides the location of the Nix user configuration files to load from.
  Overrides the location of the Nix static data directory (default
  `prefix/share`).

  The default are the locations according to the [XDG Base Directory Specification].
  See the [XDG Base Directories](#xdg-base-directories) sub-section for details.
- <span id="env-NIX_LOG_DIR">[`NIX_LOG_DIR`](#env-NIX_LOG_DIR)</span>

  The variable is treated as a list separated by the `:` token.
  Overrides the location of the Nix log directory (default
  `prefix/var/log/nix`).

- <span id="env-TMPDIR">[`TMPDIR`](#env-TMPDIR)</span>\
  Use the specified directory to store temporary files. In particular,
  this includes temporary build directories; these can take up
  substantial amounts of disk space. The default is `/tmp`.
- <span id="env-NIX_STATE_DIR">[`NIX_STATE_DIR`](#env-NIX_STATE_DIR)</span>

- <span id="env-NIX_REMOTE">[`NIX_REMOTE`](#env-NIX_REMOTE)</span>\
  This variable should be set to `daemon` if you want to use the Nix
  daemon to execute Nix operations. This is necessary in [multi-user
  Nix installations](@docroot@/installation/multi-user.md). If the Nix
  daemon's Unix socket is at some non-standard path, this variable
  should be set to `unix://path/to/socket`. Otherwise, it should be
  left unset.
  Overrides the location of the Nix state directory (default
  `prefix/var/nix`).

- <span id="env-NIX_SHOW_STATS">[`NIX_SHOW_STATS`](#env-NIX_SHOW_STATS)</span>\
  If set to `1`, Nix will print some evaluation statistics, such as
  the number of values allocated.
- <span id="env-NIX_CONF_DIR">[`NIX_CONF_DIR`](#env-NIX_CONF_DIR)</span>

- <span id="env-NIX_COUNT_CALLS">[`NIX_COUNT_CALLS`](#env-NIX_COUNT_CALLS)</span>\
  If set to `1`, Nix will print how often functions were called during
  Nix expression evaluation. This is useful for profiling your Nix
  expressions.
  Overrides the location of the system Nix configuration directory
  (default `prefix/etc/nix`).

- <span id="env-GC_INITIAL_HEAP_SIZE">[`GC_INITIAL_HEAP_SIZE`](#env-GC_INITIAL_HEAP_SIZE)</span>\
  If Nix has been configured to use the Boehm garbage collector, this
  variable sets the initial size of the heap in bytes. It defaults to
  384 MiB. Setting it to a low value reduces memory consumption, but
  will increase runtime due to the overhead of garbage collection.
- <span id="env-NIX_CONFIG">[`NIX_CONFIG`](#env-NIX_CONFIG)</span>

  Applies settings from Nix configuration from the environment.
  The content is treated as if it was read from a Nix configuration file.
  Settings are separated by the newline character.

- <span id="env-NIX_USER_CONF_FILES">[`NIX_USER_CONF_FILES`](#env-NIX_USER_CONF_FILES)</span>

  Overrides the location of the Nix user configuration files to load from.

  The default are the locations according to the [XDG Base Directory Specification].
  See the [XDG Base Directories](#xdg-base-directories) sub-section for details.

  The variable is treated as a list separated by the `:` token.

- <span id="env-TMPDIR">[`TMPDIR`](#env-TMPDIR)</span>

  Use the specified directory to store temporary files. In particular,
  this includes temporary build directories; these can take up
  substantial amounts of disk space. The default is `/tmp`.

- <span id="env-NIX_REMOTE">[`NIX_REMOTE`](#env-NIX_REMOTE)</span>

  This variable should be set to `daemon` if you want to use the Nix
  daemon to execute Nix operations. This is necessary in [multi-user
  Nix installations](@docroot@/installation/multi-user.md). If the Nix
  daemon's Unix socket is at some non-standard path, this variable
  should be set to `unix://path/to/socket`. Otherwise, it should be
  left unset.

- <span id="env-NIX_SHOW_STATS">[`NIX_SHOW_STATS`](#env-NIX_SHOW_STATS)</span>

  If set to `1`, Nix will print some evaluation statistics, such as
  the number of values allocated.

- <span id="env-NIX_COUNT_CALLS">[`NIX_COUNT_CALLS`](#env-NIX_COUNT_CALLS)</span>

  If set to `1`, Nix will print how often functions were called during
  Nix expression evaluation. This is useful for profiling your Nix
  expressions.

- <span id="env-GC_INITIAL_HEAP_SIZE">[`GC_INITIAL_HEAP_SIZE`](#env-GC_INITIAL_HEAP_SIZE)</span>

  If Nix has been configured to use the Boehm garbage collector, this
  variable sets the initial size of the heap in bytes. It defaults to
  384 MiB. Setting it to a low value reduces memory consumption, but
  will increase runtime due to the overhead of garbage collection.

## XDG Base Directories
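As a hedged illustration of the variables described above (paths and values here are hypothetical, not taken from the diff): `NIX_CONFIG` takes newline-separated `nix.conf`-style settings, and `NIX_PATH` entries are colon-separated:

```console
# Pass a configuration setting without editing nix.conf
$ NIX_CONFIG='experimental-features = nix-command' nix --version

# Point <nixpkgs> at a specific checkout for this invocation
$ NIX_PATH=nixpkgs=/home/alice/src/nixpkgs nix-build '<nixpkgs>' -A hello
```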
@@ -14,23 +14,28 @@

# Description

The install operation creates a new user environment, based on the
current generation of the active profile, to which a set of store paths
described by *args* is added. The arguments *args* map to store paths in
a number of possible ways:
The install operation creates a new user environment.
It is based on the current generation of the active [profile](@docroot@/command-ref/files/profiles.md), to which a set of [store paths] described by *args* is added.

- By default, *args* is a set of derivation names denoting derivations
  in the active Nix expression. These are realised, and the resulting
  output paths are installed. Currently installed derivations with a
  name equal to the name of a derivation being added are removed
  unless the option `--preserve-installed` is specified.
[store paths]: @docroot@/glossary.md#gloss-store-path

The arguments *args* map to store paths in a number of possible ways:

- By default, *args* is a set of [derivation] names denoting derivations in the [default Nix expression].
  These are [realised], and the resulting output paths are installed.
  Currently installed derivations with a name equal to the name of a derivation being added are removed unless the option `--preserve-installed` is specified.

  [derivation]: @docroot@/glossary.md#gloss-derivation
  [default Nix expression]: @docroot@/command-ref/files/default-nix-expression.md
  [realised]: @docroot@/glossary.md#gloss-realise

  If there are multiple derivations matching a name in *args* that
  have the same name (e.g., `gcc-3.3.6` and `gcc-4.1.1`), then the
  derivation with the highest *priority* is used. A derivation can
  define a priority by declaring the `meta.priority` attribute. This
  attribute should be a number, with a higher value denoting a lower
  priority. The default priority is `0`.
  priority. The default priority is `5`.

  If there are multiple matching derivations with the same priority,
  then the derivation with the highest version will be installed.

@@ -40,44 +45,90 @@ a number of possible ways:
  gcc-3.3.6 gcc-4.1.1` will install both version of GCC (and will
  probably cause a user environment conflict\!).

- If `--attr` (`-A`) is specified, the arguments are *attribute
  paths* that select attributes from the top-level Nix
  expression. This is faster than using derivation names and
  unambiguous. To find out the attribute paths of available
  packages, use `nix-env --query --available --attr-path `.
- If [`--attr`](#opt-attr) / `-A` is specified, the arguments are *attribute paths* that select attributes from the [default Nix expression].
  This is faster than using derivation names and unambiguous.
  Show the attribute paths of available packages with [`nix-env --query`](./query.md):

  ```console
  nix-env --query --available --attr-path`
  ```

- If `--from-profile` *path* is given, *args* is a set of names
  denoting installed store paths in the profile *path*. This is an
  denoting installed [store paths] in the profile *path*. This is an
  easy way to copy user environment elements from one profile to
  another.

- If `--from-expression` is given, *args* are Nix
  [functions](@docroot@/language/constructs.md#functions)
  that are called with the active Nix expression as their single
  argument. The derivations returned by those function calls are
  installed. This allows derivations to be specified in an
  unambiguous way, which is necessary if there are multiple
  derivations with the same name.
- If `--from-expression` is given, *args* are [Nix language functions](@docroot@/language/constructs.md#functions) that are called with the [default Nix expression] as their single argument.
  The derivations returned by those function calls are installed.
  This allows derivations to be specified in an unambiguous way, which is necessary if there are multiple derivations with the same name.

- If *args* are [store derivations](@docroot@/glossary.md#gloss-store-derivation), then these are
  [realised](@docroot@/command-ref/nix-store/realise.md), and the resulting output paths
  are installed.
- If *args* are [store derivations](@docroot@/glossary.md#gloss-store-derivation), then these are [realised], and the resulting output paths are installed.

- If *args* are store paths that are not store derivations, then these
  are [realised](@docroot@/command-ref/nix-store/realise.md) and installed.
- If *args* are [store paths] that are not store derivations, then these are [realised] and installed.

- By default all outputs are installed for each derivation. That can
  be reduced by setting `meta.outputsToInstall`.
- By default all [outputs](@docroot@/language/derivations.md#attr-outputs) are installed for each [derivation].
  This can be overridden by adding a `meta.outputsToInstall` attribute on the derivation listing a subset of the output names.

# Flags
  Example:

  The file `example.nix` defines a derivation with two outputs `foo` and `bar`, each containing a file.

  ```nix
  # example.nix
  let
    pkgs = import <nixpkgs> {};
    command = ''
      ${pkgs.coreutils}/bin/mkdir -p $foo $bar
      echo foo > $foo/foo-file
      echo bar > $bar/bar-file
    '';
  in
  derivation {
    name = "example";
    builder = "${pkgs.bash}/bin/bash";
    args = [ "-c" command ];
    outputs = [ "foo" "bar" ];
    system = builtins.currentSystem;
  }
  ```

  Installing from this Nix expression will make files from both outputs appear in the current profile.

  ```console
  $ nix-env --install --file example.nix
  installing 'example'
  $ ls ~/.nix-profile
  foo-file
  bar-file
  manifest.nix
  ```

  Adding `meta.outputsToInstall` to that derivation will make `nix-env` only install files from the specified outputs.

  ```nix
  # example-outputs.nix
  import ./example.nix // { meta.outputsToInstall = [ "bar" ]; }
  ```

  ```console
  $ nix-env --install --file example-outputs.nix
  installing 'example'
  $ ls ~/.nix-profile
  bar-file
  manifest.nix
  ```

# Options

- `--prebuilt-only` / `-b`

- `--prebuilt-only` / `-b`\
  Use only derivations for which a substitute is registered, i.e.,
  there is a pre-built binary available that can be downloaded in lieu
  of building the derivation. Thus, no packages will be built from
  source.

- `--preserve-installed` / `-P`\
- `--preserve-installed` / `-P`

  Do not remove derivations with a name matching one of the
  derivations being installed. Usually, trying to have two versions of
  the same package installed in the same generation of a profile will

@@ -85,7 +136,8 @@ a number of possible ways:
  clashes between the two versions. However, this is not the case for
  all packages.

- `--remove-all` / `-r`\
- `--remove-all` / `-r`

  Remove all previously installed packages first. This is equivalent
  to running `nix-env --uninstall '.*'` first, except that everything happens
  in a single transaction.
@ -31,15 +31,18 @@ store already contains a file with the same hash and base name.
|
|||
Otherwise, the file is downloaded, and an error is signaled if the
|
||||
actual hash of the file does not match the specified hash.
|
||||
|
||||
This command prints the hash on standard output. Additionally, if the
|
||||
option `--print-path` is used, the path of the downloaded file in the
|
||||
Nix store is also printed.
|
||||
This command prints the hash on standard output.
|
||||
The hash is printed using base-32 unless `--type md5` is specified,
|
||||
in which case it's printed using base-16.
|
||||
Additionally, if the option `--print-path` is used,
|
||||
the path of the downloaded file in the Nix store is also printed.
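
For example, a hedged illustration (the URL, hash, and store path below are placeholders, not real values):

```console
$ nix-prefetch-url --print-path https://example.org/hello-2.12.tar.gz
<base-32 hash of the downloaded file>
/nix/store/<hash>-hello-2.12.tar.gz
```

The first line of output is the hash; the second line, produced by `--print-path`, is the store path of the downloaded file.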
|
||||
|
||||
# Options
|
||||
|
||||
- `--type` *hashAlgo*\
|
||||
Use the specified cryptographic hash algorithm, which can be one of
|
||||
`md5`, `sha1`, `sha256`, and `sha512`.
|
||||
Use the specified cryptographic hash algorithm,
|
||||
which can be one of `md5`, `sha1`, `sha256`, and `sha512`.
|
||||
The default is `sha256`.
|
||||
|
||||
- `--print-path`\
|
||||
Print the store path of the downloaded file on standard output.
|
||||
|
|
|
@ -235,14 +235,14 @@ package like Terraform:
|
|||
|
||||
```bash
|
||||
#! /usr/bin/env nix-shell
|
||||
#! nix-shell -i bash --packages "terraform.withPlugins (plugins: [ plugins.openstack ])"
|
||||
#! nix-shell -i bash --packages 'terraform.withPlugins (plugins: [ plugins.openstack ])'
|
||||
|
||||
terraform apply
|
||||
```
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> You must use double quotes (`"`) when passing a simple Nix expression
|
||||
> You must use single or double quotes (`'`, `"`) when passing a simple Nix expression
|
||||
> in a nix-shell shebang.
|
||||
|
||||
Finally, using the merging of multiple nix-shell shebangs the following
|
||||
|
@ -251,7 +251,7 @@ branch):
|
|||
|
||||
```haskell
|
||||
#! /usr/bin/env nix-shell
|
||||
#! nix-shell -i runghc --packages "haskellPackages.ghcWithPackages (ps: [ps.download-curl ps.tagsoup])"
|
||||
#! nix-shell -i runghc --packages 'haskellPackages.ghcWithPackages (ps: [ps.download-curl ps.tagsoup])'
|
||||
#! nix-shell -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/nixos-20.03.tar.gz
|
||||
|
||||
import Network.Curl.Download
|
||||
|
|
|
@ -5,8 +5,8 @@
|
|||
# Synopsis
|
||||
|
||||
`nix-store` {`--query` | `-q`}
|
||||
{`--outputs` | `--requisites` | `-R` | `--references` |
|
||||
`--referrers` | `--referrers-closure` | `--deriver` | `-d` |
|
||||
{`--outputs` | `--requisites` | `-R` | `--references` | `--referrers` |
|
||||
`--referrers-closure` | `--deriver` | `-d` | `--valid-derivers` |
|
||||
`--graph` | `--tree` | `--binding` *name* | `-b` *name* | `--hash` |
|
||||
`--size` | `--roots`}
|
||||
[`--use-output`] [`-u`] [`--force-realise`] [`-f`]
|
||||
|
@ -82,13 +82,21 @@ symlink.
|
|||
in the Nix store that are dependent on *paths*.
|
||||
|
||||
- `--deriver`; `-d`\
|
||||
Prints the [deriver] of the store paths *paths*. If
|
||||
Prints the [deriver] that was used to build the store paths *paths*. If
|
||||
the path has no deriver (e.g., if it is a source file), or if the
|
||||
deriver is not known (e.g., in the case of a binary-only
|
||||
deployment), the string `unknown-deriver` is printed.
|
||||
The returned deriver is not guaranteed to exist in the local store, for
|
||||
example when *paths* were substituted from a binary cache.
|
||||
Use `--valid-derivers` instead to obtain valid paths only.
|
||||
|
||||
[deriver]: ../../glossary.md#gloss-deriver
|
||||
|
||||
- `--valid-derivers`\
|
||||
Prints a set of derivation files (`.drv`) which are supposed to produce
|
||||
said paths when realised. Might print nothing, for example for source paths
|
||||
or paths substituted from a binary cache.
|
||||
|
||||
- `--graph`\
|
||||
Prints the references graph of the store paths *paths* in the format
|
||||
of the `dot` tool of AT\&T's [Graphviz
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# Name
|
||||
|
||||
`nix-store --realise` - realise specified store paths
|
||||
`nix-store --realise` - build or fetch store objects
|
||||
|
||||
# Synopsis
|
||||
|
||||
|
@ -8,33 +8,39 @@
|
|||
|
||||
# Description
|
||||
|
||||
The operation `--realise` essentially “builds” the specified store
|
||||
paths. Realisation is a somewhat overloaded term:
|
||||
|
||||
- If the store path is a *derivation*, realisation ensures that the
|
||||
output paths of the derivation are [valid] (i.e.,
|
||||
the output path and its closure exist in the file system). This
|
||||
can be done in several ways. First, it is possible that the
|
||||
outputs are already valid, in which case we are done
|
||||
immediately. Otherwise, there may be [substitutes]
|
||||
that produce the outputs (e.g., by downloading them). Finally, the
|
||||
outputs can be produced by running the build task described
|
||||
by the derivation.
|
||||
Each of *paths* is processed as follows:
|
||||
|
||||
- If the store path is not a derivation, realisation ensures that the
|
||||
specified path is valid (i.e., it and its closure exist in the file
|
||||
system). If the path is already valid, we are done immediately.
|
||||
Otherwise, the path and any missing paths in its closure may be
|
||||
produced through substitutes. If there are no (successful)
|
||||
substitutes, realisation fails.
|
||||
- If the path leads to a [store derivation]:
|
||||
1. If it is not [valid], substitute the store derivation file itself.
|
||||
2. Realise its [output paths]:
|
||||
- Try to fetch from [substituters] the [store objects] associated with the output paths in the store derivation's [closure].
|
||||
- With [content-addressed derivations] (experimental):
|
||||
Determine the output paths to realise by querying content-addressed realisation entries in the [Nix database].
|
||||
- For any store paths that cannot be substituted, produce the required store objects:
|
||||
1. Realise all outputs of the derivation's dependencies
|
||||
2. Run the derivation's [`builder`](@docroot@/language/derivations.md#attr-builder) executable
|
||||
<!-- TODO: Link to build process page #8888 -->
|
||||
- Otherwise, and if the path is not already valid: Try to fetch the associated [store objects] in the path's [closure] from [substituters].
|
||||
|
||||
If no substitutes are available and no store derivation is given, realisation fails.
|
||||
|
||||
[store paths]: @docroot@/glossary.md#gloss-store-path
|
||||
[valid]: @docroot@/glossary.md#gloss-validity
|
||||
[substitutes]: @docroot@/glossary.md#gloss-substitute
|
||||
[store derivation]: @docroot@/glossary.md#gloss-store-derivation
|
||||
[output paths]: @docroot@/glossary.md#gloss-output-path
|
||||
[store objects]: @docroot@/glossary.md#gloss-store-object
|
||||
[closure]: @docroot@/glossary.md#gloss-closure
|
||||
[substituters]: @docroot@/command-ref/conf-file.md#conf-substituters
|
||||
[content-addressed derivations]: @docroot@/contributing/experimental-features.md#xp-feature-ca-derivations
|
||||
[Nix database]: @docroot@/glossary.md#gloss-nix-database
|
||||
|
||||
The output path of each derivation is printed on standard output. (For
|
||||
non-derivations argument, the argument itself is printed.)
|
||||
The resulting paths are printed on standard output.
|
||||
For non-derivation arguments, the argument itself is printed.
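
As a hedged sketch (the store paths are placeholders): realising a store derivation prints its output path, while realising a plain store path prints that path itself once it is valid.

```console
$ nix-store --realise /nix/store/<hash>-example.drv
/nix/store/<hash>-example
$ nix-store --realise /nix/store/<hash>-example
/nix/store/<hash>-example
```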
|
||||
|
||||
The following flags are available:
|
||||
{{#include ../status-build-failure.md}}
|
||||
|
||||
# Options
|
||||
|
||||
- `--dry-run`\
|
||||
Print on standard error a description of what packages would be
|
||||
|
@ -54,8 +60,6 @@ The following flags are available:
|
|||
previous build, the new output path is left in
|
||||
`/nix/store/name.check.`
|
||||
|
||||
{{#include ../status-build-failure.md}}
|
||||
|
||||
{{#include ./opt-common.md}}
|
||||
|
||||
{{#include ../opt-common.md}}
|
||||
|
@ -67,8 +71,6 @@ The following flags are available:
|
|||
This operation is typically used to build [store derivation]s produced by
|
||||
[`nix-instantiate`](@docroot@/command-ref/nix-instantiate.md):
|
||||
|
||||
[store derivation]: @docroot@/glossary.md#gloss-store-derivation
|
||||
|
||||
```console
|
||||
$ nix-store --realise $(nix-instantiate ./test.nix)
|
||||
/nix/store/31axcgrlbfsxzmfff1gyj1bf62hvkby2-aterm-2.3.1
|
||||
|
|
|
@ -1,57 +0,0 @@
|
|||
\--help
|
||||
|
||||
\--version
|
||||
|
||||
\--verbose
|
||||
|
||||
\-v
|
||||
|
||||
\--quiet
|
||||
|
||||
\--log-format
|
||||
|
||||
format
|
||||
|
||||
\--no-build-output
|
||||
|
||||
\-Q
|
||||
|
||||
\--max-jobs
|
||||
|
||||
\-j
|
||||
|
||||
number
|
||||
|
||||
\--cores
|
||||
|
||||
number
|
||||
|
||||
\--max-silent-time
|
||||
|
||||
number
|
||||
|
||||
\--timeout
|
||||
|
||||
number
|
||||
|
||||
\--keep-going
|
||||
|
||||
\-k
|
||||
|
||||
\--keep-failed
|
||||
|
||||
\-K
|
||||
|
||||
\--fallback
|
||||
|
||||
\--readonly-mode
|
||||
|
||||
\-I
|
||||
|
||||
path
|
||||
|
||||
\--option
|
||||
|
||||
name
|
||||
|
||||
value
|
|
@ -2,217 +2,208 @@
|
|||
|
||||
Most Nix commands accept the following command-line options:
|
||||
|
||||
- <span id="opt-help">[`--help`](#opt-help)</span>\
|
||||
Prints out a summary of the command syntax and exits.
|
||||
- <span id="opt-help">[`--help`](#opt-help)</span>
|
||||
|
||||
- <span id="opt-version">[`--version`](#opt-version)</span>\
|
||||
Prints out the Nix version number on standard output and exits.
|
||||
Prints out a summary of the command syntax and exits.
|
||||
|
||||
- <span id="opt-verbose">[`--verbose`](#opt-verbose)</span> / `-v`\
|
||||
Increases the level of verbosity of diagnostic messages printed on
|
||||
standard error. For each Nix operation, the information printed on
|
||||
standard output is well-defined; any diagnostic information is
|
||||
printed on standard error, never on standard output.
|
||||
- <span id="opt-version">[`--version`](#opt-version)</span>
|
||||
|
||||
This option may be specified repeatedly. Currently, the following
|
||||
verbosity levels exist:
|
||||
Prints out the Nix version number on standard output and exits.
|
||||
|
||||
- 0\
|
||||
“Errors only”: only print messages explaining why the Nix
|
||||
invocation failed.
|
||||
- <span id="opt-verbose">[`--verbose`](#opt-verbose)</span> / `-v`
|
||||
|
||||
- 1\
|
||||
“Informational”: print *useful* messages about what Nix is
|
||||
doing. This is the default.
|
||||
Increases the level of verbosity of diagnostic messages printed on standard error.
|
||||
For each Nix operation, the information printed on standard output is well-defined;
|
||||
any diagnostic information is printed on standard error, never on standard output.
|
||||
|
||||
- 2\
|
||||
“Talkative”: print more informational messages.
|
||||
This option may be specified repeatedly.
|
||||
Currently, the following verbosity levels exist:
|
||||
|
||||
- 3\
|
||||
“Chatty”: print even more informational messages.
|
||||
- `0` “Errors only”
|
||||
|
||||
- 4\
|
||||
“Debug”: print debug information.
|
||||
Only print messages explaining why the Nix invocation failed.
|
||||
|
||||
- 5\
|
||||
“Vomit”: print vast amounts of debug information.
|
||||
- `1` “Informational”
|
||||
|
||||
- <span id="opt-quiet">[`--quiet`](#opt-quiet)</span>\
|
||||
Decreases the level of verbosity of diagnostic messages printed on
|
||||
standard error. This is the inverse option to `-v` / `--verbose`.
|
||||
Print *useful* messages about what Nix is doing.
|
||||
This is the default.
|
||||
|
||||
This option may be specified repeatedly. See the previous verbosity
|
||||
levels list.
|
||||
- `2` “Talkative”
|
||||
|
||||
- <span id="opt-log-format">[`--log-format`](#opt-log-format)</span> *format*\
|
||||
This option can be used to change the output of the log format, with
|
||||
*format* being one of:
|
||||
Print more informational messages.
|
||||
|
||||
- raw\
|
||||
This is the raw format, as outputted by nix-build.
|
||||
- `3` “Chatty”
|
||||
|
||||
- internal-json\
|
||||
Outputs the logs in a structured manner.
|
||||
Print even more informational messages.
|
||||
|
||||
> **Warning**
|
||||
>
|
||||
> While the schema itself is relatively stable, the format of
|
||||
> the error-messages (namely of the `msg`-field) can change
|
||||
> between releases.
|
||||
- `4` “Debug”
|
||||
|
||||
- bar\
|
||||
Only display a progress bar during the builds.
|
||||
Print debug information.
|
||||
|
||||
- bar-with-logs\
|
||||
Display the raw logs, with the progress bar at the bottom.
|
||||
- `5` “Vomit”
|
||||
|
||||
- <span id="opt-no-build-output">[`--no-build-output`](#opt-no-build-output)</span> / `-Q`\
|
||||
By default, output written by builders to standard output and
|
||||
standard error is echoed to the Nix command's standard error. This
|
||||
option suppresses this behaviour. Note that the builder's standard
|
||||
output and error are always written to a log file in
|
||||
`prefix/nix/var/log/nix`.
|
||||
Print vast amounts of debug information.
|
||||
|
||||
- <span id="opt-max-jobs">[`--max-jobs`](#opt-max-jobs)</span> / `-j` *number*\
|
||||
Sets the maximum number of build jobs that Nix will perform in
|
||||
parallel to the specified number. Specify `auto` to use the number
|
||||
of CPUs in the system. The default is specified by the `max-jobs`
|
||||
configuration setting, which itself defaults to `1`. A higher
|
||||
value is useful on SMP systems or to exploit I/O latency.
|
||||
- <span id="opt-quiet">[`--quiet`](#opt-quiet)</span>
|
||||
|
||||
Setting it to `0` disallows building on the local machine, which is
|
||||
useful when you want builds to happen only on remote builders.
|
||||
Decreases the level of verbosity of diagnostic messages printed on standard error.
|
||||
This is the inverse option to `-v` / `--verbose`.
|
||||
|
||||
- <span id="opt-cores">[`--cores`](#opt-cores)</span>\
|
||||
Sets the value of the `NIX_BUILD_CORES` environment variable in
|
||||
the invocation of builders. Builders can use this variable at
|
||||
their discretion to control the maximum amount of parallelism. For
|
||||
instance, in Nixpkgs, if the derivation attribute
|
||||
`enableParallelBuilding` is set to `true`, the builder passes the
|
||||
`-jN` flag to GNU Make. It defaults to the value of the `cores`
|
||||
configuration setting, if set, or `1` otherwise. The value `0`
|
||||
means that the builder should use all available CPU cores in the
|
||||
system.
|
||||
This option may be specified repeatedly.
|
||||
See the previous verbosity levels list.
|
||||
|
||||
- <span id="opt-max-silent-time">[`--max-silent-time`](#opt-max-silent-time)</span>\
|
||||
Sets the maximum number of seconds that a builder can go without
|
||||
producing any data on standard output or standard error. The
|
||||
default is specified by the `max-silent-time` configuration
|
||||
setting. `0` means no time-out.
|
||||
- <span id="opt-log-format">[`--log-format`](#opt-log-format)</span> *format*
|
||||
|
||||
- <span id="opt-timeout">[`--timeout`](#opt-timeout)</span>\
|
||||
Sets the maximum number of seconds that a builder can run. The
|
||||
default is specified by the `timeout` configuration setting. `0`
|
||||
means no timeout.
|
||||
This option can be used to change the output of the log format, with *format* being one of:
|
||||
|
||||
- <span id="opt-keep-going">[`--keep-going`](#opt-keep-going)</span> / `-k`\
|
||||
Keep going in case of failed builds, to the greatest extent
|
||||
possible. That is, if building an input of some derivation fails,
|
||||
Nix will still build the other inputs, but not the derivation
|
||||
itself. Without this option, Nix stops if any build fails (except
|
||||
for builds of substitutes), possibly killing builds in progress (in
|
||||
case of parallel or distributed builds).
|
||||
- `raw`
|
||||
|
||||
- <span id="opt-keep-failed">[`--keep-failed`](#opt-keep-failed)</span> / `-K`\
|
||||
Specifies that in case of a build failure, the temporary directory
|
||||
(usually in `/tmp`) in which the build takes place should not be
|
||||
deleted. The path of the build directory is printed as an
|
||||
informational message.
|
||||
This is the raw format, as outputted by nix-build.
|
||||
|
||||
- <span id="opt-fallback">[`--fallback`](#opt-fallback)</span>\
|
||||
Whenever Nix attempts to build a derivation for which substitutes
|
||||
are known for each output path, but realising the output paths
|
||||
through the substitutes fails, fall back on building the derivation.
|
||||
- `internal-json`
|
||||
|
||||
The most common scenario in which this is useful is when we have
|
||||
registered substitutes in order to perform binary distribution from,
|
||||
say, a network repository. If the repository is down, the
|
||||
realisation of the derivation will fail. When this option is
|
||||
specified, Nix will build the derivation instead. Thus, installation
|
||||
from binaries falls back on installation from source. This option is
|
||||
not the default since it is generally not desirable for a transient
|
||||
failure in obtaining the substitutes to lead to a full build from
|
||||
source (with the related consumption of resources).
|
||||
Outputs the logs in a structured manner.
|
||||
|
||||
- <span id="opt-readonly-mode">[`--readonly-mode`](#opt-readonly-mode)</span>\
|
||||
When this option is used, no attempt is made to open the Nix
|
||||
database. Most Nix operations do need database access, so those
|
||||
operations will fail.
|
||||
> **Warning**
|
||||
>
|
||||
> While the schema itself is relatively stable, the format of
|
||||
> the error-messages (namely of the `msg`-field) can change
|
||||
> between releases.
|
||||
|
||||
- <span id="opt-arg">[`--arg`](#opt-arg)</span> *name* *value*\
|
||||
This option is accepted by `nix-env`, `nix-instantiate`,
|
||||
`nix-shell` and `nix-build`. When evaluating Nix expressions, the
|
||||
expression evaluator will automatically try to call functions that
|
||||
it encounters. It can automatically call functions for which every
|
||||
argument has a [default
|
||||
value](@docroot@/language/constructs.md#functions) (e.g.,
|
||||
`{ argName ? defaultValue }: ...`). With `--arg`, you can also
|
||||
call functions that have arguments without a default value (or
|
||||
override a default value). That is, if the evaluator encounters a
|
||||
function with an argument named *name*, it will call it with value
|
||||
*value*.
|
||||
- `bar`
|
||||
|
||||
For instance, the top-level `default.nix` in Nixpkgs is actually a
|
||||
function:
|
||||
Only display a progress bar during the builds.
|
||||
|
||||
```nix
|
||||
{ # The system (e.g., `i686-linux') for which to build the packages.
|
||||
system ? builtins.currentSystem
|
||||
...
|
||||
}: ...
|
||||
```
|
||||
- `bar-with-logs`
|
||||
|
||||
So if you call this Nix expression (e.g., when you do `nix-env --install --attr
|
||||
pkgname`), the function will be called automatically using the
|
||||
value [`builtins.currentSystem`](@docroot@/language/builtins.md) for
|
||||
the `system` argument. You can override this using `--arg`, e.g.,
|
||||
`nix-env --install --attr pkgname --arg system \"i686-freebsd\"`. (Note that
|
||||
since the argument is a Nix string literal, you have to escape the
|
||||
quotes.)
|
||||
Display the raw logs, with the progress bar at the bottom.
|
||||
|
||||
- <span id="opt-argstr">[`--argstr`](#opt-argstr)</span> *name* *value*\
|
||||
This option is like `--arg`, only the value is not a Nix
|
||||
expression but a string. So instead of `--arg system
|
||||
\"i686-linux\"` (the outer quotes are to keep the shell happy) you
|
||||
can say `--argstr system i686-linux`.
|
||||
- <span id="opt-no-build-output">[`--no-build-output`](#opt-no-build-output)</span> / `-Q`
|
||||
|
||||
- <span id="opt-attr">[`--attr`](#opt-attr)</span> / `-A` *attrPath*\
|
||||
Select an attribute from the top-level Nix expression being
|
||||
evaluated. (`nix-env`, `nix-instantiate`, `nix-build` and
|
||||
`nix-shell` only.) The *attribute path* *attrPath* is a sequence
|
||||
of attribute names separated by dots. For instance, given a
|
||||
top-level Nix expression *e*, the attribute path `xorg.xorgserver`
|
||||
would cause the expression `e.xorg.xorgserver` to be used. See
|
||||
[`nix-env --install`](@docroot@/command-ref/nix-env/install.md) for some
|
||||
concrete examples.
|
||||
By default, output written by builders to standard output and standard error is echoed to the Nix command's standard error.
|
||||
This option suppresses this behaviour.
|
||||
Note that the builder's standard output and error are always written to a log file in `prefix/nix/var/log/nix`.
|
||||
|
||||
In addition to attribute names, you can also specify array indices.
|
||||
For instance, the attribute path `foo.3.bar` selects the `bar`
|
||||
attribute of the fourth element of the array in the `foo` attribute
|
||||
of the top-level expression.
|
||||
- <span id="opt-max-jobs">[`--max-jobs`](#opt-max-jobs)</span> / `-j` *number*
|
||||
|
||||
- <span id="opt-expr">[`--expr`](#opt-expr)</span> / `-E`\
|
||||
Interpret the command line arguments as a list of Nix expressions to
|
||||
be parsed and evaluated, rather than as a list of file names of Nix
|
||||
expressions. (`nix-instantiate`, `nix-build` and `nix-shell` only.)
|
||||
Sets the maximum number of build jobs that Nix will perform in parallel to the specified number.
|
||||
Specify `auto` to use the number of CPUs in the system.
|
||||
The default is specified by the `max-jobs` configuration setting, which itself defaults to `1`.
|
||||
A higher value is useful on SMP systems or to exploit I/O latency.
|
||||
|
||||
For `nix-shell`, this option is commonly used to give you a shell in
|
||||
which you can build the packages returned by the expression. If you
|
||||
want to get a shell which contain the *built* packages ready for
|
||||
use, give your expression to the `nix-shell --packages ` convenience flag
|
||||
instead.
|
||||
Setting it to `0` disallows building on the local machine, which is useful when you want builds to happen only on remote builders.
|
||||
|
||||
- <span id="opt-I">[`-I`](#opt-I)</span> *path*\
|
||||
Add an entry to the [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path).
|
||||
This option may be given multiple times.
|
||||
Paths added through `-I` take precedence over [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH).
|
||||
- <span id="opt-cores">[`--cores`](#opt-cores)</span>
|
||||
|
||||
- <span id="opt-option">[`--option`](#opt-option)</span> *name* *value*\
|
||||
Set the Nix configuration option *name* to *value*. This overrides
|
||||
settings in the Nix configuration file (see nix.conf5).
|
||||
Sets the value of the `NIX_BUILD_CORES` environment variable in the invocation of builders.
|
||||
Builders can use this variable at their discretion to control the maximum amount of parallelism.
|
||||
For instance, in Nixpkgs, if the derivation attribute `enableParallelBuilding` is set to `true`, the builder passes the `-jN` flag to GNU Make.
|
||||
It defaults to the value of the `cores` configuration setting, if set, or `1` otherwise.
|
||||
The value `0` means that the builder should use all available CPU cores in the system.
|
||||
|
||||
- <span id="opt-repair">[`--repair`](#opt-repair)</span>\
|
||||
Fix corrupted or missing store paths by redownloading or rebuilding
|
||||
them. Note that this is slow because it requires computing a
|
||||
cryptographic hash of the contents of every path in the closure of
|
||||
the build. Also note the warning under `nix-store --repair-path`.
|
||||
- <span id="opt-max-silent-time">[`--max-silent-time`](#opt-max-silent-time)</span>
|
||||
|
||||
Sets the maximum number of seconds that a builder can go without producing any data on standard output or standard error.
|
||||
The default is specified by the `max-silent-time` configuration setting.
|
||||
`0` means no time-out.
|
||||
|
||||
- <span id="opt-timeout">[`--timeout`](#opt-timeout)</span>
|
||||
|
||||
Sets the maximum number of seconds that a builder can run.
|
||||
The default is specified by the `timeout` configuration setting.
|
||||
`0` means no timeout.
|
||||
|
||||
- <span id="opt-keep-going">[`--keep-going`](#opt-keep-going)</span> / `-k`
|
||||
|
||||
Keep going in case of failed builds, to the greatest extent possible.
|
||||
That is, if building an input of some derivation fails, Nix will still build the other inputs, but not the derivation itself.
|
||||
Without this option, Nix stops if any build fails (except for builds of substitutes), possibly killing builds in progress (in case of parallel or distributed builds).
|
||||
|
||||
- <span id="opt-keep-failed">[`--keep-failed`](#opt-keep-failed)</span> / `-K`
|
||||
|
||||
Specifies that in case of a build failure, the temporary directory (usually in `/tmp`) in which the build takes place should not be deleted.
|
||||
The path of the build directory is printed as an informational message.
|
||||
|
||||
- <span id="opt-fallback">[`--fallback`](#opt-fallback)</span>
|
||||
|
||||
Whenever Nix attempts to build a derivation for which substitutes are known for each output path, but realising the output paths through the substitutes fails, fall back on building the derivation.
|
||||
|
||||
The most common scenario in which this is useful is when we have registered substitutes in order to perform binary distribution from, say, a network repository.
|
||||
If the repository is down, the realisation of the derivation will fail.
|
||||
When this option is specified, Nix will build the derivation instead.
|
||||
Thus, installation from binaries falls back on installation from source.
|
||||
This option is not the default since it is generally not desirable for a transient failure in obtaining the substitutes to lead to a full build from source (with the related consumption of resources).
|
||||
|
||||
- <span id="opt-readonly-mode">[`--readonly-mode`](#opt-readonly-mode)</span>
|
||||
|
||||
When this option is used, no attempt is made to open the Nix database.
|
||||
Most Nix operations do need database access, so those operations will fail.
|
||||
|
||||
- <span id="opt-arg">[`--arg`](#opt-arg)</span> *name* *value*
|
||||
|
||||
This option is accepted by `nix-env`, `nix-instantiate`, `nix-shell` and `nix-build`.
|
||||
When evaluating Nix expressions, the expression evaluator will automatically try to call functions that it encounters.
|
||||
It can automatically call functions for which every argument has a [default value](@docroot@/language/constructs.md#functions) (e.g., `{ argName ? defaultValue }: ...`).
|
||||
|
||||
With `--arg`, you can also call functions that have arguments without a default value (or override a default value).
|
||||
That is, if the evaluator encounters a function with an argument named *name*, it will call it with value *value*.
|
||||
|
||||
For instance, the top-level `default.nix` in Nixpkgs is actually a function:
|
||||
|
||||
```nix
|
||||
{ # The system (e.g., `i686-linux`) for which to build the packages.
|
||||
system ? builtins.currentSystem
|
||||
...
|
||||
}: ...
|
||||
```
|
||||
|
||||
So if you call this Nix expression (e.g., when you do `nix-env --install --attr pkgname`), the function will be called automatically using the value [`builtins.currentSystem`](@docroot@/language/builtins.md) for the `system` argument.
|
||||
You can override this using `--arg`, e.g., `nix-env --install --attr pkgname --arg system \"i686-freebsd\"`.
|
||||
(Note that since the argument is a Nix string literal, you have to escape the quotes.)
|
||||
|
||||
- <span id="opt-argstr">[`--argstr`](#opt-argstr)</span> *name* *value*
|
||||
|
||||
This option is like `--arg`, only the value is not a Nix expression but a string.
|
||||
So instead of `--arg system \"i686-linux\"` (the outer quotes are to keep the shell happy) you can say `--argstr system i686-linux`.
|
||||
|
||||
- <span id="opt-attr">[`--attr`](#opt-attr)</span> / `-A` *attrPath*
|
||||
|
||||
Select an attribute from the top-level Nix expression being evaluated.
|
||||
(`nix-env`, `nix-instantiate`, `nix-build` and `nix-shell` only.)
|
||||
The *attribute path* *attrPath* is a sequence of attribute names separated by dots.
|
||||
For instance, given a top-level Nix expression *e*, the attribute path `xorg.xorgserver` would cause the expression `e.xorg.xorgserver` to be used.
|
||||
See [`nix-env --install`](@docroot@/command-ref/nix-env/install.md) for some concrete examples.
|
||||
|
||||
In addition to attribute names, you can also specify array indices.
|
||||
For instance, the attribute path `foo.3.bar` selects the `bar`
|
||||
attribute of the fourth element of the array in the `foo` attribute
|
||||
of the top-level expression.
|
||||
|
||||
- <span id="opt-expr">[`--expr`](#opt-expr)</span> / `-E`
|
||||
|
||||
Interpret the command line arguments as a list of Nix expressions to be parsed and evaluated, rather than as a list of file names of Nix expressions.
|
||||
(`nix-instantiate`, `nix-build` and `nix-shell` only.)
|
||||
|
||||
For `nix-shell`, this option is commonly used to give you a shell in which you can build the packages returned by the expression.
|
||||
If you want to get a shell which contains the *built* packages ready for use, give your expression to the `nix-shell --packages` convenience flag instead.
|
||||
|
||||
- <span id="opt-I">[`-I`](#opt-I)</span> *path*
|
||||
|
||||
Add an entry to the [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path).
|
||||
This option may be given multiple times.
|
||||
Paths added through `-I` take precedence over [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH).
|
||||
|
||||
- <span id="opt-option">[`--option`](#opt-option)</span> *name* *value*
|
||||
|
||||
Set the Nix configuration option *name* to *value*.
|
||||
This overrides settings in the Nix configuration file (see `nix.conf(5)`); a combined example invocation is sketched after this list.
|
||||
|
||||
- <span id="opt-repair">[`--repair`](#opt-repair)</span>
|
||||
|
||||
Fix corrupted or missing store paths by redownloading or rebuilding them.
|
||||
Note that this is slow because it requires computing a cryptographic hash of the contents of every path in the closure of the build.
|
||||
Also note the warning under `nix-store --repair-path`.
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags.
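
As a hedged illustration of combining several of the options above (the search path entry and attribute name are placeholders, and the expression is assumed to live in the current directory's `default.nix`):

```console
$ nix-build -I nixpkgs=/path/to/nixpkgs --option max-jobs 4 --attr hello
```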
|
||||
|
|
|
@ -1,15 +0,0 @@
|
|||
\--prebuilt-only
|
||||
|
||||
\-b
|
||||
|
||||
\--attr
|
||||
|
||||
\-A
|
||||
|
||||
\--from-expression
|
||||
|
||||
\-E
|
||||
|
||||
\--from-profile
|
||||
|
||||
path
|
|
@ -1 +1,8 @@
|
|||
# Contributing
|
||||
# Development
|
||||
|
||||
Nix is developed on GitHub.
|
||||
Check the [contributing guide](https://github.com/NixOS/nix/blob/master/CONTRIBUTING.md) if you want to get involved.
|
||||
|
||||
This chapter is a collection of guides for making changes to the code and documentation.
|
||||
|
||||
If you're not sure where to start, try to [compile Nix from source](./hacking.md) and consider [making improvements to documentation](./documentation.md).
|
||||
|
|
181
doc/manual/src/contributing/documentation.md
Normal file
181
doc/manual/src/contributing/documentation.md
Normal file
|
@ -0,0 +1,181 @@
|
|||
# Contributing documentation
|
||||
|
||||
Improvements to documentation are very much appreciated, and a good way to start out with contributing to Nix.
|
||||
|
||||
This is how you can help:
|
||||
- Address [open issues with documentation](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+label%3Adocumentation)
|
||||
- Review [pull requests concerning documentation](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+label%3Adocumentation)
|
||||
|
||||
Incremental refactorings of the documentation build setup to make it faster or easier to understand and maintain are also welcome.
|
||||
|
||||
## Building the manual
|
||||
|
||||
Build the manual from scratch:
|
||||
|
||||
```console
|
||||
nix-build $(nix-instantiate)'!doc'
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```console
|
||||
nix build .#^doc
|
||||
```
|
||||
|
||||
and open `./result-doc/share/doc/nix/manual/index.html`.
|
||||
|
||||
To build the manual incrementally, [enter the development shell](./hacking.md) and run:
|
||||
|
||||
```console
|
||||
make manual-html -j $NIX_BUILD_CORES
|
||||
```
|
||||
|
||||
and open `./outputs/out/share/doc/nix/manual/language/index.html`.
|
||||
|
||||
In order to reflect changes to the [Makefile for the manual], clear all generated files before re-building:
|
||||
|
||||
[Makefile for the manual]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk
|
||||
|
||||
```console
|
||||
rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/src/command-ref/new-cli && make manual-html -j $NIX_BUILD_CORES
|
||||
```
|
||||
|
||||
## Style guide
|
||||
|
||||
The goal of this style guide is to make it such that
|
||||
- The manual is easy to search and skim for relevant information
|
||||
- Documentation sources are easy to edit
|
||||
- Changes to documentation are easy to review
|
||||
|
||||
You will notice that this is not implemented consistently yet.
|
||||
Please follow the guide when making additions or changes to existing documentation.
|
||||
Do not make sweeping changes, unless they are programmatic and can be validated easily.
|
||||
|
||||
### Language
|
||||
|
||||
This manual is [reference documentation](https://diataxis.fr/reference/).
|
||||
The typical usage pattern is to look up isolated pieces of information.
|
||||
It should therefore aim to be correct, consistent, complete, and easy to navigate at a glance.
|
||||
|
||||
- Aim for clarity and brevity.
|
||||
|
||||
Please take the time to read the [plain language guidelines](https://www.plainlanguage.gov/guidelines/) for details.
|
||||
|
||||
- Describe the subject factually.
|
||||
|
||||
In particular, do not make value judgements or recommendations.
|
||||
Check the code or add tests if in doubt.
|
||||
|
||||
- Provide complete, minimal examples, and explain them.
|
||||
|
||||
Readers should be able to try examples verbatim and get the same results as shown in the manual.
|
||||
Always describe in words what a given example does.
|
||||
|
||||
Non-trivial examples may need additional explanation, especially if they use concepts from outside the given context.
|
||||
|
||||
- Use British English.
|
||||
|
||||
This is a somewhat arbitrary choice to force consistency, and accounts for the fact that a majority of Nix users and developers are from Europe.
|
||||
|
||||
### Links and anchors
|
||||
|
||||
Reference documentation must be readable in arbitrary order.
|
||||
Readers cannot be expected to have any particular prerequisite knowledge about Nix.
|
||||
While the table of contents can provide guidance and full-text search can help, they are most likely to find what they need by following sensible cross-references.
|
||||
|
||||
- Link to technical terms
|
||||
|
||||
When mentioning Nix-specific concepts, commands, options, settings, etc., link to appropriate documentation.
|
||||
Also link to external tools or concepts, especially if their meaning may be ambiguous.
|
||||
You may also want to link to definitions of less common technical terms.
|
||||
|
||||
Then readers won't have to actively search for definitions and are more likely to discover relevant information on their own.
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> `man` and `--help` pages don't display links.
|
||||
> Use appropriate link texts such that readers of terminal output can infer search terms.
|
||||
|
||||
- Do not break existing URLs between releases.
|
||||
|
||||
There are countless links in the wild pointing to old versions of the manual.
|
||||
We want people to find up-to-date documentation when following popular advice.
|
||||
|
||||
- When moving files, update [redirects on nixos.org](https://github.com/NixOS/nixos-homepage/blob/master/netlify.toml).
|
||||
|
||||
This is especially important when moving information out of the Nix manual to other resources.
|
||||
|
||||
- When changing anchors, update [client-side redirects](https://github.com/NixOS/nix/blob/master/doc/manual/redirects.js)
|
||||
|
||||
The current setup is cumbersome, and help making better automation is appreciated.
|
||||
|
||||
The build checks for broken internal links with [`mdbook-linkcheck`].
|
||||
This happens late in the process, so [building the whole manual](#building-the-manual) is not suitable for iterating quickly.
|
||||
[`mdbook-linkcheck`] does not implement checking [URI fragments] yet.
|
||||
|
||||
[`mdbook-linkcheck`]: https://github.com/Michael-F-Bryan/mdbook-linkcheck
|
||||
[URI fragments]: https://en.wikipedia.org/wiki/URI_fragment
|
||||
|
||||
### Markdown conventions
|
||||
|
||||
The manual is written in markdown, and rendered with [mdBook](https://github.com/rust-lang/mdBook) for the web and with [lowdown](https://github.com/kristapsdz/lowdown) for `man` pages and `--help` output.
|
||||
|
||||
For supported markdown features, refer to:
|
||||
- [mdBook documentation](https://rust-lang.github.io/mdBook/format/markdown.html)
|
||||
- [lowdown documentation](https://kristaps.bsd.lv/lowdown/)
|
||||
|
||||
Please observe these guidelines to ease reviews:
|
||||
|
||||
- Write one sentence per line.
|
||||
|
||||
This makes long sentences immediately visible, and makes it easier to review changes and make direct suggestions.
|
||||
|
||||
- Use reference links – sparingly – to ease source readability.
|
||||
Put definitions close to their first use.
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
A [store object] contains a [file system object] and [references] to other store objects.
|
||||
|
||||
[store object]: @docroot@/glossary.md#gloss-store-object
|
||||
[file system object]: @docroot@/architecture/file-system-object.md
|
||||
[references]: @docroot@/glossary.md#gloss-reference
|
||||
```
|
||||
|
||||
- Use admonitions of the following form:
|
||||
|
||||
```
|
||||
> **Note**
|
||||
>
|
||||
> This is a note.
|
||||
```
|
||||
|
||||
### The `@docroot@` variable
|
||||
|
||||
`@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own.
|
||||
|
||||
If a broken link occurs in a snippet that was inserted into multiple generated files in different directories, use `@docroot@` to reference the `doc/manual/src` directory.
|
||||
|
||||
If the `@docroot@` literal appears in an error message from the [`mdbook-linkcheck`] tool, the `@docroot@` replacement needs to be applied to the generated source file that mentions it.
|
||||
See existing `@docroot@` logic in the [Makefile for the manual].
|
||||
Regular markdown files used for the manual have a base path of their own and they can use relative paths instead of `@docroot@`.
|
||||
|
||||
## API documentation
|
||||
|
||||
[Doxygen API documentation] is available online.
|
||||
You can also build and view it yourself:
|
||||
|
||||
[Doxygen API documentation]: https://hydra.nixos.org/job/nix/master/internal-api-docs/latest/download-by-type/doc/internal-api-docs
|
||||
|
||||
```console
|
||||
# nix build .#hydraJobs.internal-api-docs
|
||||
# xdg-open ./result/share/doc/nix/internal-api/html/index.html
|
||||
```
|
||||
|
||||
or inside `nix-shell` or `nix develop`:
|
||||
|
||||
```
|
||||
# make internal-api-html
|
||||
# xdg-open ./outputs/doc/share/doc/nix/internal-api/html/index.html
|
||||
```
|
|
@ -42,8 +42,8 @@ $ nix develop .#native-clang11StdenvPackages
|
|||
To build Nix itself in this shell:
|
||||
|
||||
```console
|
||||
[nix-shell]$ ./bootstrap.sh
|
||||
[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out
|
||||
[nix-shell]$ autoreconfPhase
|
||||
[nix-shell]$ configurePhase
|
||||
[nix-shell]$ make -j $NIX_BUILD_CORES
|
||||
```
|
||||
|
||||
|
@ -86,7 +86,7 @@ $ nix-shell --attr devShells.x86_64-linux.native-clang11StdenvPackages
|
|||
To build Nix itself in this shell:
|
||||
|
||||
```console
|
||||
[nix-shell]$ ./bootstrap.sh
|
||||
[nix-shell]$ autoreconfPhase
|
||||
[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out
|
||||
[nix-shell]$ make -j $NIX_BUILD_CORES
|
||||
```
|
||||
|
@ -220,68 +220,3 @@ Configure your editor to use the `clangd` from the shell, either by running it i
|
|||
> For some editors (e.g. Visual Studio Code), you may need to install a [special extension](https://open-vsx.org/extension/llvm-vs-code-extensions/vscode-clangd) for the editor to interact with `clangd`.
|
||||
> Some other editors (e.g. Emacs, Vim) need a plugin to support LSP servers in general (e.g. [lsp-mode](https://github.com/emacs-lsp/lsp-mode) for Emacs and [vim-lsp](https://github.com/prabirshrestha/vim-lsp) for vim).
|
||||
> Editor-specific setup is typically opinionated, so we will not cover it here in more detail.
|
||||
|
||||
### Checking links in the manual
|
||||
|
||||
The build checks for broken internal links.
|
||||
This happens late in the process, so `nix build` is not suitable for iterating.
|
||||
To build the manual incrementally, run:
|
||||
|
||||
```console
|
||||
make html -j $NIX_BUILD_CORES
|
||||
```
|
||||
|
||||
In order to reflect changes to the [Makefile], clear all generated files before re-building:
|
||||
|
||||
[Makefile]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk
|
||||
|
||||
```console
|
||||
rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/src/command-ref/new-cli && make html -j $NIX_BUILD_CORES
|
||||
```
|
||||
|
||||
[`mdbook-linkcheck`] does not implement checking [URI fragments] yet.
|
||||
|
||||
[`mdbook-linkcheck`]: https://github.com/Michael-F-Bryan/mdbook-linkcheck
|
||||
[URI fragments]: https://en.wikipedia.org/wiki/URI_fragment
|
||||
|
||||
#### `@docroot@` variable
|
||||
|
||||
`@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own.
|
||||
|
||||
If a broken link occurs in a snippet that was inserted into multiple generated files in different directories, use `@docroot@` to reference the `doc/manual/src` directory.
|
||||
|
||||
If the `@docroot@` literal appears in an error message from the `mdbook-linkcheck` tool, the `@docroot@` replacement needs to be applied to the generated source file that mentions it.
|
||||
See existing `@docroot@` logic in the [Makefile].
|
||||
Regular markdown files used for the manual have a base path of their own and they can use relative paths instead of `@docroot@`.
|
||||
|
||||
## API documentation
|
||||
|
||||
Doxygen API documentation is [available
|
||||
online](https://hydra.nixos.org/job/nix/master/internal-api-docs/latest/download-by-type/doc/internal-api-docs). You
|
||||
can also build and view it yourself:
|
||||
|
||||
```console
|
||||
# nix build .#hydraJobs.internal-api-docs
|
||||
# xdg-open ./result/share/doc/nix/internal-api/html/index.html
|
||||
```
|
||||
|
||||
or inside a `nix develop` shell by running:
|
||||
|
||||
```
|
||||
# make internal-api-html
|
||||
# xdg-open ./outputs/doc/share/doc/nix/internal-api/html/index.html
|
||||
```
|
||||
|
||||
## Coverage analysis
|
||||
|
||||
A coverage analysis report is [available
|
||||
online](https://hydra.nixos.org/job/nix/master/coverage/latest/download-by-type/report/coverage). You
|
||||
can build it yourself:
|
||||
|
||||
```
|
||||
# nix build .#hydraJobs.coverage
|
||||
# xdg-open ./result/coverage/index.html
|
||||
```
|
||||
|
||||
Metrics about the change in line/function coverage over time are also
|
||||
[available](https://hydra.nixos.org/job/nix/master/coverage#tabs-charts).
|
||||
|
|
|
@ -1,17 +1,88 @@
|
|||
# Running tests
|
||||
|
||||
## Coverage analysis
|
||||
|
||||
A [coverage analysis report] is available online.
|
||||
You can build it yourself:
|
||||
|
||||
[coverage analysis report]: https://hydra.nixos.org/job/nix/master/coverage/latest/download-by-type/report/coverage
|
||||
|
||||
```
|
||||
# nix build .#hydraJobs.coverage
|
||||
# xdg-open ./result/coverage/index.html
|
||||
```
|
||||
|
||||
[Extensive records of build metrics](https://hydra.nixos.org/job/nix/master/coverage#tabs-charts), such as test coverage over time, are also available online.
|
||||
|
||||
## Unit-tests
|
||||
|
||||
The unit-tests for each Nix library (`libexpr`, `libstore`, etc..) are defined
|
||||
under `src/{library_name}/tests` using the
|
||||
[googletest](https://google.github.io/googletest/) and
|
||||
[rapidcheck](https://github.com/emil-e/rapidcheck) frameworks.
|
||||
The unit tests are defined using the [googletest] and [rapidcheck] frameworks.
|
||||
|
||||
You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option.
|
||||
[googletest]: https://google.github.io/googletest/
|
||||
[rapidcheck]: https://github.com/emil-e/rapidcheck
|
||||
|
||||
### Source and header layout
|
||||
|
||||
> An example of some files, demonstrating much of what is described below
|
||||
>
|
||||
> ```
|
||||
> src
|
||||
> ├── libexpr
|
||||
> │ ├── value/context.hh
|
||||
> │ ├── value/context.cc
|
||||
> │ │
|
||||
> │ …
|
||||
> └── tests
|
||||
> │ ├── value/context.hh
|
||||
> │ ├── value/context.cc
|
||||
> │ │
|
||||
> │ …
|
||||
> │
|
||||
> ├── unit-test-data
|
||||
> │ ├── libstore
|
||||
> │ │ ├── worker-protocol/content-address.bin
|
||||
> │ │ …
|
||||
> │ …
|
||||
> …
|
||||
> ```
|
||||
|
||||
The unit tests for each Nix library (`libnixexpr`, `libnixstore`, etc.) live inside a directory `src/${library_shortname}/tests` within the directory for the library (`src/${library_shortname}`).
|
||||
|
||||
The test data lives in `unit-test-data`, with one subdirectory per library, named after the directory where the corresponding code lives.
|
||||
For example, `libnixstore` code is in `src/libstore`, and its test data is in `unit-test-data/libstore`.
|
||||
The path to the `unit-test-data` directory is passed to the unit test executable with the environment variable `_NIX_TEST_UNIT_DATA`.
|
||||
|
||||
> **Note**
|
||||
> Due to the way googletest works, downstream unit test executables will actually include and re-run upstream library tests.
|
||||
> Therefore it is important that the same value for `_NIX_TEST_UNIT_DATA` be used with the tests for each library.
|
||||
> That is why we have the test data nested within a single `unit-test-data` directory.
|
||||
|
||||
### Running tests
|
||||
|
||||
You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`.
|
||||
Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option, or the `GTEST_FILTER` environment variable.
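
For example, a hedged sketch of running only the worker-protocol tests of `libstore` via the environment variable (the test suite name is illustrative):

```console
$ GTEST_FILTER='WorkerProtoTest.*' make libstore-tests-exe_RUN
```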
|
||||
|
||||
### Characterisation testing { #characterisation-testing-unit }
|
||||
|
||||
See [functional characterisation testing](#characterisation-testing-functional) for a broader discussion of characterisation testing.
|
||||
|
||||
Like with the functional characterisation, `_NIX_TEST_ACCEPT=1` is also used.
|
||||
For example:
|
||||
```shell-session
|
||||
$ _NIX_TEST_ACCEPT=1 make libstore-tests-exe_RUN
|
||||
...
|
||||
[ SKIPPED ] WorkerProtoTest.string_read
|
||||
[ SKIPPED ] WorkerProtoTest.string_write
|
||||
[ SKIPPED ] WorkerProtoTest.storePath_read
|
||||
[ SKIPPED ] WorkerProtoTest.storePath_write
|
||||
...
|
||||
```
|
||||
will regenerate the "golden master" expected result for the `libnixstore` characterisation tests.
|
||||
The characterisation tests will mark themselves "skipped" since they regenerated the expected result instead of actually testing anything.
|
||||
|
||||
## Functional tests
|
||||
|
||||
The functional tests reside under the `tests` directory and are listed in `tests/local.mk`.
|
||||
The functional tests reside under the `tests/functional` directory and are listed in `tests/functional/local.mk`.
|
||||
Each test is a bash script.
|
||||
|
||||
### Running the whole test suite
|
||||
|
@ -20,8 +91,8 @@ The whole test suite can be run with:
|
|||
|
||||
```shell-session
|
||||
$ make install && make installcheck
|
||||
ran test tests/foo.sh... [PASS]
|
||||
ran test tests/bar.sh... [PASS]
|
||||
ran test tests/functional/foo.sh... [PASS]
|
||||
ran test tests/functional/bar.sh... [PASS]
|
||||
...
|
||||
```
|
||||
|
||||
|
@ -29,14 +100,14 @@ ran test tests/bar.sh... [PASS]
|
|||
|
||||
Sometimes it is useful to group related tests so they can be easily run together without running the entire test suite.
|
||||
Each test group is in a subdirectory of `tests`.
|
||||
For example, `tests/ca/local.mk` defines a `ca` test group for content-addressed derivation outputs.
|
||||
For example, `tests/functional/ca/local.mk` defines a `ca` test group for content-addressed derivation outputs.
|
||||
|
||||
That test group can be run like this:
|
||||
|
||||
```shell-session
|
||||
$ make ca.test-group -j50
|
||||
ran test tests/ca/nix-run.sh... [PASS]
|
||||
ran test tests/ca/import-derivation.sh... [PASS]
|
||||
ran test tests/functional/ca/nix-run.sh... [PASS]
|
||||
ran test tests/functional/ca/import-derivation.sh... [PASS]
|
||||
...
|
||||
```
|
||||
|
||||
|
@ -55,21 +126,21 @@ install-tests-groups += $(test-group-name)
|
|||
Individual tests can be run with `make`:
|
||||
|
||||
```shell-session
|
||||
$ make tests/${testName}.sh.test
|
||||
ran test tests/${testName}.sh... [PASS]
|
||||
$ make tests/functional/${testName}.sh.test
|
||||
ran test tests/functional/${testName}.sh... [PASS]
|
||||
```
|
||||
|
||||
or without `make`:
|
||||
|
||||
```shell-session
|
||||
$ ./mk/run-test.sh tests/${testName}.sh
|
||||
ran test tests/${testName}.sh... [PASS]
|
||||
$ ./mk/run-test.sh tests/functional/${testName}.sh
|
||||
ran test tests/functional/${testName}.sh... [PASS]
|
||||
```
|
||||
|
||||
To see the complete output, one can also run:
|
||||
|
||||
```shell-session
|
||||
$ ./mk/debug-test.sh tests/${testName}.sh
|
||||
$ ./mk/debug-test.sh tests/functional/${testName}.sh
|
||||
+ foo
|
||||
output from foo
|
||||
+ bar
|
||||
|
@ -104,7 +175,7 @@ edit it like so:
|
|||
Then, running the test with `./mk/debug-test.sh` will drop you into GDB once the script reaches that point:
|
||||
|
||||
```shell-session
|
||||
$ ./mk/debug-test.sh tests/${testName}.sh
|
||||
$ ./mk/debug-test.sh tests/functional/${testName}.sh
|
||||
...
|
||||
+ gdb blash blub
|
||||
GNU gdb (GDB) 12.1
|
||||
|
@ -115,17 +186,29 @@ GNU gdb (GDB) 12.1
|
|||
One can debug the Nix invocation in all the usual ways.
|
||||
For example, enter `run` to start the Nix invocation.
|
||||
|
||||
### Characterization testing
|
||||
### Troubleshooting
|
||||
|
||||
Occasionally, Nix utilizes a technique called [Characterization Testing](https://en.wikipedia.org/wiki/Characterization_test) as part of the functional tests.
|
||||
Sometimes running tests in the development shell may leave artefacts in the local repository.
|
||||
To remove any traces of that:
|
||||
|
||||
```console
|
||||
git clean -x --force tests
|
||||
```
|
||||
|
||||
### Characterisation testing { #characterisation-testing-functional }
|
||||
|
||||
Occasionally, Nix uses a technique called [Characterisation Testing](https://en.wikipedia.org/wiki/Characterization_test) as part of the functional tests.
|
||||
This technique is to include the exact output/behavior of a former version of Nix in a test in order to check that Nix continues to produce the same behavior going forward.
|
||||
|
||||
For example, this technique is used for the language tests, to check both the printed final value if evaluation was successful, and any errors and warnings encountered.
|
||||
|
||||
It is frequently useful to regenerate the expected output.
|
||||
To do that, rerun the failed test with `_NIX_TEST_ACCEPT=1`.
|
||||
(At least, this is the convention we've used for `tests/lang.sh`.
|
||||
If we add more characterization testing we should always strive to be consistent.)
|
||||
To do that, rerun the failed test(s) with `_NIX_TEST_ACCEPT=1`.
|
||||
For example:
|
||||
```bash
|
||||
_NIX_TEST_ACCEPT=1 make tests/functional/lang.sh.test
|
||||
```
|
||||
This convention is shared with the [characterisation unit tests](#characterisation-testing-unit) too.
|
||||
|
||||
An interesting situation to document is the case when these tests are "overfitted".
|
||||
The language tests are, again, an example of this.
|
||||
|
@ -138,7 +221,7 @@ Diagnostic outputs are indeed not a stable interface, but they still are importa
|
|||
By recording the expected output, the test suite guards against accidental changes, and ensures the *result* (not just the code that implements it) of the diagnostic code paths is under code review.
|
||||
Regressions are caught, and improvements always show up in code review.
|
||||
|
||||
To ensure that characterization testing doesn't make it harder to intentionally change these interfaces, there always must be an easy way to regenerate the expected output, as we do with `_NIX_TEST_ACCEPT=1`.
|
||||
To ensure that characterisation testing doesn't make it harder to intentionally change these interfaces, there always must be an easy way to regenerate the expected output, as we do with `_NIX_TEST_ACCEPT=1`.
|
||||
|
||||
## Integration tests
|
||||
|
||||
|
@ -152,7 +235,7 @@ You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-
|
|||
|
||||
After a one-time setup, the Nix repository's GitHub Actions continuous integration (CI) workflow can test the installer each time you push to a branch.
|
||||
|
||||
Creating a Cachix cache for your installer tests and adding its authorization token to GitHub enables [two installer-specific jobs in the CI workflow](https://github.com/NixOS/nix/blob/88a45d6149c0e304f6eb2efcc2d7a4d0d569f8af/.github/workflows/ci.yml#L50-L91):
|
||||
Creating a Cachix cache for your installer tests and adding its authorisation token to GitHub enables [two installer-specific jobs in the CI workflow](https://github.com/NixOS/nix/blob/88a45d6149c0e304f6eb2efcc2d7a4d0d569f8af/.github/workflows/ci.yml#L50-L91):
|
||||
|
||||
- The `installer` job generates installers for the platforms below and uploads them to your Cachix cache:
|
||||
- `x86_64-linux`
|
||||
|
|
|
@ -1,236 +1,283 @@
|
|||
# Glossary
|
||||
|
||||
- [derivation]{#gloss-derivation}\
|
||||
A description of a build task. The result of a derivation is a
|
||||
store object. Derivations are typically specified in Nix expressions
|
||||
using the [`derivation` primitive](./language/derivations.md). These are
|
||||
translated into low-level *store derivations* (implicitly by
|
||||
`nix-env` and `nix-build`, or explicitly by `nix-instantiate`).
|
||||
- [derivation]{#gloss-derivation}
|
||||
|
||||
[derivation]: #gloss-derivation
|
||||
A description of a build task. The result of a derivation is a
|
||||
store object. Derivations are typically specified in Nix expressions
|
||||
using the [`derivation` primitive](./language/derivations.md). These are
|
||||
translated into low-level *store derivations* (implicitly by
|
||||
`nix-env` and `nix-build`, or explicitly by `nix-instantiate`).
|
||||
|
||||
- [store derivation]{#gloss-store-derivation}\
|
||||
A [derivation] represented as a `.drv` file in the [store].
|
||||
It has a [store path], like any [store object].
|
||||
[derivation]: #gloss-derivation
|
||||
|
||||
Example: `/nix/store/g946hcz4c8mdvq2g8vxx42z51qb71rvp-git-2.38.1.drv`
|
||||
- [store derivation]{#gloss-store-derivation}
|
||||
|
||||
See [`nix derivation show`](./command-ref/new-cli/nix3-derivation-show.md) (experimental) for displaying the contents of store derivations.
|
||||
A [derivation] represented as a `.drv` file in the [store].
|
||||
It has a [store path], like any [store object].
|
||||
|
||||
[store derivation]: #gloss-store-derivation
|
||||
Example: `/nix/store/g946hcz4c8mdvq2g8vxx42z51qb71rvp-git-2.38.1.drv`
|
||||
|
||||
- [instantiate]{#gloss-instantiate}, instantiation\
|
||||
Translate a [derivation] into a [store derivation].
|
||||
See [`nix derivation show`](./command-ref/new-cli/nix3-derivation-show.md) (experimental) for displaying the contents of store derivations.
|
||||
|
||||
See [`nix-instantiate`](./command-ref/nix-instantiate.md).
|
||||
[store derivation]: #gloss-store-derivation
|
||||
|
||||
[instantiate]: #gloss-instantiate
|
||||
- [instantiate]{#gloss-instantiate}, instantiation
|
||||
|
||||
- [realise]{#gloss-realise}, realisation\
|
||||
Ensure a [store path] is [valid][validity].
|
||||
Translate a [derivation] into a [store derivation].
|
||||
|
||||
This means either running the `builder` executable as specified in the corresponding [derivation] or fetching a pre-built [store object] from a [substituter].
|
||||
See [`nix-instantiate`](./command-ref/nix-instantiate.md).
|
||||
|
||||
See [`nix-build`](./command-ref/nix-build.md) and [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md).
|
||||
[instantiate]: #gloss-instantiate
|
||||
|
||||
See [`nix build`](./command-ref/new-cli/nix3-build.md) (experimental).
|
||||
- [realise]{#gloss-realise}, realisation
|
||||
|
||||
[realise]: #gloss-realise
|
||||
Ensure a [store path] is [valid][validity].
|
||||
|
||||
- [content-addressed derivation]{#gloss-content-addressed-derivation}\
|
||||
A derivation which has the
|
||||
[`__contentAddressed`](./language/advanced-attributes.md#adv-attr-__contentAddressed)
|
||||
attribute set to `true`.
|
||||
This can be achieved by:
|
||||
- Fetching a pre-built [store object] from a [substituter]
|
||||
- Running the [`builder`](@docroot@/language/derivations.md#attr-builder) executable as specified in the corresponding [derivation]
|
||||
- Delegating to a [remote builder](@docroot@/advanced-topics/distributed-builds.html) and retrieving the outputs
|
||||
<!-- TODO: link [running] to build process page, #8888 -->
|
||||
|
||||
- [fixed-output derivation]{#gloss-fixed-output-derivation}\
|
||||
A derivation which includes the
|
||||
[`outputHash`](./language/advanced-attributes.md#adv-attr-outputHash) attribute.
|
||||
See [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md) for a detailed description of the algorithm.
|
||||
|
||||
- [store]{#gloss-store}\
|
||||
The location in the file system where store objects live. Typically
|
||||
`/nix/store`.
|
||||
See also [`nix-build`](./command-ref/nix-build.md) and [`nix build`](./command-ref/new-cli/nix3-build.md) (experimental).
|
||||
|
||||
From the perspective of the location where Nix is
|
||||
invoked, the Nix store can be referred to
|
||||
as a "_local_" or a "_remote_" one:
|
||||
[realise]: #gloss-realise
|
||||
|
||||
+ A [local store]{#gloss-local-store} exists on the filesystem of
|
||||
the machine where Nix is invoked. You can use other
|
||||
local stores by passing the `--store` flag to the
|
||||
`nix` command. Local stores can be used for building derivations.
|
||||
- [content-addressed derivation]{#gloss-content-addressed-derivation}
|
||||
|
||||
+ A *remote store* exists anywhere other than the
|
||||
local filesystem. One example is the `/nix/store`
|
||||
directory on another machine, accessed via `ssh` or
|
||||
served by the `nix-serve` Perl script.
|
||||
A derivation which has the
|
||||
[`__contentAddressed`](./language/advanced-attributes.md#adv-attr-__contentAddressed)
|
||||
attribute set to `true`.
|
||||
|
||||
[store]: #gloss-store
|
||||
[local store]: #gloss-local-store
|
||||
- [fixed-output derivation]{#gloss-fixed-output-derivation}
|
||||
|
||||
- [chroot store]{#gloss-chroot-store}\
|
||||
A [local store] whose canonical path is anything other than `/nix/store`.
|
||||
A derivation which includes the
|
||||
[`outputHash`](./language/advanced-attributes.md#adv-attr-outputHash) attribute.
|
||||
|
||||
- [binary cache]{#gloss-binary-cache}\
|
||||
A *binary cache* is a Nix store which uses a different format: its
|
||||
metadata and signatures are kept in `.narinfo` files rather than in a
|
||||
[Nix database]. This different format simplifies serving store objects
|
||||
over the network, but cannot host builds. Examples of binary caches
|
||||
include S3 buckets and the [NixOS binary cache](https://cache.nixos.org).
|
||||
- [store]{#gloss-store}
|
||||
|
||||
- [store path]{#gloss-store-path}\
|
||||
The location of a [store object] in the file system, i.e., an
|
||||
immediate child of the Nix store directory.
|
||||
The location in the file system where store objects live. Typically
|
||||
`/nix/store`.
|
||||
|
||||
Example: `/nix/store/a040m110amc4h71lds2jmr8qrkj2jhxd-git-2.38.1`
|
||||
From the perspective of the location where Nix is
|
||||
invoked, the Nix store can be referred to
|
||||
as a "_local_" or a "_remote_" one:
|
||||
|
||||
[store path]: #gloss-store-path
|
||||
+ A [local store]{#gloss-local-store} exists on the filesystem of
|
||||
the machine where Nix is invoked. You can use other
|
||||
local stores by passing the `--store` flag to the
|
||||
`nix` command. Local stores can be used for building derivations.
|
||||
|
||||
- [file system object]{#gloss-store-object}\
|
||||
The Nix data model for representing simplified file system data.
|
||||
+ A *remote store* exists anywhere other than the
|
||||
local filesystem. One example is the `/nix/store`
|
||||
directory on another machine, accessed via `ssh` or
|
||||
served by the `nix-serve` Perl script.
|
||||
|
||||
See [File System Object](@docroot@/architecture/file-system-object.md) for details.
|
||||
[store]: #gloss-store
|
||||
[local store]: #gloss-local-store
|
||||
|
||||
[file system object]: #gloss-file-system-object
|
||||
- [chroot store]{#gloss-chroot-store}
|
||||
|
||||
- [store object]{#gloss-store-object}\
|
||||
A [local store] whose canonical path is anything other than `/nix/store`.
|
||||
|
||||
A store object consists of a [file system object], [reference]s to other store objects, and other metadata.
|
||||
It can be referred to by a [store path].
|
||||
- [binary cache]{#gloss-binary-cache}
|
||||
|
||||
[store object]: #gloss-store-object
|
||||
A *binary cache* is a Nix store which uses a different format: its
|
||||
metadata and signatures are kept in `.narinfo` files rather than in a
|
||||
[Nix database]. This different format simplifies serving store objects
|
||||
over the network, but cannot host builds. Examples of binary caches
|
||||
include S3 buckets and the [NixOS binary cache](https://cache.nixos.org).
|
||||
|
||||
- [input-addressed store object]{#gloss-input-addressed-store-object}\
|
||||
A store object produced by building a
|
||||
non-[content-addressed](#gloss-content-addressed-derivation),
|
||||
non-[fixed-output](#gloss-fixed-output-derivation)
|
||||
derivation.
|
||||
- [store path]{#gloss-store-path}
|
||||
|
||||
- [output-addressed store object]{#gloss-output-addressed-store-object}\
|
||||
A [store object] whose [store path] is determined by its contents.
|
||||
This includes derivations, the outputs of [content-addressed derivations](#gloss-content-addressed-derivation), and the outputs of [fixed-output derivations](#gloss-fixed-output-derivation).
|
||||
The location of a [store object] in the file system, i.e., an
|
||||
immediate child of the Nix store directory.
|
||||
|
||||
- [substitute]{#gloss-substitute}\
|
||||
A substitute is a command invocation stored in the [Nix database] that
|
||||
describes how to build a store object, bypassing the normal build
|
||||
mechanism (i.e., derivations). Typically, the substitute builds the
|
||||
store object by downloading a pre-built version of the store object
|
||||
from some server.
|
||||
Example: `/nix/store/a040m110amc4h71lds2jmr8qrkj2jhxd-git-2.38.1`
|
||||
|
||||
- [substituter]{#gloss-substituter}\
|
||||
An additional [store]{#gloss-store} from which Nix can obtain store objects instead of building them.
|
||||
Often the substituter is a [binary cache](#gloss-binary-cache), but any store can serve as substituter.
|
||||
[store path]: #gloss-store-path
|
||||
|
||||
See the [`substituters` configuration option](./command-ref/conf-file.md#conf-substituters) for details.
|
||||
- [file system object]{#gloss-store-object}
|
||||
|
||||
[substituter]: #gloss-substituter
|
||||
The Nix data model for representing simplified file system data.
|
||||
|
||||
- [purity]{#gloss-purity}\
|
||||
The assumption that equal Nix derivations when run always produce
|
||||
the same output. This cannot be guaranteed in general (e.g., a
|
||||
builder can rely on external inputs such as the network or the
|
||||
system time) but the Nix model assumes it.
|
||||
See [File System Object](@docroot@/architecture/file-system-object.md) for details.
|
||||
|
||||
- [Nix database]{#gloss-nix-database}\
|
||||
An SQLite database to track [reference]s between [store object]s.
|
||||
This is an implementation detail of the [local store].
|
||||
[file system object]: #gloss-file-system-object
|
||||
|
||||
Default location: `/nix/var/nix/db`.
|
||||
- [store object]{#gloss-store-object}
|
||||
|
||||
[Nix database]: #gloss-nix-database
|
||||
A store object consists of a [file system object], [reference]s to other store objects, and other metadata.
|
||||
It can be referred to by a [store path].
|
||||
|
||||
- [Nix expression]{#gloss-nix-expression}\
|
||||
A high-level description of software packages and compositions
|
||||
thereof. Deploying software using Nix entails writing Nix
|
||||
expressions for your packages. Nix expressions are translated to
|
||||
derivations that are stored in the Nix store. These derivations can
|
||||
then be built.
|
||||
[store object]: #gloss-store-object
|
||||
|
||||
- [reference]{#gloss-reference}\
|
||||
A [store object] `O` is said to have a *reference* to a store object `P` if a [store path] to `P` appears in the contents of `O`.
|
||||
- [IFD]{#gloss-ifd}
|
||||
|
||||
Store objects can refer to both other store objects and themselves.
|
||||
References from a store object to itself are called *self-references*.
|
||||
References other than a self-reference must not form a cycle.
|
||||
[Import From Derivation](./language/import-from-derivation.md)
|
||||
|
||||
[reference]: #gloss-reference
|
||||
- [input-addressed store object]{#gloss-input-addressed-store-object}
|
||||
|
||||
- [reachable]{#gloss-reachable}\
|
||||
A store path `Q` is reachable from another store path `P` if `Q`
|
||||
is in the *closure* of the *references* relation.
|
||||
A store object produced by building a
|
||||
non-[content-addressed](#gloss-content-addressed-derivation),
|
||||
non-[fixed-output](#gloss-fixed-output-derivation)
|
||||
derivation.
|
||||
|
||||
- [closure]{#gloss-closure}\
|
||||
The closure of a store path is the set of store paths that are
|
||||
directly or indirectly “reachable” from that store path; that is,
|
||||
it’s the closure of the path under the *references* relation. For
|
||||
a package, the closure of its derivation is equivalent to the
|
||||
build-time dependencies, while the closure of its output path is
|
||||
equivalent to its runtime dependencies. For correct deployment it
|
||||
is necessary to deploy whole closures, since otherwise at runtime
|
||||
files could be missing. The command `nix-store --query --requisites` prints out
|
||||
closures of store paths.
|
||||
- [output-addressed store object]{#gloss-output-addressed-store-object}
|
||||
|
||||
As an example, if the [store object] at path `P` contains a [reference]
|
||||
to a store object at path `Q`, then `Q` is in the closure of `P`. Further, if `Q`
|
||||
references `R` then `R` is also in the closure of `P`.
|
||||
A [store object] whose [store path] is determined by its contents.
|
||||
This includes derivations, the outputs of [content-addressed derivations](#gloss-content-addressed-derivation), and the outputs of [fixed-output derivations](#gloss-fixed-output-derivation).
|
||||
|
||||
[closure]: #gloss-closure
|
||||
- [substitute]{#gloss-substitute}
|
||||
|
||||
- [output path]{#gloss-output-path}\
|
||||
A [store path] produced by a [derivation].
|
||||
A substitute is a command invocation stored in the [Nix database] that
|
||||
describes how to build a store object, bypassing the normal build
|
||||
mechanism (i.e., derivations). Typically, the substitute builds the
|
||||
store object by downloading a pre-built version of the store object
|
||||
from some server.
|
||||
|
||||
[output path]: #gloss-output-path
|
||||
- [substituter]{#gloss-substituter}
|
||||
|
||||
- [deriver]{#gloss-deriver}\
|
||||
The [store derivation] that produced an [output path].
|
||||
An additional [store]{#gloss-store} from which Nix can obtain store objects instead of building them.
|
||||
Often the substituter is a [binary cache](#gloss-binary-cache), but any store can serve as substituter.
|
||||
|
||||
- [validity]{#gloss-validity}\
|
||||
A store path is valid if all [store object]s in its [closure] can be read from the [store].
|
||||
See the [`substituters` configuration option](./command-ref/conf-file.md#conf-substituters) for details.
|
||||
|
||||
For a [local store], this means:
|
||||
- The store path leads to an existing [store object] in that [store].
|
||||
- The store path is listed in the [Nix database] as being valid.
|
||||
- All paths in the store path's [closure] are valid.
|
||||
[substituter]: #gloss-substituter
|
||||
|
||||
[validity]: #gloss-validity
|
||||
- [purity]{#gloss-purity}
|
||||
|
||||
- [user environment]{#gloss-user-env}\
|
||||
An automatically generated store object that consists of a set of
|
||||
symlinks to “active” applications, i.e., other store paths. These
|
||||
are generated automatically by
|
||||
[`nix-env`](./command-ref/nix-env.md). See *profiles*.
|
||||
The assumption that equal Nix derivations when run always produce
|
||||
the same output. This cannot be guaranteed in general (e.g., a
|
||||
builder can rely on external inputs such as the network or the
|
||||
system time) but the Nix model assumes it.
|
||||
|
||||
- [profile]{#gloss-profile}\
|
||||
A symlink to the current *user environment* of a user, e.g.,
|
||||
`/nix/var/nix/profiles/default`.
|
||||
- [Nix database]{#gloss-nix-database}
|
||||
|
||||
- [installable]{#gloss-installable}\
|
||||
Something that can be realised in the Nix store.
|
||||
An SQLite database to track [reference]s between [store object]s.
|
||||
This is an implementation detail of the [local store].
|
||||
|
||||
See [installables](./command-ref/new-cli/nix.md#installables) for [`nix` commands](./command-ref/new-cli/nix.md) (experimental) for details.
|
||||
Default location: `/nix/var/nix/db`.
|
||||
|
||||
- [NAR]{#gloss-nar}\
|
||||
A *N*ix *AR*chive. This is a serialisation of a path in the Nix
|
||||
store. It can contain regular files, directories and symbolic
|
||||
links. NARs are generated and unpacked using `nix-store --dump`
|
||||
and `nix-store --restore`.
|
||||
[Nix database]: #gloss-nix-database
|
||||
|
||||
- [`∅`]{#gloss-emtpy-set}\
|
||||
The empty set symbol. In the context of profile history, this denotes that a package is not present in a particular version of the profile.
|
||||
- [Nix expression]{#gloss-nix-expression}
|
||||
|
||||
- [`ε`]{#gloss-epsilon}\
|
||||
The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute.
|
||||
A high-level description of software packages and compositions
|
||||
thereof. Deploying software using Nix entails writing Nix
|
||||
expressions for your packages. Nix expressions are translated to
|
||||
derivations that are stored in the Nix store. These derivations can
|
||||
then be built.
|
||||
|
||||
- [string interpolation]{#gloss-string-interpolation}\
|
||||
Expanding expressions enclosed in `${ }` within a [string], [path], or [attribute name].
|
||||
- [reference]{#gloss-reference}
|
||||
|
||||
See [String interpolation](./language/string-interpolation.md) for details.
|
||||
A [store object] `O` is said to have a *reference* to a store object `P` if a [store path] to `P` appears in the contents of `O`.
|
||||
|
||||
[string]: ./language/values.md#type-string
|
||||
[path]: ./language/values.md#type-path
|
||||
[attribute name]: ./language/values.md#attribute-set
|
||||
Store objects can refer to both other store objects and themselves.
|
||||
References from a store object to itself are called *self-references*.
|
||||
References other than a self-reference must not form a cycle.
|
||||
|
||||
- [experimental feature]{#gloss-experimental-feature}\
|
||||
Not yet stabilized functionality guarded by named experimental feature flags.
|
||||
These flags are enabled or disabled with the [`experimental-features`](./command-ref/conf-file.html#conf-experimental-features) setting.
|
||||
[reference]: #gloss-reference
|
||||
|
||||
See the contribution guide on the [purpose and lifecycle of experimental features](@docroot@/contributing/experimental-features.md).
|
||||
- [reachable]{#gloss-reachable}
|
||||
|
||||
A store path `Q` is reachable from another store path `P` if `Q`
|
||||
is in the *closure* of the *references* relation.
|
||||
|
||||
- [closure]{#gloss-closure}
|
||||
|
||||
The closure of a store path is the set of store paths that are
|
||||
directly or indirectly “reachable” from that store path; that is,
|
||||
it’s the closure of the path under the *references* relation. For
|
||||
a package, the closure of its derivation is equivalent to the
|
||||
build-time dependencies, while the closure of its output path is
|
||||
equivalent to its runtime dependencies. For correct deployment it
|
||||
is necessary to deploy whole closures, since otherwise at runtime
|
||||
files could be missing. The command `nix-store --query --requisites` prints out
|
||||
closures of store paths.
|
||||
|
||||
As an example, if the [store object] at path `P` contains a [reference]
|
||||
to a store object at path `Q`, then `Q` is in the closure of `P`. Further, if `Q`
|
||||
references `R` then `R` is also in the closure of `P`.
|
||||
|
||||
[closure]: #gloss-closure
|
||||
|
||||
- [output]{#gloss-output}
|
||||
|
||||
A [store object] produced by a [derivation].
|
||||
See [the `outputs` argument to the `derivation` function](@docroot@/language/derivations.md#attr-outputs) for details.
|
||||
|
||||
[output]: #gloss-output
|
||||
|
||||
- [output path]{#gloss-output-path}
|
||||
|
||||
The [store path] to the [output] of a [derivation].
|
||||
|
||||
[output path]: #gloss-output-path
|
||||
|
||||
- [deriver]{#gloss-deriver}
|
||||
|
||||
The [store derivation] that produced an [output path].
|
||||
|
||||
- [validity]{#gloss-validity}
|
||||
|
||||
A store path is valid if all [store object]s in its [closure] can be read from the [store].
|
||||
|
||||
For a [local store], this means:
|
||||
- The store path leads to an existing [store object] in that [store].
|
||||
- The store path is listed in the [Nix database] as being valid.
|
||||
- All paths in the store path's [closure] are valid.
|
||||
|
||||
[validity]: #gloss-validity
|
||||
|
||||
- [user environment]{#gloss-user-env}
|
||||
|
||||
An automatically generated store object that consists of a set of
|
||||
symlinks to “active” applications, i.e., other store paths. These
|
||||
are generated automatically by
|
||||
[`nix-env`](./command-ref/nix-env.md). See *profiles*.
|
||||
|
||||
- [profile]{#gloss-profile}
|
||||
|
||||
A symlink to the current *user environment* of a user, e.g.,
|
||||
`/nix/var/nix/profiles/default`.
|
||||
|
||||
- [installable]{#gloss-installable}
|
||||
|
||||
Something that can be realised in the Nix store.
|
||||
|
||||
See [installables](./command-ref/new-cli/nix.md#installables) for [`nix` commands](./command-ref/new-cli/nix.md) (experimental) for details.
|
||||
|
||||
- [NAR]{#gloss-nar}
|
||||
|
||||
A *N*ix *AR*chive. This is a serialisation of a path in the Nix
|
||||
store. It can contain regular files, directories and symbolic
|
||||
links. NARs are generated and unpacked using `nix-store --dump`
|
||||
and `nix-store --restore`.
|
||||
|
||||
- [`∅`]{#gloss-emtpy-set}
|
||||
|
||||
The empty set symbol. In the context of profile history, this denotes that a package is not present in a particular version of the profile.
|
||||
|
||||
- [`ε`]{#gloss-epsilon}
|
||||
|
||||
The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute.
|
||||
|
||||
- [string interpolation]{#gloss-string-interpolation}
|
||||
|
||||
Expanding expressions enclosed in `${ }` within a [string], [path], or [attribute name].
|
||||
|
||||
See [String interpolation](./language/string-interpolation.md) for details.
|
||||
|
||||
[string]: ./language/values.md#type-string
|
||||
[path]: ./language/values.md#type-path
|
||||
[attribute name]: ./language/values.md#attribute-set
|
||||
|
||||
- [experimental feature]{#gloss-experimental-feature}
|
||||
|
||||
Not yet stabilized functionality guarded by named experimental feature flags.
|
||||
These flags are enabled or disabled with the [`experimental-features`](./command-ref/conf-file.html#conf-experimental-features) setting.
|
||||
|
||||
See the contribution guide on the [purpose and lifecycle of experimental features](@docroot@/contributing/experimental-features.md).
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
After cloning Nix's Git repository, issue the following commands:
|
||||
|
||||
```console
|
||||
$ ./bootstrap.sh
|
||||
$ autoreconf -vfi
|
||||
$ ./configure options...
|
||||
$ make
|
||||
$ make install
|
||||
|
|
|
@ -3,14 +3,14 @@
|
|||
To run the latest stable release of Nix with Docker run the following command:
|
||||
|
||||
```console
|
||||
$ docker run -ti nixos/nix
|
||||
Unable to find image 'nixos/nix:latest' locally
|
||||
latest: Pulling from nixos/nix
|
||||
$ docker run -ti ghcr.io/nixos/nix
|
||||
Unable to find image 'ghcr.io/nixos/nix:latest' locally
|
||||
latest: Pulling from ghcr.io/nixos/nix
|
||||
5843afab3874: Pull complete
|
||||
b52bf13f109c: Pull complete
|
||||
1e2415612aa3: Pull complete
|
||||
Digest: sha256:27f6e7f60227e959ee7ece361f75d4844a40e1cc6878b6868fe30140420031ff
|
||||
Status: Downloaded newer image for nixos/nix:latest
|
||||
Status: Downloaded newer image for ghcr.io/nixos/nix:latest
|
||||
35ca4ada6e96:/# nix --version
|
||||
nix (Nix) 2.3.12
|
||||
35ca4ada6e96:/# exit
|
||||
|
|
|
@ -112,6 +112,13 @@ Derivations can declare some infrequently used optional attributes.
|
|||
> environmental variables come from the environment of the
|
||||
> `nix-build`.
|
||||
|
||||
If the [`configurable-impure-env` experimental
|
||||
feature](@docroot@/contributing/experimental-features.md#xp-feature-configurable-impure-env)
|
||||
is enabled, these environment variables can also be controlled
|
||||
through the
|
||||
[`impure-env`](@docroot@/command-ref/conf-file.md#conf-impure-env)
|
||||
configuration setting.
|
||||
|
||||
- [`outputHash`]{#adv-attr-outputHash}; [`outputHashAlgo`]{#adv-attr-outputHashAlgo}; [`outputHashMode`]{#adv-attr-outputHashMode}\
|
||||
These attributes declare that the derivation is a so-called
|
||||
*fixed-output derivation*, which means that a cryptographic hash of
|
||||
|
@ -229,6 +236,8 @@ Derivations can declare some infrequently used optional attributes.
|
|||
[`outputHashAlgo`](#adv-attr-outputHashAlgo)
|
||||
like for *fixed-output derivations* (see above).
|
||||
|
||||
It also implicitly requires that the machine to build the derivation must have the `ca-derivations` [system feature](@docroot@/command-ref/conf-file.md#conf-system-features).
|
||||
|
||||
- [`passAsFile`]{#adv-attr-passAsFile}\
|
||||
A list of names of attributes that should be passed via files rather
|
||||
than environment variables. For example, if you have
|
||||
|
@ -261,6 +270,9 @@ Derivations can declare some infrequently used optional attributes.
|
|||
useful for very trivial derivations (such as `writeText` in Nixpkgs)
|
||||
that are cheaper to build than to substitute from a binary cache.
|
||||
|
||||
You may disable the effects of this attribute by enabling the
|
||||
`always-allow-substitutes` configuration option in Nix.
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> You need to have a builder configured which satisfies the
|
||||
|
@ -271,18 +283,21 @@ Derivations can declare some infrequently used optional attributes.
|
|||
|
||||
- [`__structuredAttrs`]{#adv-attr-structuredAttrs}\
|
||||
If the special attribute `__structuredAttrs` is set to `true`, the other derivation
|
||||
attributes are serialised in JSON format and made available to the
|
||||
builder via the file `.attrs.json` in the builder’s temporary
|
||||
directory. This obviates the need for [`passAsFile`](#adv-attr-passAsFile) since JSON files
|
||||
have no size restrictions, unlike process environments.
|
||||
attributes are serialised into a file in JSON format. The environment variable
|
||||
`NIX_ATTRS_JSON_FILE` points to the exact location of that file both in a build
|
||||
and a [`nix-shell`](../command-ref/nix-shell.md). This obviates the need for
|
||||
[`passAsFile`](#adv-attr-passAsFile) since JSON files have no size restrictions,
|
||||
unlike process environments.
|
||||
|
||||
It also makes it possible to tweak derivation settings in a structured way; see
|
||||
[`outputChecks`](#adv-attr-outputChecks) for example.
|
||||
|
||||
As a convenience to Bash builders,
|
||||
Nix writes a script named `.attrs.sh` to the builder’s directory
|
||||
that initialises shell variables corresponding to all attributes
|
||||
that are representable in Bash. This includes non-nested
|
||||
Nix writes a script that initialises shell variables
|
||||
corresponding to all attributes that are representable in Bash. The
|
||||
environment variable `NIX_ATTRS_SH_FILE` points to the exact
|
||||
location of the script, both in a build and a
|
||||
[`nix-shell`](../command-ref/nix-shell.md). This includes non-nested
|
||||
(associative) arrays. For example, the attribute `hardening.format = true`
|
||||
ends up as the Bash associative array element `${hardening[format]}`.
|
||||
|
||||
|
@ -335,3 +350,15 @@ Derivations can declare some infrequently used optional attributes.
|
|||
This is useful, for example, when generating self-contained filesystem images with
|
||||
their own embedded Nix store: hashes found inside such an image refer
|
||||
to the embedded store and not to the host's Nix store.
|
||||
|
||||
- [`requiredSystemFeatures`]{#adv-attr-requiredSystemFeatures}\
|
||||
|
||||
If a derivation has the `requiredSystemFeatures` attribute, then Nix will only build it on a machine that has the corresponding features set in its [`system-features` configuration](@docroot@/command-ref/conf-file.md#conf-system-features).
|
||||
|
||||
For example, setting
|
||||
|
||||
```nix
|
||||
requiredSystemFeatures = [ "kvm" ];
|
||||
```
|
||||
|
||||
ensures that the derivation can only be built on a machine with the `kvm` feature.
|
||||
|
|
27
doc/manual/src/language/constructs/lookup-path.md
Normal file
27
doc/manual/src/language/constructs/lookup-path.md
Normal file
|
@ -0,0 +1,27 @@
|
|||
# Lookup path
|
||||
|
||||
> **Syntax**
|
||||
>
|
||||
> *lookup-path* = `<` *identifier* [ `/` *identifier* ]... `>`
|
||||
|
||||
A lookup path is an identifier with an optional path suffix that resolves to a [path value](@docroot@/language/values.md#type-path) if the identifier matches a search path entry.
|
||||
|
||||
The value of a lookup path is determined by [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath).
|
||||
|
||||
See [`builtins.findFile`](@docroot@/language/builtins.md#builtins-findFile) for details on lookup path resolution.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```nix
|
||||
> <nixpkgs>
|
||||
>```
|
||||
>
|
||||
> /nix/var/nix/profiles/per-user/root/channels/nixpkgs
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```nix
|
||||
> <nixpkgs/nixos>
|
||||
>```
|
||||
>
|
||||
> /nix/var/nix/profiles/per-user/root/channels/nixpkgs/nixos
|
|
@ -1,161 +1,315 @@
|
|||
# Derivations
|
||||
|
||||
The most important built-in function is `derivation`, which is used to
|
||||
describe a single derivation (a build task). It takes as input a set,
|
||||
the attributes of which specify the inputs of the build.
|
||||
The most important built-in function is `derivation`, which is used to describe a single derivation:
|
||||
a specification for running an executable on precisely defined input files to repeatably produce output files at uniquely determined file system paths.
|
||||
|
||||
- There must be an attribute named [`system`]{#attr-system} whose value must be a
|
||||
string specifying a Nix system type, such as `"i686-linux"` or
|
||||
`"x86_64-darwin"`. (To figure out your system type, run `nix -vv
|
||||
--version`.) The build can only be performed on a machine and
|
||||
operating system matching the system type. (Nix can automatically
|
||||
[forward builds for other
|
||||
platforms](../advanced-topics/distributed-builds.md) by forwarding
|
||||
them to other machines.)
|
||||
It takes as input an attribute set, the attributes of which specify the inputs to the process.
|
||||
It outputs an attribute set, and produces a [store derivation] as a side effect of evaluation.
|
||||
|
||||
- There must be an attribute named `name` whose value must be a
|
||||
string. This is used as a symbolic name for the package by
|
||||
`nix-env`, and it is appended to the output paths of the derivation.
|
||||
[store derivation]: @docroot@/glossary.md#gloss-store-derivation
|
||||
|
||||
- There must be an attribute named `builder` that identifies the
|
||||
program that is executed to perform the build. It can be either a
|
||||
derivation or a source (a local file reference, e.g.,
|
||||
`./builder.sh`).
|
||||
## Input attributes
|
||||
|
||||
- Every attribute is passed as an environment variable to the builder.
|
||||
Attribute values are translated to environment variables as follows:
|
||||
### Required
|
||||
|
||||
- Strings and numbers are just passed verbatim.
|
||||
- [`name`]{#attr-name} ([String](@docroot@/language/values.md#type-string))
|
||||
|
||||
- A *path* (e.g., `../foo/sources.tar`) causes the referenced file
|
||||
to be copied to the store; its location in the store is put in
|
||||
the environment variable. The idea is that all sources should
|
||||
reside in the Nix store, since all inputs to a derivation should
|
||||
reside in the Nix store.
|
||||
A symbolic name for the derivation.
|
||||
It is added to the [store path] of the corresponding [store derivation] as well as to its [output paths](@docroot@/glossary.md#gloss-output-path).
|
||||
|
||||
- A *derivation* causes that derivation to be built prior to the
|
||||
present derivation; its default output path is put in the
|
||||
environment variable.
|
||||
[store path]: @docroot@/glossary.md#gloss-store-path
|
||||
|
||||
- Lists of the previous types are also allowed. They are simply
|
||||
concatenated, separated by spaces.
|
||||
> **Example**
|
||||
>
|
||||
> ```nix
|
||||
> derivation {
|
||||
> name = "hello";
|
||||
> # ...
|
||||
> }
|
||||
> ```
|
||||
>
|
||||
> The store derivation's path will be `/nix/store/<hash>-hello.drv`.
|
||||
> The [output](#attr-outputs) paths will be of the form `/nix/store/<hash>-hello[-<output>]`
|
||||
|
||||
- `true` is passed as the string `1`, `false` and `null` are
|
||||
passed as an empty string.
|
||||
- [`system`]{#attr-system} ([String](@docroot@/language/values.md#type-string))
|
||||
|
||||
- The optional attribute `args` specifies command-line arguments to be
|
||||
passed to the builder. It should be a list.
|
||||
The system type on which the [`builder`](#attr-builder) executable is meant to be run.
|
||||
|
||||
- The optional attribute `outputs` specifies a list of symbolic
|
||||
outputs of the derivation. By default, a derivation produces a
|
||||
single output path, denoted as `out`. However, derivations can
|
||||
produce multiple output paths. This is useful because it allows
|
||||
outputs to be downloaded or garbage-collected separately. For
|
||||
instance, imagine a library package that provides a dynamic library,
|
||||
header files, and documentation. A program that links against the
|
||||
library doesn’t need the header files and documentation at runtime,
|
||||
and it doesn’t need the documentation at build time. Thus, the
|
||||
library package could specify:
|
||||
A necessary condition for Nix to build derivations locally is that the `system` attribute matches the current [`system` configuration option].
|
||||
It can automatically [build on other platforms](../advanced-topics/distributed-builds.md) by forwarding build requests to other machines.
|
||||
|
||||
```nix
|
||||
outputs = [ "lib" "headers" "doc" ];
|
||||
```
|
||||
[`system` configuration option]: @docroot@/command-ref/conf-file.md#conf-system
|
||||
|
||||
This will cause Nix to pass environment variables `lib`, `headers`
|
||||
and `doc` to the builder containing the intended store paths of each
|
||||
output. The builder would typically do something like
|
||||
> **Example**
|
||||
>
|
||||
> Declare a derivation to be built on a specific system type:
|
||||
>
|
||||
> ```nix
|
||||
> derivation {
|
||||
> # ...
|
||||
> system = "x86_64-linux";
|
||||
> # ...
|
||||
> }
|
||||
> ```
|
||||
|
||||
```bash
|
||||
./configure \
|
||||
--libdir=$lib/lib \
|
||||
--includedir=$headers/include \
|
||||
--docdir=$doc/share/doc
|
||||
```
|
||||
> **Example**
|
||||
>
|
||||
> Declare a derivation to be built on the system type that evaluates the expression:
|
||||
>
|
||||
> ```nix
|
||||
> derivation {
|
||||
> # ...
|
||||
> system = builtins.currentSystem;
|
||||
> # ...
|
||||
> }
|
||||
> ```
|
||||
>
|
||||
> [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) has the value of the [`system` configuration option], and defaults to the system type of the current Nix installation.
|
||||
|
||||
for an Autoconf-style package. You can refer to each output of a
|
||||
derivation by selecting it as an attribute, e.g.
|
||||
- [`builder`]{#attr-builder} ([Path](@docroot@/language/values.md#type-path) | [String](@docroot@/language/values.md#type-string))
|
||||
|
||||
```nix
|
||||
buildInputs = [ pkg.lib pkg.headers ];
|
||||
```
|
||||
Path to an executable that will perform the build.
|
||||
|
||||
The first element of `outputs` determines the *default output*.
|
||||
Thus, you could also write
|
||||
> **Example**
|
||||
>
|
||||
> Use the file located at `/bin/bash` as the builder executable:
|
||||
>
|
||||
> ```nix
|
||||
> derivation {
|
||||
> # ...
|
||||
> builder = "/bin/bash";
|
||||
> # ...
|
||||
> };
|
||||
> ```
|
||||
|
||||
```nix
|
||||
buildInputs = [ pkg pkg.headers ];
|
||||
```
|
||||
<!-- -->
|
||||
|
||||
since `pkg` is equivalent to `pkg.lib`.
|
||||
> **Example**
|
||||
>
|
||||
> Copy a local file to the Nix store for use as the builder executable:
|
||||
>
|
||||
> ```nix
|
||||
> derivation {
|
||||
> # ...
|
||||
> builder = ./builder.sh;
|
||||
> # ...
|
||||
> };
|
||||
> ```
|
||||
|
||||
The function `mkDerivation` in the Nixpkgs standard environment is a
|
||||
wrapper around `derivation` that adds a default value for `system` and
|
||||
always uses Bash as the builder, to which the supplied builder is passed
|
||||
as a command-line argument. See the Nixpkgs manual for details.
|
||||
<!-- -->
|
||||
|
||||
The builder is executed as follows:
|
||||
> **Example**
|
||||
>
|
||||
> Use a file from another derivation as the builder executable:
|
||||
>
|
||||
> ```nix
|
||||
> let pkgs = import <nixpkgs> {}; in
|
||||
> derivation {
|
||||
> # ...
|
||||
> builder = "${pkgs.python}/bin/python";
|
||||
> # ...
|
||||
> };
|
||||
> ```
|
||||
|
||||
- A temporary directory is created under the directory specified by
|
||||
`TMPDIR` (default `/tmp`) where the build will take place. The
|
||||
current directory is changed to this directory.
|
||||
### Optional
|
||||
|
||||
- The environment is cleared and set to the derivation attributes, as
|
||||
specified above.
|
||||
- [`args`]{#attr-args} ([List](@docroot@/language/values.md#list) of [String](@docroot@/language/values.md#type-string))
|
||||
|
||||
- In addition, the following variables are set:
|
||||
Default: `[ ]`
|
||||
|
||||
- `NIX_BUILD_TOP` contains the path of the temporary directory for
|
||||
this build.
|
||||
Command-line arguments to be passed to the [`builder`](#attr-builder) executable.
|
||||
|
||||
- Also, `TMPDIR`, `TEMPDIR`, `TMP`, `TEMP` are set to point to the
|
||||
temporary directory. This is to prevent the builder from
|
||||
accidentally writing temporary files anywhere else. Doing so
|
||||
might cause interference by other processes.
|
||||
> **Example**
|
||||
>
|
||||
> Pass arguments to Bash to interpret a shell command:
|
||||
>
|
||||
> ```nix
|
||||
> derivation {
|
||||
> # ...
|
||||
> builder = "/bin/bash";
|
||||
> args = [ "-c" "echo hello world > $out" ];
|
||||
> # ...
|
||||
> };
|
||||
> ```
|
||||
|
||||
- `PATH` is set to `/path-not-set` to prevent shells from
|
||||
initialising it to their built-in default value.
|
||||
- [`outputs`]{#attr-outputs} ([List](@docroot@/language/values.md#list) of [String](@docroot@/language/values.md#type-string))
|
||||
|
||||
- `HOME` is set to `/homeless-shelter` to prevent programs from
|
||||
using `/etc/passwd` or the like to find the user's home
|
||||
directory, which could cause impurity. Usually, when `HOME` is
|
||||
set, it is used as the location of the home directory, even if
|
||||
it points to a non-existent path.
|
||||
Default: `[ "out" ]`
|
||||
|
||||
- `NIX_STORE` is set to the path of the top-level Nix store
|
||||
directory (typically, `/nix/store`).
|
||||
Symbolic outputs of the derivation.
|
||||
Each output name is passed to the [`builder`](#attr-builder) executable as an environment variable with its value set to the corresponding [store path].
|
||||
|
||||
- For each output declared in `outputs`, the corresponding
|
||||
environment variable is set to point to the intended path in the
|
||||
Nix store for that output. Each output path is a concatenation
|
||||
of the cryptographic hash of all build inputs, the `name`
|
||||
attribute and the output name. (The output name is omitted if
|
||||
it’s `out`.)
|
||||
By default, a derivation produces a single output called `out`.
|
||||
However, derivations can produce multiple outputs.
|
||||
This allows the associated [store objects](@docroot@/glossary.md#gloss-store-object) and their [closures](@docroot@/glossary.md#gloss-closure) to be copied or garbage-collected separately.
|
||||
|
||||
- If an output path already exists, it is removed. Also, locks are
|
||||
acquired to prevent multiple Nix instances from performing the same
|
||||
build at the same time.
|
||||
> **Example**
|
||||
>
|
||||
> Imagine a library package that provides a dynamic library, header files, and documentation.
|
||||
> A program that links against such a library doesn’t need the header files and documentation at runtime, and it doesn’t need the documentation at build time.
|
||||
> Thus, the library package could specify:
|
||||
>
|
||||
> ```nix
|
||||
> derivation {
|
||||
> # ...
|
||||
> outputs = [ "lib" "dev" "doc" ];
|
||||
> # ...
|
||||
> }
|
||||
> ```
|
||||
>
|
||||
> This will cause Nix to pass environment variables `lib`, `dev`, and `doc` to the builder containing the intended store paths of each output.
|
||||
> The builder would typically do something like
|
||||
>
|
||||
> ```bash
|
||||
> ./configure \
|
||||
> --libdir=$lib/lib \
|
||||
> --includedir=$dev/include \
|
||||
> --docdir=$doc/share/doc
|
||||
> ```
|
||||
>
|
||||
> for an Autoconf-style package.
|
||||
|
||||
- A log of the combined standard output and error is written to
|
||||
`/nix/var/log/nix`.
|
||||
The name of an output is combined with the name of the derivation to create the name part of the output's store path, unless it is `out`, in which case just the name of the derivation is used.
|
||||
|
||||
- The builder is executed with the arguments specified by the
|
||||
attribute `args`. If it exits with exit code 0, it is considered to
|
||||
have succeeded.
|
||||
> **Example**
|
||||
>
|
||||
>
|
||||
> ```nix
|
||||
> derivation {
|
||||
> name = "example";
|
||||
> outputs = [ "lib" "dev" "doc" "out" ];
|
||||
> # ...
|
||||
> }
|
||||
> ```
|
||||
>
|
||||
> The store derivation path will be `/nix/store/<hash>-example.drv`.
|
||||
> The output paths will be
|
||||
> - `/nix/store/<hash>-example-lib`
|
||||
> - `/nix/store/<hash>-example-dev`
|
||||
> - `/nix/store/<hash>-example-doc`
|
||||
> - `/nix/store/<hash>-example`
|
||||
|
||||
- The temporary directory is removed (unless the `-K` option was
|
||||
specified).
|
||||
You can refer to each output of a derivation by selecting it as an attribute.
|
||||
The first element of `outputs` determines the *default output* and ends up at the top-level.
|
||||
|
||||
- If the build was successful, Nix scans each output path for
|
||||
references to input paths by looking for the hash parts of the input
|
||||
paths. Since these are potential runtime dependencies, Nix registers
|
||||
them as dependencies of the output paths.
|
||||
> **Example**
|
||||
>
|
||||
> Select an output by attribute name:
|
||||
>
|
||||
> ```nix
|
||||
> let
|
||||
> myPackage = derivation {
|
||||
> name = "example";
|
||||
> outputs = [ "lib" "dev" "doc" "out" ];
|
||||
> # ...
|
||||
> };
|
||||
> in myPackage.dev
|
||||
> ```
|
||||
>
|
||||
> Since `lib` is the first output, `myPackage` is equivalent to `myPackage.lib`.
|
||||
|
||||
- After the build, Nix sets the last-modified timestamp on all files
|
||||
in the build result to 1 (00:00:01 1/1/1970 UTC), sets the group to
|
||||
the default group, and sets the mode of the file to 0444 or 0555
|
||||
(i.e., read-only, with execute permission enabled if the file was
|
||||
originally executable). Note that possible `setuid` and `setgid`
|
||||
bits are cleared. Setuid and setgid programs are not currently
|
||||
supported by Nix. This is because the Nix archives used in
|
||||
deployment have no concept of ownership information, and because it
|
||||
makes the build result dependent on the user performing the build.
|
||||
<!-- FIXME: refer to the output attributes when we have one -->
|
||||
|
||||
- See [Advanced Attributes](./advanced-attributes.md) for more, infrequently used, optional attributes.
|
||||
|
||||
<!-- FIXME: This should be moved here -->
|
||||
|
||||
- Every other attribute is passed as an environment variable to the builder.
|
||||
Attribute values are translated to environment variables as follows:
|
||||
|
||||
- Strings are passed unchanged.
|
||||
|
||||
- Integral numbers are converted to decimal notation.
|
||||
|
||||
- Floating point numbers are converted to simple decimal or scientific notation with a preset precision.
|
||||
|
||||
- A *path* (e.g., `../foo/sources.tar`) causes the referenced file
|
||||
to be copied to the store; its location in the store is put in
|
||||
the environment variable. The idea is that all sources should
|
||||
reside in the Nix store, since all inputs to a derivation should
|
||||
reside in the Nix store.
|
||||
|
||||
- A *derivation* causes that derivation to be built prior to the
|
||||
present derivation. The environment variable is set to the [store path] of the derivation's default [output](#attr-outputs).
|
||||
|
||||
- Lists of the previous types are also allowed. They are simply
|
||||
concatenated, separated by spaces.
|
||||
|
||||
- `true` is passed as the string `1`, `false` and `null` are
|
||||
passed as an empty string.
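> **Example**
>
> A minimal sketch of how these translation rules play out.
> The attribute names and values below are purely illustrative, and `./sources.tar` stands in for some local file:
>
> ```nix
> derivation {
>   name = "env-translation-example";
>   system = builtins.currentSystem;
>   builder = "/bin/sh";
>   args = [ "-c" "echo \"$greeting $answer $flags\" > $out" ];
>   # Every attribute below reaches the builder as an environment variable:
>   greeting = "hello";       # string: passed verbatim
>   answer = 42;              # integer: converted to decimal notation ("42")
>   src = ./sources.tar;      # path: copied to the store; the variable holds the store path
>   enableFoo = true;         # boolean: passed as "1" (false and null become "")
>   flags = [ "-O2" "-g" ];   # list: elements concatenated, separated by spaces
> }
> ```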
|
||||
|
||||
<!-- FIXME: add a section on output attributes -->
|
||||
|
||||
## Builder execution
|
||||
|
||||
The [`builder`](#attr-builder) is executed as follows:
|
||||
|
||||
- A temporary directory is created under the directory specified by
|
||||
`TMPDIR` (default `/tmp`) where the build will take place. The
|
||||
current directory is changed to this directory.
|
||||
|
||||
- The environment is cleared and set to the derivation attributes, as
|
||||
specified above.
|
||||
|
||||
- In addition, the following variables are set:
|
||||
|
||||
- `NIX_BUILD_TOP` contains the path of the temporary directory for
|
||||
this build.
|
||||
|
||||
- Also, `TMPDIR`, `TEMPDIR`, `TMP`, `TEMP` are set to point to the
|
||||
temporary directory. This is to prevent the builder from
|
||||
accidentally writing temporary files anywhere else. Doing so
|
||||
might cause interference by other processes.
|
||||
|
||||
- `PATH` is set to `/path-not-set` to prevent shells from
|
||||
initialising it to their built-in default value.
|
||||
|
||||
- `HOME` is set to `/homeless-shelter` to prevent programs from
|
||||
using `/etc/passwd` or the like to find the user's home
|
||||
directory, which could cause impurity. Usually, when `HOME` is
|
||||
set, it is used as the location of the home directory, even if
|
||||
it points to a non-existent path.
|
||||
|
||||
- `NIX_STORE` is set to the path of the top-level Nix store
|
||||
directory (typically, `/nix/store`).
|
||||
|
||||
- `NIX_ATTRS_JSON_FILE` & `NIX_ATTRS_SH_FILE` are set if `__structuredAttrs`
|
||||
is set to `true` for the derivation. A detailed explanation of this
|
||||
behavior can be found in the
|
||||
[section about structured attrs](./advanced-attributes.md#adv-attr-structuredAttrs).
|
||||
|
||||
- For each output declared in `outputs`, the corresponding
|
||||
environment variable is set to point to the intended path in the
|
||||
Nix store for that output. Each output path is a concatenation
|
||||
of the cryptographic hash of all build inputs, the `name`
|
||||
attribute and the output name. (The output name is omitted if
|
||||
it’s `out`.)
|
||||
|
||||
- If an output path already exists, it is removed. Also, locks are
|
||||
acquired to prevent multiple Nix instances from performing the same
|
||||
build at the same time.
|
||||
|
||||
- A log of the combined standard output and error is written to
|
||||
`/nix/var/log/nix`.
|
||||
|
||||
- The builder is executed with the arguments specified by the
|
||||
attribute `args`. If it exits with exit code 0, it is considered to
|
||||
have succeeded.
|
||||
|
||||
- The temporary directory is removed (unless the `-K` option was
|
||||
specified).
|
||||
|
||||
- If the build was successful, Nix scans each output path for
|
||||
references to input paths by looking for the hash parts of the input
|
||||
paths. Since these are potential runtime dependencies, Nix registers
|
||||
them as dependencies of the output paths.
|
||||
|
||||
- After the build, Nix sets the last-modified timestamp on all files
|
||||
in the build result to 1 (00:00:01 1/1/1970 UTC), sets the group to
|
||||
the default group, and sets the mode of the file to 0444 or 0555
|
||||
(i.e., read-only, with execute permission enabled if the file was
|
||||
originally executable). Note that possible `setuid` and `setgid`
|
||||
bits are cleared. Setuid and setgid programs are not currently
|
||||
supported by Nix. This is because the Nix archives used in
|
||||
deployment have no concept of ownership information, and because it
|
||||
makes the build result dependent on the user performing the build.
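> **Example**
>
> Putting these steps together, here is a minimal, self-contained sketch of a derivation whose builder goes through the process described above.
> It simply writes the string `hello` to its single output:
>
> ```nix
> derivation {
>   name = "hello-file";
>   system = builtins.currentSystem;
>   builder = "/bin/sh";
>   # The builder starts in a fresh temporary directory with a cleared environment
>   # and writes its result to the store path provided in the `out` variable.
>   args = [ "-c" "echo hello > $out" ];
> }
> ```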
|
||||
|
|
139
doc/manual/src/language/import-from-derivation.md
Normal file
139
doc/manual/src/language/import-from-derivation.md
Normal file
|
@ -0,0 +1,139 @@
|
|||
# Import From Derivation
|
||||
|
||||
The value of a Nix expression can depend on the contents of a [store object](@docroot@/glossary.md#gloss-store-object).
|
||||
|
||||
Passing an expression `expr` that evaluates to a [store path](@docroot@/glossary.md#gloss-store-path) to any built-in function which reads from the filesystem constitutes Import From Derivation (IFD):
|
||||
|
||||
- [`import`](./builtins.md#builtins-import)` expr`
|
||||
- [`builtins.readFile`](./builtins.md#builtins-readFile)` expr`
|
||||
- [`builtins.readFileType`](./builtins.md#builtins-readFileType)` expr`
|
||||
- [`builtins.readDir`](./builtins.md#builtins-readDir)` expr`
|
||||
- [`builtins.pathExists`](./builtins.md#builtins-pathExists)` expr`
|
||||
- [`builtins.filterSource`](./builtins.md#builtins-filterSource)` f expr`
|
||||
- [`builtins.path`](./builtins.md#builtins-path)` { path = expr; }`
|
||||
- [`builtins.hashFile`](./builtins.md#builtins-hashFile)` t expr`
|
||||
- `builtins.scopedImport x drv`
|
||||
|
||||
When the store path needs to be accessed, evaluation will be paused, the corresponding store object [realised], and then evaluation resumed.
|
||||
|
||||
[realised]: @docroot@/glossary.md#gloss-realise
|
||||
|
||||
This has performance implications:
|
||||
Evaluation can only finish when all required store objects are realised.
|
||||
Since the Nix language evaluator is sequential, it only finds store paths to read from one at a time.
|
||||
While realisation is always parallel, in this case it cannot be done for all required store paths at once, and is therefore much slower than otherwise.
|
||||
|
||||
Realising store objects during evaluation can be disabled by setting [`allow-import-from-derivation`](../command-ref/conf-file.md#conf-allow-import-from-derivation) to `false`.
|
||||
Without IFD it is ensured that evaluation is complete and Nix can produce a build plan before starting any realisation.
|
||||
|
||||
## Example
|
||||
|
||||
In the following Nix expression, the inner derivation `drv` produces a file with contents `hello`.
|
||||
|
||||
```nix
|
||||
# IFD.nix
|
||||
let
|
||||
drv = derivation {
|
||||
name = "hello";
|
||||
builder = "/bin/sh";
|
||||
args = [ "-c" "echo -n hello > $out" ];
|
||||
system = builtins.currentSystem;
|
||||
};
|
||||
in "${builtins.readFile drv} world"
|
||||
```
|
||||
|
||||
```shellSession
|
||||
nix-instantiate IFD.nix --eval --read-write-mode
|
||||
```
|
||||
|
||||
```
|
||||
building '/nix/store/348q1cal6sdgfxs8zqi9v8llrsn4kqkq-hello.drv'...
|
||||
"hello world"
|
||||
```
|
||||
|
||||
The contents of the derivation's output have to be [realised] before they can be read with [`readFile`](./builtins.md#builtins-readFile).
|
||||
Only then can evaluation continue to produce the final result.
|
||||
|
||||
## Illustration
|
||||
|
||||
As a first approximation, the following data flow graph shows how evaluation and building are interleaved, if the value of a Nix expression depends on realising a [store object].
|
||||
Boxes are data structures, arrow labels are transformations.
|
||||
|
||||
```
|
||||
+----------------------+ +------------------------+
|
||||
| Nix evaluator | | Nix store |
|
||||
| .----------------. | | |
|
||||
| | Nix expression | | | |
|
||||
| '----------------' | | |
|
||||
| | | | |
|
||||
| evaluate | | |
|
||||
| | | | |
|
||||
| V | | |
|
||||
| .------------. | | .------------------. |
|
||||
| | derivation |----|-instantiate-|->| store derivation | |
|
||||
| '------------' | | '------------------' |
|
||||
| | | | |
|
||||
| | | realise |
|
||||
| | | | |
|
||||
| | | V |
|
||||
| .----------------. | | .--------------. |
|
||||
| | Nix expression |<-|----read-----|----| store object | |
|
||||
| '----------------' | | '--------------' |
|
||||
| | | | |
|
||||
| evaluate | | |
|
||||
| | | | |
|
||||
| V | | |
|
||||
| .------------. | | |
|
||||
| | value | | | |
|
||||
| '------------' | | |
|
||||
+----------------------+ +------------------------+
|
||||
```
|
||||
|
||||
In more detail, the following sequence diagram shows how the expression is evaluated step by step, and where evaluation is blocked to wait for the build output to appear.
|
||||
|
||||
```
|
||||
.-------. .-------------. .---------.
|
||||
|Nix CLI| |Nix evaluator| |Nix store|
|
||||
'-------' '-------------' '---------'
|
||||
| | |
|
||||
|evaluate IFD.nix| |
|
||||
|--------------->| |
|
||||
| | |
|
||||
| evaluate `"${readFile drv} world"` |
|
||||
| | |
|
||||
| evaluate `readFile drv` |
|
||||
| | |
|
||||
| evaluate `drv` as string |
|
||||
| | |
|
||||
| |instantiate /nix/store/...-hello.drv|
|
||||
| |----------------------------------->|
|
||||
| : |
|
||||
| : realise /nix/store/...-hello.drv |
|
||||
| :----------------------------------->|
|
||||
| : |
|
||||
| |--------.
|
||||
| : | |
|
||||
| (evaluation blocked) | echo hello > $out
|
||||
| : | |
|
||||
| |<-------'
|
||||
| : /nix/store/...-hello |
|
||||
| |<-----------------------------------|
|
||||
| | |
|
||||
| resume `readFile /nix/store/...-hello` |
|
||||
| | |
|
||||
| | readFile /nix/store/...-hello |
|
||||
| |----------------------------------->|
|
||||
| | |
|
||||
| | hello |
|
||||
| |<-----------------------------------|
|
||||
| | |
|
||||
| resume `"${"hello"} world"` |
|
||||
| | |
|
||||
| resume `"hello world"` |
|
||||
| | |
|
||||
| "hello world" | |
|
||||
|<---------------| |
|
||||
.-------. .-------------. .---------.
|
||||
|Nix CLI| |Nix evaluator| |Nix store|
|
||||
'-------' '-------------' '---------'
|
||||
```
|
|
@ -83,7 +83,8 @@ This is an incomplete overview of language features, by example.
|
|||
</td>
|
||||
<td>
|
||||
|
||||
A multi-line string. Strips common prefixed whitespace. Evaluates to `"multi\n line\n string"`.
|
||||
<!-- FIXME: using two no-break spaces, because apparently mdBook swallows the second regular space! -->
|
||||
A multi-line string. Strips common prefixed whitespace. Evaluates to `"multi\n line\n string"`.
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
|
|
@ -35,6 +35,8 @@
|
|||
|
||||
## Attribute selection
|
||||
|
||||
> **Syntax**
|
||||
>
|
||||
> *attrset* `.` *attrpath* \[ `or` *expr* \]
|
||||
|
||||
Select the attribute denoted by attribute path *attrpath* from [attribute set] *attrset*.
|
||||
|
@ -42,21 +44,29 @@ If the attribute doesn’t exist, return the *expr* after `or` if provided, othe
|
|||
|
||||
An attribute path is a dot-separated list of [attribute names](./values.md#attribute-set).
|
||||
|
||||
> **Syntax**
|
||||
>
|
||||
> *attrpath* = *name* [ `.` *name* ]...
|
||||
|
||||
[Attribute selection]: #attribute-selection
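For example, a small sketch with arbitrary attribute names:

```nix
let attrs = { a.b = 1; }; in
[
  attrs.a.b          # selects the nested attribute: 1
  (attrs.a.c or 2)   # the attribute is missing, so the fallback after `or` is returned: 2
]
```

This evaluates to `[ 1 2 ]`.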
|
||||
|
||||
## Has attribute
|
||||
|
||||
> **Syntax**
|
||||
>
|
||||
> *attrset* `?` *attrpath*
|
||||
|
||||
Test whether [attribute set] *attrset* contains the attribute denoted by *attrpath*.
|
||||
The result is a [Boolean] value.
|
||||
|
||||
See also: [`builtins.hasAttr`](@docroot@/language/builtins.md#builtins-hasAttr)
|
||||
|
||||
[Boolean]: ./values.md#type-boolean
|
||||
|
||||
[Has attribute]: #has-attribute
|
||||
|
||||
After evaluating *attrset* and *attrpath*, the computational complexity is O(log(*n*)) for *n* attributes in the *attrset*.
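For example, again with arbitrary attribute names:

```nix
let attrs = { a.b = 1; }; in
[
  (attrs ? a.b)   # true
  (attrs ? c)     # false
]
```

This evaluates to `[ true false ]`.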
|
||||
|
||||
## Arithmetic
|
||||
|
||||
Numbers are type-compatible:
|
||||
|
@ -70,6 +80,8 @@ The `+` operator is overloaded to also work on strings and paths.
|
|||
|
||||
## String concatenation
|
||||
|
||||
> **Syntax**
|
||||
>
|
||||
> *string* `+` *string*
|
||||
|
||||
Concatenate two [string]s and merge their string contexts.
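For example (plain strings with empty contexts):

```nix
# Both operands are strings; the result is "Hello, world!"
"Hello, " + "world!"
```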
|
||||
|
@ -78,6 +90,8 @@ Concatenate two [string]s and merge their string contexts.
|
|||
|
||||
## Path concatenation
|
||||
|
||||
> **Syntax**
|
||||
>
|
||||
> *path* `+` *path*
|
||||
|
||||
Concatenate two [path]s.
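For example (illustrative absolute paths; with relative paths, the second operand's absolute form is what gets appended):

```nix
# Both operands are paths; the second path is appended to the first
/foo + /bar    # evaluates to the path /foo/bar
```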
|
||||
|
@ -87,6 +101,8 @@ The result is a path.
|
|||
|
||||
## Path and string concatenation
|
||||
|
||||
> **Syntax**
|
||||
>
|
||||
> *path* `+` *string*
|
||||
|
||||
Concatenate *[path]* with *[string]*.
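For example (the names are arbitrary):

```nix
# The string is appended verbatim to the path; the result is a path
./data + "/config.json"
```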
|
||||
|
@ -100,6 +116,8 @@ The result is a path.
|
|||
|
||||
## String and path concatenation
|
||||
|
||||
> **Syntax**
|
||||
>
|
||||
> *string* `+` *path*
|
||||
|
||||
Concatenate *[string]* with *[path]*.
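For example (a sketch: it assumes a file or directory `./foo` exists next to the expression, and the store hash shown is a placeholder):

```nix
# ./foo is copied into the Nix store and the resulting store path
# is appended to the string; the result is a string, e.g.
# "the store copy lives at /nix/store/<hash>-foo"
"the store copy lives at " + ./foo
```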
|
||||
|
@ -117,6 +135,8 @@ The result is a string.
|
|||
|
||||
## Update
|
||||
|
||||
> **Syntax**
|
||||
>
|
||||
> *attrset1* `//` *attrset2*
|
||||
|
||||
Update [attribute set] *attrset1* with names and values from *attrset2*.
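For example:

```nix
# Attributes from the right-hand operand take precedence
{ a = 1; b = 2; } // { b = 3; c = 4; }
# evaluates to { a = 1; b = 3; c = 4; }
```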
|
||||
|
|
|
@ -1,19 +1,12 @@
|
|||
# String interpolation
|
||||
|
||||
String interpolation is a language feature where a [string], [path], or [attribute name] can contain expressions enclosed in `${ }` (dollar-sign with curly brackets).
|
||||
String interpolation is a language feature where a [string], [path], or [attribute name][attribute set] can contain expressions enclosed in `${ }` (dollar-sign with curly brackets).
|
||||
|
||||
Such a string is an *interpolated string*, and an expression inside is an *interpolated expression*.
|
||||
|
||||
Interpolated expressions must evaluate to one of the following:
|
||||
|
||||
- a [string]
|
||||
- a [path]
|
||||
- a [derivation]
|
||||
Such a construct is called an *interpolated string*, and the expression inside is an [interpolated expression](#interpolated-expression).
|
||||
|
||||
[string]: ./values.md#type-string
|
||||
[path]: ./values.md#type-path
|
||||
[attribute name]: ./values.md#attribute-set
|
||||
[derivation]: ../glossary.md#gloss-derivation
|
||||
[attribute set]: ./values.md#attribute-set
|
||||
|
||||
## Examples
|
||||
|
||||
|
@ -70,13 +63,136 @@ you can instead write
|
|||
|
||||
### Attribute name
|
||||
|
||||
Attribute names can be created dynamically with string interpolation:
|
||||
<!--
|
||||
FIXME: these examples are redundant with the main page on attribute sets.
|
||||
figure out what to do about that
|
||||
-->
|
||||
|
||||
```nix
|
||||
let name = "foo"; in
|
||||
{
|
||||
${name} = "bar";
|
||||
}
|
||||
```
|
||||
Attribute names can be interpolated strings.
|
||||
|
||||
{ foo = "bar"; }
|
||||
> **Example**
|
||||
>
|
||||
> ```nix
|
||||
> let name = "foo"; in
|
||||
> { ${name} = 123; }
|
||||
> ```
|
||||
>
|
||||
> { foo = 123; }
|
||||
|
||||
Attributes can be selected with interpolated strings.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```nix
|
||||
> let name = "foo"; in
|
||||
> { foo = 123; }.${name}
|
||||
> ```
|
||||
>
|
||||
> 123
|
||||
|
||||
# Interpolated expression
|
||||
|
||||
An expression that is interpolated must evaluate to one of the following:
|
||||
|
||||
- a [string]
|
||||
- a [path]
|
||||
- an [attribute set] that has a `__toString` attribute or an `outPath` attribute
|
||||
|
||||
- `__toString` must be a function that takes the attribute set itself and returns a string
|
||||
- `outPath` must be a string
|
||||
|
||||
This includes [derivations](./derivations.md) or [flake inputs](@docroot@/command-ref/new-cli/nix3-flake.md#flake-inputs) (experimental).
|
||||
|
||||
A string interpolates to itself.
|
||||
|
||||
A path in an interpolated expression is first copied into the Nix store, and the resulting string is the [store path] of the newly created [store object](../glossary.md#gloss-store-object).
|
||||
|
||||
[store path]: ../glossary.md#gloss-store-path
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```console
|
||||
> $ mkdir foo
|
||||
> ```
|
||||
>
|
||||
> Reference the empty directory in an interpolated expression:
|
||||
>
|
||||
> ```nix
|
||||
> "${./foo}"
|
||||
> ```
|
||||
>
|
||||
> "/nix/store/2hhl2nz5v0khbn06ys82nrk99aa1xxdw-foo"
|
||||
|
||||
A derivation interpolates to the [store path] of its first [output](./derivations.md#attr-outputs).
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```nix
|
||||
> let
|
||||
> pkgs = import <nixpkgs> {};
|
||||
> in
|
||||
> "${pkgs.hello}"
|
||||
> ```
|
||||
>
|
||||
> "/nix/store/4xpfqf29z4m8vbhrqcz064wfmb46w5r7-hello-2.12.1"
|
||||
|
||||
An attribute set interpolates to the return value of the function in its `__toString` attribute, applied to the attribute set itself.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```nix
|
||||
> let
|
||||
> a = {
|
||||
> value = 1;
|
||||
> __toString = self: toString (self.value + 1);
|
||||
> };
|
||||
> in
|
||||
> "${a}"
|
||||
> ```
|
||||
>
|
||||
> "2"
|
||||
|
||||
An attribute set also interpolates to the value of its `outPath` attribute.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```nix
|
||||
> let
|
||||
> a = { outPath = "foo"; };
|
||||
> in
|
||||
> "${a}"
|
||||
> ```
|
||||
>
|
||||
> "foo"
|
||||
|
||||
If both `__toString` and `outPath` are present in an attribute set, `__toString` takes precedence.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```nix
|
||||
> let
|
||||
> a = { __toString = _: "yes"; outPath = throw "no"; };
|
||||
> in
|
||||
> "${a}"
|
||||
> ```
|
||||
>
|
||||
> "yes"
|
||||
|
||||
If neither is present, an error is thrown.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```nix
|
||||
> let
|
||||
> a = {};
|
||||
> in
|
||||
> "${a}"
|
||||
> ```
|
||||
>
|
||||
> error: cannot coerce a set to a string
|
||||
>
|
||||
> at «string»:4:2:
|
||||
>
|
||||
> 3| in
|
||||
> 4| "${a}"
|
||||
> | ^
|
||||
|
|
|
@ -107,29 +107,24 @@
|
|||
e.g. `~/foo` would be equivalent to `/home/edolstra/foo` for a user
|
||||
whose home directory is `/home/edolstra`.
|
||||
|
||||
Paths can also be specified between angle brackets, e.g.
|
||||
`<nixpkgs>`. This means that the directories listed in the
|
||||
environment variable `NIX_PATH` will be searched for the given file
|
||||
or directory name.
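For example (a sketch; the `nixpkgs` entry and its location are assumptions about the local `NIX_PATH`):

```nix
# With NIX_PATH containing an entry `nixpkgs=/path/to/nixpkgs`,
# this lookup path resolves to the path /path/to/nixpkgs/lib
<nixpkgs/lib>
```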
|
||||
|
||||
When an [interpolated string][string interpolation] evaluates to a path, the path is first copied into the Nix store and the resulting string is the [store path] of the newly created [store object].
|
||||
|
||||
[store path]: ../glossary.md#gloss-store-path
|
||||
[store object]: ../glossary.md#gloss-store-object
|
||||
|
||||
For instance, evaluating `"${./foo.txt}"` will cause `foo.txt` in the current directory to be copied into the Nix store and result in the string `"/nix/store/<hash>-foo.txt"`.
|
||||
|
||||
Note that the Nix language assumes that all input files will remain _unchanged_ while evaluating a Nix expression.
|
||||
For example, assume you used a file path in an interpolated string during a `nix repl` session.
|
||||
Later in the same session, after having changed the file contents, evaluating the interpolated string with the file path again might not return a new store path, since Nix might not re-read the file contents.
|
||||
Later in the same session, after having changed the file contents, evaluating the interpolated string with the file path again might not return a new [store path], since Nix might not re-read the file contents.
|
||||
|
||||
Paths themselves, except those in angle brackets (`< >`), support [string interpolation].
|
||||
[store path]: ../glossary.md#gloss-store-path
|
||||
|
||||
Paths can include [string interpolation] and can themselves be [interpolated in other expressions].
|
||||
[interpolated in other expressions]: ./string-interpolation.md#interpolated-expressions
|
||||
|
||||
At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path.
|
||||
|
||||
`a.${foo}/b.${bar}` is a syntactically valid division operation.
|
||||
`./a.${foo}/b.${bar}` is a path.
|
||||
|
||||
[Lookup paths](./constructs/lookup-path.md) such as `<nixpkgs>` resolve to path values.
|
||||
|
||||
- <a id="type-boolean" href="#type-boolean">Boolean</a>
|
||||
|
||||
*Booleans* with values `true` and `false`.
|
||||
|
@ -167,13 +162,17 @@ An attribute set is a collection of name-value-pairs (called *attributes*) enclo
|
|||
An attribute name can be an identifier or a [string](#string).
|
||||
An identifier must start with a letter (`a-z`, `A-Z`) or underscore (`_`), and can otherwise contain letters (`a-z`, `A-Z`), numbers (`0-9`), underscores (`_`), apostrophes (`'`), or dashes (`-`).
|
||||
|
||||
> **Syntax**
|
||||
>
|
||||
> *name* = *identifier* | *string* \
|
||||
> *identifier* ~ `[a-zA-Z_][a-zA-Z0-9_'-]*`
|
||||
|
||||
Names and values are separated by an equal sign (`=`).
|
||||
Each value is an arbitrary expression terminated by a semicolon (`;`).
|
||||
|
||||
> *attrset* = `{` [ *name* `=` *expr* `;` `]`... `}`
|
||||
> **Syntax**
|
||||
>
|
||||
> *attrset* = `{` [ *name* `=` *expr* `;` ]... `}`
|
||||
|
||||
Attributes can appear in any order.
|
||||
An attribute name may only occur once.
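For illustration, a small sketch using both an identifier and a string as attribute names:

```nix
{
  x = 123;
  text = "Hello";
  "attribute name with spaces" = true;  # a string literal used as an attribute name
}
```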
|
||||
|
|
|
@ -1,179 +0,0 @@
|
|||
# Basic Package Management
|
||||
|
||||
The main command for package management is
|
||||
[`nix-env`](../command-ref/nix-env.md). You can use it to install,
|
||||
upgrade, and erase packages, and to query what packages are installed
|
||||
or are available for installation.
|
||||
|
||||
In Nix, different users can have different “views” on the set of
|
||||
installed applications. That is, there might be lots of applications
|
||||
present on the system (possibly in many different versions), but users
|
||||
can have a specific selection of those active — where “active” just
|
||||
means that it appears in a directory in the user’s `PATH`. Such a view
|
||||
on the set of installed applications is called a *user environment*,
|
||||
which is just a directory tree consisting of symlinks to the files of
|
||||
the active applications.
|
||||
|
||||
Components are installed from a set of *Nix expressions* that tell Nix
|
||||
how to build those packages, including, if necessary, their
|
||||
dependencies. There is a collection of Nix expressions called the
|
||||
Nixpkgs package collection that contains packages ranging from basic
|
||||
development stuff such as GCC and Glibc, to end-user applications like
|
||||
Mozilla Firefox. (Nix is however not tied to the Nixpkgs package
|
||||
collection; you could write your own Nix expressions based on Nixpkgs,
|
||||
or completely new ones.)
|
||||
|
||||
You can manually download the latest version of Nixpkgs from
|
||||
<https://github.com/NixOS/nixpkgs>. However, it’s much more
|
||||
convenient to use the Nixpkgs [*channel*](../command-ref/nix-channel.md), since it makes
|
||||
it easy to stay up to date with new versions of Nixpkgs. Nixpkgs is
|
||||
automatically added to your list of “subscribed” channels when you
|
||||
install Nix. If this is not the case for some reason, you can add it
|
||||
as follows:
|
||||
|
||||
```console
|
||||
$ nix-channel --add https://nixos.org/channels/nixpkgs-unstable
|
||||
$ nix-channel --update
|
||||
```
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> On NixOS, you’re automatically subscribed to a NixOS channel
|
||||
> corresponding to your NixOS major release (e.g.
|
||||
> <http://nixos.org/channels/nixos-21.11>). A NixOS channel is identical
|
||||
> to the Nixpkgs channel, except that it contains only Linux binaries
|
||||
> and is updated only if a set of regression tests succeed.
|
||||
|
||||
You can view the set of available packages in Nixpkgs:
|
||||
|
||||
```console
|
||||
$ nix-env --query --available --attr-path
|
||||
nixpkgs.aterm aterm-2.2
|
||||
nixpkgs.bash bash-3.0
|
||||
nixpkgs.binutils binutils-2.15
|
||||
nixpkgs.bison bison-1.875d
|
||||
nixpkgs.blackdown blackdown-1.4.2
|
||||
nixpkgs.bzip2 bzip2-1.0.2
|
||||
…
|
||||
```
|
||||
|
||||
The flag `--query` (`-q`) specifies a query operation, `--available` (`-a`) means that you want
|
||||
to show the “available” (i.e., installable) packages, as opposed to the
|
||||
installed packages, and `--attr-path` (`-P`) prints the attribute paths that can be used
|
||||
to unambiguously select a package for installation (listed in the first column).
|
||||
If you downloaded Nixpkgs yourself, or if you checked it out from GitHub,
|
||||
then you need to pass the path to your Nixpkgs tree using the `--file` (`-f`) flag:
|
||||
|
||||
```console
|
||||
$ nix-env --query --available --attr-path --file /path/to/nixpkgs
|
||||
aterm aterm-2.2
|
||||
bash bash-3.0
|
||||
…
|
||||
```
|
||||
|
||||
where */path/to/nixpkgs* is where you’ve unpacked or checked out
|
||||
Nixpkgs.
|
||||
|
||||
You can filter the packages by name:
|
||||
|
||||
```console
|
||||
$ nix-env --query --available --attr-path firefox
|
||||
nixpkgs.firefox-esr firefox-91.3.0esr
|
||||
nixpkgs.firefox firefox-94.0.1
|
||||
```
|
||||
|
||||
and using regular expressions:
|
||||
|
||||
```console
|
||||
$ nix-env --query --available --attr-path 'firefox.*'
|
||||
```
|
||||
|
||||
It is also possible to see the *status* of available packages, i.e.,
|
||||
whether they are installed into the user environment and/or present in
|
||||
the system:
|
||||
|
||||
```console
|
||||
$ nix-env --query --available --attr-path --status
|
||||
…
|
||||
-PS nixpkgs.bash bash-3.0
|
||||
--S nixpkgs.binutils binutils-2.15
|
||||
IPS nixpkgs.bison bison-1.875d
|
||||
…
|
||||
```
|
||||
|
||||
The first character (`I`) indicates whether the package is installed in
|
||||
your current user environment. The second (`P`) indicates whether it is
|
||||
present on your system (in which case installing it into your user
|
||||
environment would be a very quick operation). The last one (`S`)
|
||||
indicates whether there is a so-called *substitute* for the package,
|
||||
which is Nix’s mechanism for doing binary deployment. It just means that
|
||||
Nix knows that it can fetch a pre-built package from somewhere
|
||||
(typically a network server) instead of building it locally.
|
||||
|
||||
You can install a package using `nix-env --install --attr`. For instance,
|
||||
|
||||
```console
|
||||
$ nix-env --install --attr nixpkgs.subversion
|
||||
```
|
||||
|
||||
will install the package called `subversion` from the `nixpkgs` channel (which is, of course, the
|
||||
[Subversion version management system](http://subversion.tigris.org/)).
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> When you ask Nix to install a package, it will first try to get it in
|
||||
> pre-compiled form from a *binary cache*. By default, Nix will use the
|
||||
> binary cache <https://cache.nixos.org>; it contains binaries for most
|
||||
> packages in Nixpkgs. Only if no binary is available in the binary
|
||||
> cache, Nix will build the package from source. So if `nix-env
|
||||
> -iA nixpkgs.subversion` results in Nix building stuff from source, then either
|
||||
> the package is not built for your platform by the Nixpkgs build
|
||||
> servers, or your version of Nixpkgs is too old or too new. For
|
||||
> instance, if you have a very recent checkout of Nixpkgs, then the
|
||||
> Nixpkgs build servers may not have had a chance to build everything
|
||||
> and upload the resulting binaries to <https://cache.nixos.org>. The
|
||||
> Nixpkgs channel is only updated after all binaries have been uploaded
|
||||
> to the cache, so if you stick to the Nixpkgs channel (rather than
|
||||
> using a Git checkout of the Nixpkgs tree), you will get binaries for
|
||||
> most packages.
|
||||
|
||||
Naturally, packages can also be uninstalled. Unlike when installing, you will
|
||||
need to use the derivation name (though the version part can be omitted),
|
||||
instead of the attribute path, as `nix-env` does not record which attribute
|
||||
was used for installing:
|
||||
|
||||
```console
|
||||
$ nix-env --uninstall subversion
|
||||
```
|
||||
|
||||
Upgrading to a new version is just as easy. If you have a new release of
|
||||
Nix Packages, you can do:
|
||||
|
||||
```console
|
||||
$ nix-env --upgrade --attr nixpkgs.subversion
|
||||
```
|
||||
|
||||
This will *only* upgrade Subversion if there is a “newer” version in the
|
||||
new set of Nix expressions, as defined by some pretty arbitrary rules
|
||||
regarding ordering of version numbers (which generally do what you’d
|
||||
expect of them). To just unconditionally replace Subversion with
|
||||
whatever version is in the Nix expressions, use `-i` instead of `-u`;
|
||||
`-i` will remove whatever version is already installed.
|
||||
|
||||
You can also upgrade all packages for which there are newer versions:
|
||||
|
||||
```console
|
||||
$ nix-env --upgrade
|
||||
```
|
||||
|
||||
Sometimes it’s useful to be able to ask what `nix-env` would do, without
|
||||
actually doing it. For instance, to find out what packages would be
|
||||
upgraded by `nix-env --upgrade`, you can do
|
||||
|
||||
```console
|
||||
$ nix-env --upgrade --dry-run
|
||||
(dry run; not doing anything)
|
||||
upgrading `libxslt-1.1.0' to `libxslt-1.1.10'
|
||||
upgrading `graphviz-1.10' to `graphviz-1.12'
|
||||
upgrading `coreutils-5.0' to `coreutils-5.2.1'
|
||||
```
|
19
doc/manual/src/protocols/derivation-aterm.md
Normal file
|
@ -0,0 +1,19 @@
|
|||
# Derivation "ATerm" file format
|
||||
|
||||
For historical reasons, [derivations](@docroot@/glossary.md#gloss-store-derivation) are stored on-disk in [ATerm](https://homepages.cwi.nl/~daybuild/daily-books/technology/aterm-guide/aterm-guide.html) format.
|
||||
|
||||
Derivations are serialised in one of the following formats:
|
||||
|
||||
- ```
|
||||
Derive(...)
|
||||
```
|
||||
|
||||
For all stable derivations.
|
||||
|
||||
- ```
|
||||
DrvWithVersion(<version-string>, ...)
|
||||
```
|
||||
|
||||
The only `version-string`s that are in use today are for [experimental features](@docroot@/contributing/experimental-features.md):
|
||||
|
||||
- `"xp-dyn-drv"` for the [`dynamic-derivations`](@docroot@/contributing/experimental-features.md#xp-feature-dynamic-derivations) experimental feature.
|
|
@ -20,8 +20,8 @@ Link: <flakeref>; rel="immutable"
|
|||
|
||||
(Note the required `<` and `>` characters around *flakeref*.)
|
||||
|
||||
*flakeref* must be a tarball flakeref. It can contain flake attributes
|
||||
such as `narHash`, `rev` and `revCount`. If `narHash` is included, its
|
||||
*flakeref* must be a tarball flakeref. It can contain the tarball flake attributes
|
||||
`narHash`, `rev`, `revCount` and `lastModified`. If `narHash` is included, its
|
||||
value must be the NAR hash of the unpacked tarball (as computed via
|
||||
`nix hash path`). Nix checks the contents of the returned tarball
|
||||
against the `narHash` attribute. The `rev` and `revCount` attributes
|
||||
|
|
|
@ -1 +1,12 @@
|
|||
# Nix Release Notes
|
||||
|
||||
Nix has a release cycle of roughly 6 weeks.
|
||||
Notable changes and additions are announced in the release notes for each version.
|
||||
|
||||
Bugfixes can be backported on request to previous Nix releases.
|
||||
We typically backport only as far back as the Nix version used in the latest NixOS release, which is announced in the [NixOS release notes](https://nixos.org/manual/nixos/stable/release-notes.html#ch-release-notes).
|
||||
|
||||
Backports never skip releases.
|
||||
If a feature is backported to version `x.y`, it must also be available in version `x.(y+1)`.
|
||||
This ensures that upgrading from an older version with backports is still safe and no backported functionality will go missing.
|
||||
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
|
||||
* On Linux, Nix can now run builds in a user namespace where they run
|
||||
as root (UID 0) and have 65,536 UIDs available.
|
||||
<!-- FIXME: move this to its own section about system features -->
|
||||
This is primarily useful for running containers such as `systemd-nspawn`
|
||||
inside a Nix build. For an example, see [`tests/systemd-nspawn/nix`][nspawn].
|
||||
|
||||
|
|
|
@ -44,7 +44,7 @@
|
|||
(The store always had to check whether it trusts the client, but now the client is informed of the store's decision.)
|
||||
This is useful for scripting interactions with (non-legacy-ssh) remote Nix stores.
|
||||
|
||||
`nix store ping` and `nix doctor` now display this information.
|
||||
`nix store info` and `nix doctor` now display this information.
|
||||
|
||||
* The new command `nix derivation add` allows adding derivations to the store without involving the Nix language.
|
||||
It exists to round out our collection of basic utility/plumbing commands, and allow for a low barrier-to-entry way of experimenting with alternative front-ends to the Nix Store.
|
||||
|
|
28
doc/manual/src/release-notes/rl-2.18.md
Normal file
|
@ -0,0 +1,28 @@
|
|||
# Release 2.18 (2023-09-20)
|
||||
|
||||
- Two new builtin functions,
|
||||
[`builtins.parseFlakeRef`](@docroot@/language/builtins.md#builtins-parseFlakeRef)
|
||||
and
|
||||
[`builtins.flakeRefToString`](@docroot@/language/builtins.md#builtins-flakeRefToString),
|
||||
have been added.
|
||||
These functions are useful for converting between flake references encoded as attribute sets and URLs.
|
||||
|
||||
- [`builtins.toJSON`](@docroot@/language/builtins.md#builtins-toJSON) now prints [`--show-trace`](@docroot@/command-ref/conf-file.html#conf-show-trace) items for the path in which it finds an evaluation error.
|
||||
|
||||
- Error messages regarding malformed input to [`nix derivation add`](@docroot@/command-ref/new-cli/nix3-derivation-add.md) are now clearer and more detailed.
|
||||
|
||||
- The `discard-references` feature has been stabilized.
|
||||
This means that the
|
||||
[unsafeDiscardReferences](@docroot@/contributing/experimental-features.md#xp-feature-discard-references)
|
||||
attribute is no longer guarded by an experimental flag and can be used
|
||||
freely.
|
||||
|
||||
- The JSON output for derived paths which are store paths is now a string, not an object with a single `path` field.
|
||||
This only affects `nix-build --json` when "building" non-derivation things like fetched sources, which is a no-op.
|
||||
|
||||
- A new builtin [`outputOf`](@docroot@/language/builtins.md#builtins-outputOf) has been added.
|
||||
It is part of the [`dynamic-derivations`](@docroot@/contributing/experimental-features.md#xp-feature-dynamic-derivations) experimental feature.
|
||||
|
||||
- Flake follow paths at depths greater than 2 are now handled correctly, preventing "follows a non-existent input" errors.
|
||||
|
||||
- [`nix-store --query`](@docroot@/command-ref/nix-store/query.md) gained a new type of query: `--valid-derivers`. It returns all `.drv` files in the local store that *can be* used to build the output passed in argument. This is in contrast to `--deriver`, which returns the single `.drv` file that *was actually* used to build the output passed in argument. In case the output was substituted from a binary cache, this `.drv` file may only exist on said binary cache and not locally.
|
|
@ -24,7 +24,7 @@
|
|||
[repository](https://github.com/NixOS/bundlers) has various bundlers
|
||||
implemented.
|
||||
|
||||
* `nix store ping` now reports the version of the remote Nix daemon.
|
||||
* `nix store info` now reports the version of the remote Nix daemon.
|
||||
|
||||
* `nix flake {init,new}` now display information about which files have been
|
||||
created.
|
||||
|
|
|
@ -1,18 +1,17 @@
|
|||
# Release X.Y (202?-??-??)
|
||||
|
||||
- Two new builtin functions,
|
||||
[`builtins.parseFlakeRef`](@docroot@/language/builtins.md#builtins-parseFlakeRef)
|
||||
and
|
||||
[`builtins.flakeRefToString`](@docroot@/language/builtins.md#builtins-flakeRefToString),
|
||||
have been added.
|
||||
These functions are useful for converting between flake references encoded as attribute sets and URLs.
|
||||
- [URL flake references](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references) now support [percent-encoded](https://datatracker.ietf.org/doc/html/rfc3986#section-2.1) characters.
|
||||
|
||||
- [`builtins.toJSON`](@docroot@/language/builtins.md#builtins-parseFlakeRef) now prints [--show-trace](@docroot@/command-ref/conf-file.html#conf-show-trace) items for the path in which it finds an evaluation error.
|
||||
- [Path-like flake references](@docroot@/command-ref/new-cli/nix3-flake.md#path-like-syntax) now accept arbitrary unicode characters (except `#` and `?`).
|
||||
|
||||
- Error messages regarding malformed input to [`derivation add`](@docroot@/command-ref/new-cli/nix3-derivation-add.md) are now clearer and more detailed.
|
||||
- The experimental feature `repl-flake` is no longer needed, as its functionality is now part of the `flakes` experimental feature. To get the previous behavior, use the `--file/--expr` flags accordingly.
|
||||
|
||||
- The `discard-references` feature has been stabilized.
|
||||
This means that the
|
||||
[unsafeDiscardReferences](@docroot@/contributing/experimental-features.md#xp-feature-discard-references)
|
||||
attribute is no longer guarded by an experimental flag and can be used
|
||||
freely.
|
||||
- Introduce new flake installable syntax `flakeref#.attrPath` where the "." prefix denotes no searching of default attribute prefixes like `packages.<SYSTEM>` or `legacyPackages.<SYSTEM>`.
|
||||
|
||||
- Nix adds `apple-virt` to the default system features on macOS systems that support virtualization. This is similar to what's done for the `kvm` system feature on Linux hosts.
|
||||
|
||||
- Introduce a new built-in function [`builtins.convertHash`](@docroot@/language/builtins.md#builtins-convertHash).
|
||||
|
||||
- `nix-shell` shebang lines now support single-quoted arguments.
|
||||
|
||||
- `builtins.fetchTree` is now marked as stable.
|
||||
|
|
|
@ -44,63 +44,6 @@ rec {
|
|||
|
||||
optionalString = cond: string: if cond then string else "";
|
||||
|
||||
showSetting = { useAnchors }: name: { description, documentDefault, defaultValue, aliases, value, experimentalFeature }:
|
||||
let
|
||||
result = squash ''
|
||||
- ${if useAnchors
|
||||
then ''<span id="conf-${name}">[`${name}`](#conf-${name})</span>''
|
||||
else ''`${name}`''}
|
||||
|
||||
${indent " " body}
|
||||
'';
|
||||
|
||||
experimentalFeatureNote = optionalString (experimentalFeature != null) ''
|
||||
> **Warning**
|
||||
> This setting is part of an
|
||||
> [experimental feature](@docroot@/contributing/experimental-features.md).
|
||||
|
||||
To change this setting, you need to make sure the corresponding experimental feature,
|
||||
[`${experimentalFeature}`](@docroot@/contributing/experimental-features.md#xp-feature-${experimentalFeature}),
|
||||
is enabled.
|
||||
For example, include the following in [`nix.conf`](#):
|
||||
|
||||
```
|
||||
extra-experimental-features = ${experimentalFeature}
|
||||
${name} = ...
|
||||
```
|
||||
'';
|
||||
|
||||
# separate body to cleanly handle indentation
|
||||
body = ''
|
||||
${description}
|
||||
|
||||
${experimentalFeatureNote}
|
||||
|
||||
**Default:** ${showDefault documentDefault defaultValue}
|
||||
|
||||
${showAliases aliases}
|
||||
'';
|
||||
|
||||
showDefault = documentDefault: defaultValue:
|
||||
if documentDefault then
|
||||
# a StringMap value type is specified as a string, but
|
||||
# this shows the value type. The empty stringmap is `null` in
|
||||
# JSON, but that converts to `{ }` here.
|
||||
if defaultValue == "" || defaultValue == [] || isAttrs defaultValue
|
||||
then "*empty*"
|
||||
else if isBool defaultValue then
|
||||
if defaultValue then "`true`" else "`false`"
|
||||
else "`${toString defaultValue}`"
|
||||
else "*machine-specific*";
|
||||
|
||||
showAliases = aliases:
|
||||
optionalString (aliases != [])
|
||||
"**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";
|
||||
|
||||
in result;
|
||||
|
||||
indent = prefix: s:
|
||||
concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
|
||||
|
||||
showSettings = args: settingsInfo: concatStrings (attrValues (mapAttrs (showSetting args) settingsInfo));
|
||||
}
|
||||
|
|
|
@ -34,16 +34,16 @@
|
|||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1670461440,
|
||||
"narHash": "sha256-jy1LB8HOMKGJEGXgzFRLDU1CBGL0/LlkolgnqIsF0D8=",
|
||||
"lastModified": 1695283060,
|
||||
"narHash": "sha256-CJz71xhCLlRkdFUSQEL0pIAAfcnWFXMzd9vXhPrnrEg=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "04a75b2eecc0acf6239acf9dd04485ff8d14f425",
|
||||
"rev": "31ed632c692e6a36cfc18083b88ece892f863ed4",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-22.11-small",
|
||||
"ref": "nixos-23.05-small",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
|
|
227
flake.nix
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
description = "The purely functional package manager";
|
||||
|
||||
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.11-small";
|
||||
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small";
|
||||
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
||||
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
|
||||
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
|
||||
|
@ -19,11 +19,16 @@
|
|||
then ""
|
||||
else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}";
|
||||
|
||||
linux32BitSystems = [ "i686-linux" ];
|
||||
linux64BitSystems = [ "x86_64-linux" "aarch64-linux" ];
|
||||
linuxSystems = linux64BitSystems ++ [ "i686-linux" ];
|
||||
systems = linuxSystems ++ [ "x86_64-darwin" "aarch64-darwin" ];
|
||||
linuxSystems = linux32BitSystems ++ linux64BitSystems;
|
||||
darwinSystems = [ "x86_64-darwin" "aarch64-darwin" ];
|
||||
systems = linuxSystems ++ darwinSystems;
|
||||
|
||||
crossSystems = [ "armv6l-linux" "armv7l-linux" ];
|
||||
crossSystems = [
|
||||
"armv6l-linux" "armv7l-linux"
|
||||
"x86_64-freebsd13" "x86_64-netbsd"
|
||||
];
|
||||
|
||||
stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ];
|
||||
|
||||
|
@ -40,12 +45,69 @@
|
|||
})
|
||||
stdenvs);
|
||||
|
||||
# Experimental fileset library: https://github.com/NixOS/nixpkgs/pull/222981
|
||||
# Not an "idiomatic" flake input because:
|
||||
# - Propagation to dependent locks: https://github.com/NixOS/nix/issues/7730
|
||||
# - Subflake would download redundant and huge parent flake
|
||||
# - No git tree hash support: https://github.com/NixOS/nix/issues/6044
|
||||
inherit (import (builtins.fetchTarball { url = "https://github.com/NixOS/nix/archive/1bdcd7fc8a6a40b2e805bad759b36e64e911036b.tar.gz"; sha256 = "sha256:14ljlpdsp4x7h1fkhbmc4bd3vsqnx8zdql4h3037wh09ad6a0893"; }))
|
||||
fileset;
|
||||
|
||||
baseFiles =
|
||||
# .gitignore has already been processed, so any changes in it are irrelevant
|
||||
# at this point. It is not represented verbatim for test purposes because
|
||||
# that would interfere with repo semantics.
|
||||
fileset.fileFilter (f: f.name != ".gitignore") ./.;
|
||||
|
||||
configureFiles = fileset.unions [
|
||||
./.version
|
||||
./configure.ac
|
||||
./m4
|
||||
# TODO: do we really need README.md? It doesn't seem used in the build.
|
||||
./README.md
|
||||
];
|
||||
|
||||
topLevelBuildFiles = fileset.unions [
|
||||
./local.mk
|
||||
./Makefile
|
||||
./Makefile.config.in
|
||||
./mk
|
||||
];
|
||||
|
||||
functionalTestFiles = fileset.unions [
|
||||
./tests/functional
|
||||
(fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
|
||||
];
|
||||
|
||||
nixSrc = fileset.toSource {
|
||||
root = ./.;
|
||||
fileset = fileset.intersect baseFiles (fileset.unions [
|
||||
configureFiles
|
||||
topLevelBuildFiles
|
||||
./boehmgc-coroutine-sp-fallback.diff
|
||||
./doc
|
||||
./misc
|
||||
./precompiled-headers.h
|
||||
./src
|
||||
./unit-test-data
|
||||
./COPYING
|
||||
./scripts/local.mk
|
||||
functionalTestFiles
|
||||
]);
|
||||
};
|
||||
|
||||
# Memoize nixpkgs for different platforms for efficiency.
|
||||
nixpkgsFor = forAllSystems
|
||||
(system: let
|
||||
make-pkgs = crossSystem: stdenv: import nixpkgs {
|
||||
inherit system crossSystem;
|
||||
localSystem = {
|
||||
inherit system;
|
||||
};
|
||||
crossSystem = if crossSystem == null then null else {
|
||||
system = crossSystem;
|
||||
} // lib.optionalAttrs (crossSystem == "x86_64-freebsd13") {
|
||||
useLLVM = true;
|
||||
};
|
||||
overlays = [
|
||||
(overlayFor (p: p.${stdenv}))
|
||||
];
|
||||
|
@ -131,9 +193,9 @@
|
|||
libarchive
|
||||
boost
|
||||
lowdown-nix
|
||||
libsodium
|
||||
]
|
||||
++ lib.optionals stdenv.isLinux [libseccomp]
|
||||
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
|
||||
++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
|
||||
|
||||
checkDeps = [
|
||||
|
@ -209,7 +271,14 @@
|
|||
"-${client.version}-against-${daemon.version}";
|
||||
inherit version;
|
||||
|
||||
src = self;
|
||||
src = fileset.toSource {
|
||||
root = ./.;
|
||||
fileset = fileset.intersect baseFiles (fileset.unions [
|
||||
configureFiles
|
||||
topLevelBuildFiles
|
||||
functionalTestFiles
|
||||
]);
|
||||
};
|
||||
|
||||
VERSION_SUFFIX = versionSuffix;
|
||||
|
||||
|
@ -219,7 +288,9 @@
|
|||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
configureFlags = testConfigureFlags; # otherwise configure fails
|
||||
configureFlags =
|
||||
testConfigureFlags # otherwise configure fails
|
||||
++ [ "--disable-build" ];
|
||||
dontBuild = true;
|
||||
doInstallCheck = true;
|
||||
|
||||
|
@ -227,7 +298,10 @@
|
|||
mkdir -p $out
|
||||
'';
|
||||
|
||||
installCheckPhase = "make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES";
|
||||
installCheckPhase = ''
|
||||
mkdir -p src/nix-channel
|
||||
make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES
|
||||
'';
|
||||
};
|
||||
|
||||
binaryTarball = nix: pkgs:
|
||||
|
@ -320,18 +394,11 @@
|
|||
};
|
||||
let
|
||||
canRunInstalled = currentStdenv.buildPlatform.canExecute currentStdenv.hostPlatform;
|
||||
|
||||
sourceByRegexInverted = rxs: origSrc: final.lib.cleanSourceWith {
|
||||
filter = (path: type:
|
||||
let relPath = final.lib.removePrefix (toString origSrc + "/") (toString path);
|
||||
in ! lib.any (re: builtins.match re relPath != null) rxs);
|
||||
src = origSrc;
|
||||
};
|
||||
in currentStdenv.mkDerivation (finalAttrs: {
|
||||
name = "nix-${version}";
|
||||
inherit version;
|
||||
|
||||
src = sourceByRegexInverted [ "tests/nixos/.*" "tests/installer/.*" ] self;
|
||||
src = nixSrc;
|
||||
VERSION_SUFFIX = versionSuffix;
|
||||
|
||||
outputs = [ "out" "dev" "doc" ];
|
||||
|
@ -407,39 +474,13 @@
|
|||
|
||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
||||
|
||||
passthru.perl-bindings = with final; perl.pkgs.toPerlModule (currentStdenv.mkDerivation {
|
||||
name = "nix-perl-${version}";
|
||||
|
||||
src = self;
|
||||
|
||||
nativeBuildInputs =
|
||||
[ buildPackages.autoconf-archive
|
||||
buildPackages.autoreconfHook
|
||||
buildPackages.pkg-config
|
||||
];
|
||||
|
||||
buildInputs =
|
||||
[ nix
|
||||
curl
|
||||
bzip2
|
||||
xz
|
||||
pkgs.perl
|
||||
boost
|
||||
]
|
||||
++ lib.optional (currentStdenv.isLinux || currentStdenv.isDarwin) libsodium
|
||||
++ lib.optional currentStdenv.isDarwin darwin.apple_sdk.frameworks.Security;
|
||||
|
||||
configureFlags = [
|
||||
"--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}"
|
||||
"--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}"
|
||||
];
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
postUnpack = "sourceRoot=$sourceRoot/perl";
|
||||
});
|
||||
passthru.perl-bindings = final.callPackage ./perl {
|
||||
inherit fileset;
|
||||
stdenv = currentStdenv;
|
||||
};
|
||||
|
||||
meta.platforms = lib.platforms.unix;
|
||||
meta.mainProgram = "nix";
|
||||
});
|
||||
|
||||
lowdown-nix = with final; currentStdenv.mkDerivation rec {
|
||||
|
@ -460,18 +501,6 @@
|
|||
};
|
||||
};
|
||||
|
||||
nixos-lib = import (nixpkgs + "/nixos/lib") { };
|
||||
|
||||
# https://nixos.org/manual/nixos/unstable/index.html#sec-calling-nixos-tests
|
||||
runNixOSTestFor = system: test: nixos-lib.runTest {
|
||||
imports = [ test ];
|
||||
hostPkgs = nixpkgsFor.${system}.native;
|
||||
defaults = {
|
||||
nixpkgs.pkgs = nixpkgsFor.${system}.native;
|
||||
};
|
||||
_module.args.nixpkgs = nixpkgs;
|
||||
};
|
||||
|
||||
in {
|
||||
# A Nixpkgs overlay that overrides the 'nix' and
|
||||
# 'nix.perl-bindings' packages.
|
||||
|
@ -529,7 +558,7 @@
|
|||
releaseTools.coverageAnalysis {
|
||||
name = "nix-coverage-${version}";
|
||||
|
||||
src = self;
|
||||
src = nixSrc;
|
||||
|
||||
configureFlags = testConfigureFlags;
|
||||
|
||||
|
@ -546,6 +575,8 @@
|
|||
lcovFilter = [ "*/boost/*" "*-tab.*" ];
|
||||
|
||||
hardeningDisable = ["fortify"];
|
||||
|
||||
NIX_CFLAGS_COMPILE = "-DCOVERAGE=1";
|
||||
};
|
||||
|
||||
# API docs for Nix's unstable internal C++ interfaces.
|
||||
|
@ -557,7 +588,7 @@
|
|||
pname = "nix-internal-api-docs";
|
||||
inherit version;
|
||||
|
||||
src = self;
|
||||
src = nixSrc;
|
||||
|
||||
configureFlags = testConfigureFlags ++ internalApiDocsConfigureFlags;
|
||||
|
||||
|
@ -576,47 +607,29 @@
|
|||
};
|
||||
|
||||
# System tests.
|
||||
tests.authorization = runNixOSTestFor "x86_64-linux" ./tests/nixos/authorization.nix;
|
||||
tests = import ./tests/nixos { inherit lib nixpkgs nixpkgsFor; } // {
|
||||
|
||||
tests.remoteBuilds = runNixOSTestFor "x86_64-linux" ./tests/nixos/remote-builds.nix;
|
||||
# Make sure that nix-env still produces the exact same result
|
||||
# on a particular version of Nixpkgs.
|
||||
evalNixpkgs =
|
||||
with nixpkgsFor.x86_64-linux.native;
|
||||
runCommand "eval-nixos" { buildInputs = [ nix ]; }
|
||||
''
|
||||
type -p nix-env
|
||||
# Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
|
||||
time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
|
||||
[[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]]
|
||||
mkdir $out
|
||||
'';
|
||||
|
||||
tests.nix-copy-closure = runNixOSTestFor "x86_64-linux" ./tests/nixos/nix-copy-closure.nix;
|
||||
|
||||
tests.nix-copy = runNixOSTestFor "x86_64-linux" ./tests/nixos/nix-copy.nix;
|
||||
|
||||
tests.nssPreload = runNixOSTestFor "x86_64-linux" ./tests/nixos/nss-preload.nix;
|
||||
|
||||
tests.githubFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/github-flakes.nix;
|
||||
|
||||
tests.sourcehutFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/sourcehut-flakes.nix;
|
||||
|
||||
tests.tarballFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/tarball-flakes.nix;
|
||||
|
||||
tests.containers = runNixOSTestFor "x86_64-linux" ./tests/nixos/containers/containers.nix;
|
||||
|
||||
tests.setuid = lib.genAttrs
|
||||
["i686-linux" "x86_64-linux"]
|
||||
(system: runNixOSTestFor system ./tests/nixos/setuid.nix);
|
||||
|
||||
|
||||
# Make sure that nix-env still produces the exact same result
|
||||
# on a particular version of Nixpkgs.
|
||||
tests.evalNixpkgs =
|
||||
with nixpkgsFor.x86_64-linux.native;
|
||||
runCommand "eval-nixos" { buildInputs = [ nix ]; }
|
||||
''
|
||||
type -p nix-env
|
||||
# Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
|
||||
time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
|
||||
[[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]]
|
||||
mkdir $out
|
||||
'';
|
||||
|
||||
tests.nixpkgsLibTests =
|
||||
forAllSystems (system:
|
||||
import (nixpkgs + "/lib/tests/release.nix")
|
||||
{ pkgs = nixpkgsFor.${system}.native; }
|
||||
);
|
||||
nixpkgsLibTests =
|
||||
forAllSystems (system:
|
||||
import (nixpkgs + "/lib/tests/release.nix")
|
||||
{ pkgs = nixpkgsFor.${system}.native;
|
||||
nixVersions = [ self.packages.${system}.nix ];
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" {
|
||||
pkgs = nixpkgsFor.x86_64-linux.native;
|
||||
|
@ -687,6 +700,9 @@
|
|||
|
||||
devShells = let
|
||||
makeShell = pkgs: stdenv:
|
||||
let
|
||||
canRunInstalled = stdenv.buildPlatform.canExecute stdenv.hostPlatform;
|
||||
in
|
||||
with commonDeps { inherit pkgs; };
|
||||
stdenv.mkDerivation {
|
||||
name = "nix";
|
||||
|
@ -694,13 +710,18 @@
|
|||
outputs = [ "out" "dev" "doc" ];
|
||||
|
||||
nativeBuildInputs = nativeBuildDeps
|
||||
++ (lib.optionals stdenv.cc.isClang [ pkgs.bear pkgs.clang-tools ]);
|
||||
++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear
|
||||
++ lib.optional
|
||||
(stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform)
|
||||
pkgs.buildPackages.clang-tools
|
||||
;
|
||||
|
||||
buildInputs = buildDeps ++ propagatedDeps
|
||||
++ awsDeps ++ checkDeps ++ internalApiDocsDeps;
|
||||
|
||||
configureFlags = configureFlags
|
||||
++ testConfigureFlags ++ internalApiDocsConfigureFlags;
|
||||
++ testConfigureFlags ++ internalApiDocsConfigureFlags
|
||||
++ lib.optional (!canRunInstalled) "--disable-doc-gen";
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
|
|
2
local.mk
|
@ -1,5 +1,3 @@
|
|||
clean-files += Makefile.config
|
||||
|
||||
GLOBAL_CXXFLAGS += -Wno-deprecated-declarations -Werror=switch
|
||||
# Allow switch-enum to be overridden for files that do not support it, usually because of dependency headers.
|
||||
ERROR_SWITCH_ENUM = -Werror=switch-enum
|
||||
|
|
|
@ -50,7 +50,9 @@ The team meets twice a week:
|
|||
1. Code review on pull requests from [In review](#in-review).
|
||||
2. Other chores and tasks.
|
||||
|
||||
Meeting notes are collected on a [collaborative scratchpad](https://pad.lassul.us/Cv7FpYx-Ri-4VjUykQOLAw), and published on Discourse under the [Nix category](https://discourse.nixos.org/c/dev/nix/50).
|
||||
Meeting notes are collected on a [collaborative scratchpad](https://pad.lassul.us/Cv7FpYx-Ri-4VjUykQOLAw).
|
||||
Notes on issues and pull requests are posted as comments and linked from the meeting notes, so they are easy to find from both places.
|
||||
[All meeting notes](https://discourse.nixos.org/search?expanded=true&q=Nix%20team%20meeting%20minutes%20%23%20%23dev%3Anix%20in%3Atitle%20order%3Alatest_topic) are published on Discourse under the [Nix category](https://discourse.nixos.org/c/dev/nix/50).
|
||||
|
||||
## Project board protocol
|
||||
|
||||
|
@ -96,8 +98,10 @@ What constitutes a trivial pull request is up to maintainers' judgement.
|
|||
Pull requests and issues that are deemed important and controversial are discussed by the team during discussion meetings.
|
||||
|
||||
This may be where the merit of the change itself or the implementation strategy is contested by a team member.
|
||||
Whenever the discussion opens up questions about the process or this team's goals, this may indicate that the change is too large in scope.
|
||||
In that case it is taken off the board to be reconsidered by the author or broken down into smaller pieces that are less far-reaching and can be reviewed independently.
|
||||
|
||||
As a general guideline, the order of items is determined as follows:
|
||||
As a general guideline, the order of items to discuss is determined as follows:
|
||||
|
||||
- Prioritise pull requests over issues
|
||||
|
||||
|
|
|
@ -1,11 +1,15 @@
|
|||
test_dir=tests/functional
|
||||
|
||||
test=$(echo -n "$test" | sed -e "s|^$test_dir/||")
|
||||
|
||||
TESTS_ENVIRONMENT=("TEST_NAME=${test%.*}" 'NIX_REMOTE=')
|
||||
|
||||
: ${BASH:=/usr/bin/env bash}
|
||||
|
||||
init_test () {
|
||||
cd tests && env "${TESTS_ENVIRONMENT[@]}" $BASH -e init.sh 2>/dev/null > /dev/null
|
||||
cd "$test_dir" && env "${TESTS_ENVIRONMENT[@]}" $BASH -e init.sh 2>/dev/null > /dev/null
|
||||
}
|
||||
|
||||
run_test_proper () {
|
||||
cd $(dirname $test) && env "${TESTS_ENVIRONMENT[@]}" $BASH -e $(basename $test)
|
||||
cd "$test_dir/$(dirname $test)" && env "${TESTS_ENVIRONMENT[@]}" $BASH -e $(basename $test)
|
||||
}
|
||||
|
|
|
@ -87,6 +87,6 @@ define build-program
|
|||
# Phony target to run this program (typically as a dependency of 'check').
|
||||
.PHONY: $(1)_RUN
|
||||
$(1)_RUN: $$($(1)_PATH)
|
||||
$(trace-test) $$($(1)_PATH)
|
||||
$(trace-test) $$(UNIT_TEST_ENV) $$($(1)_PATH)
|
||||
|
||||
endef
|
||||
|
|
|
@ -1,6 +1,12 @@
|
|||
makefiles = local.mk
|
||||
|
||||
GLOBAL_CXXFLAGS += -g -Wall -std=c++2a -I ../src
|
||||
GLOBAL_CXXFLAGS += -g -Wall -std=c++2a
|
||||
|
||||
# A convenience for concurrent development of Nix and its Perl bindings.
|
||||
# Not needed in a standalone build of the Perl bindings.
|
||||
ifneq ("$(wildcard ../src)", "")
|
||||
GLOBAL_CXXFLAGS += -I ../src
|
||||
endif
|
||||
|
||||
-include Makefile.config
|
||||
|
||||
|
|
51
perl/default.nix
Normal file
|
@ -0,0 +1,51 @@
|
|||
{ lib, fileset
|
||||
, stdenv
|
||||
, perl, perlPackages
|
||||
, autoconf-archive, autoreconfHook, pkg-config
|
||||
, nix, curl, bzip2, xz, boost, libsodium, darwin
|
||||
}:
|
||||
|
||||
perl.pkgs.toPerlModule (stdenv.mkDerivation {
|
||||
name = "nix-perl-${nix.version}";
|
||||
|
||||
src = fileset.toSource {
|
||||
root = ../.;
|
||||
fileset = fileset.unions [
|
||||
../.version
|
||||
../m4
|
||||
../mk
|
||||
./MANIFEST
|
||||
./Makefile
|
||||
./Makefile.config.in
|
||||
./configure.ac
|
||||
./lib
|
||||
./local.mk
|
||||
];
|
||||
};
|
||||
|
||||
nativeBuildInputs =
|
||||
[ autoconf-archive
|
||||
autoreconfHook
|
||||
pkg-config
|
||||
];
|
||||
|
||||
buildInputs =
|
||||
[ nix
|
||||
curl
|
||||
bzip2
|
||||
xz
|
||||
perl
|
||||
boost
|
||||
]
|
||||
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
|
||||
++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security;
|
||||
|
||||
configureFlags = [
|
||||
"--with-dbi=${perlPackages.DBI}/${perl.libPrefix}"
|
||||
"--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}"
|
||||
];
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
postUnpack = "sourceRoot=$sourceRoot/perl";
|
||||
})
|
|
@ -78,7 +78,7 @@ SV * queryReferences(char * path)
|
|||
SV * queryPathHash(char * path)
|
||||
PPCODE:
|
||||
try {
|
||||
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
|
||||
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Base32, true);
|
||||
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
|
@ -104,7 +104,7 @@ SV * queryPathInfo(char * path, int base32)
|
|||
XPUSHs(&PL_sv_undef);
|
||||
else
|
||||
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
|
||||
auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
|
||||
auto s = info->narHash.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, true);
|
||||
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
||||
mXPUSHi(info->registrationTime);
|
||||
mXPUSHi(info->narSize);
|
||||
|
@ -206,7 +206,7 @@ SV * hashPath(char * algo, int base32, char * path)
|
|||
PPCODE:
|
||||
try {
|
||||
Hash h = hashPath(parseHashType(algo), path).first;
|
||||
auto s = h.to_string(base32 ? Base32 : Base16, false);
|
||||
auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
|
||||
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
|
@ -217,7 +217,7 @@ SV * hashFile(char * algo, int base32, char * path)
|
|||
PPCODE:
|
||||
try {
|
||||
Hash h = hashFile(parseHashType(algo), path);
|
||||
auto s = h.to_string(base32 ? Base32 : Base16, false);
|
||||
auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
|
||||
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
|
@ -228,7 +228,7 @@ SV * hashString(char * algo, int base32, char * s)
|
|||
PPCODE:
|
||||
try {
|
||||
Hash h = hashString(parseHashType(algo), s);
|
||||
auto s = h.to_string(base32 ? Base32 : Base16, false);
|
||||
auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
|
||||
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
|
@ -239,7 +239,7 @@ SV * convertHash(char * algo, char * s, int toBase32)
|
|||
PPCODE:
|
||||
try {
|
||||
auto h = Hash::parseAny(s, parseHashType(algo));
|
||||
auto s = h.to_string(toBase32 ? Base32 : Base16, false);
|
||||
auto s = h.to_string(toBase32 ? HashFormat::Base32 : HashFormat::Base16, false);
|
||||
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
||||
} catch (Error & e) {
|
||||
croak("%s", e.what());
|
||||
|
@ -324,7 +324,7 @@ SV * derivationFromPath(char * drvPath)
|
|||
hv_stores(hash, "outputs", newRV((SV *) outputs));
|
||||
|
||||
AV * inputDrvs = newAV();
|
||||
for (auto & i : drv.inputDrvs)
|
||||
for (auto & i : drv.inputDrvs.map)
|
||||
av_push(inputDrvs, newSVpv(store()->printStorePath(i.first).c_str(), 0)); // !!! ignores i->second
|
||||
hv_stores(hash, "inputDrvs", newRV((SV *) inputDrvs));
|
||||
|
||||
|
|
|
@ -452,6 +452,14 @@ EOF
|
|||
# a row for different files.
|
||||
if [ -e "$profile_target$PROFILE_BACKUP_SUFFIX" ]; then
|
||||
# this backup process first released in Nix 2.1
|
||||
|
||||
if diff -q "$profile_target$PROFILE_BACKUP_SUFFIX" "$profile_target" > /dev/null; then
|
||||
# a backup file for the rc-file exist, but they are identical,
|
||||
# so we can safely ignore it and overwrite it with the same
|
||||
# content later
|
||||
continue
|
||||
fi
|
||||
|
||||
failure <<EOF
|
||||
I back up shell profile/rc scripts before I add Nix to them.
|
||||
I need to back up $profile_target to $profile_target$PROFILE_BACKUP_SUFFIX,
|
||||
|
|
|
@ -314,7 +314,7 @@ connected:
|
|||
//
|
||||
// 2. Changing the `inputSrcs` set changes the associated
|
||||
// output ids, which break CA derivations
|
||||
if (!drv.inputDrvs.empty())
|
||||
if (!drv.inputDrvs.map.empty())
|
||||
drv.inputSrcs = store->parseStorePathSet(inputs);
|
||||
optResult = sshStore->buildDerivation(*drvPath, (const BasicDerivation &) drv);
|
||||
auto & result = *optResult;
|
||||
|
@ -322,7 +322,12 @@ connected:
|
|||
throw Error("build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg);
|
||||
} else {
|
||||
copyClosure(*store, *sshStore, StorePathSet {*drvPath}, NoRepair, NoCheckSigs, substitute);
|
||||
auto res = sshStore->buildPathsWithResults({ DerivedPath::Built { *drvPath, OutputsSpec::All {} } });
|
||||
auto res = sshStore->buildPathsWithResults({
|
||||
DerivedPath::Built {
|
||||
.drvPath = makeConstantStorePathRef(*drvPath),
|
||||
.outputs = OutputsSpec::All {},
|
||||
}
|
||||
});
|
||||
// One path to build should produce exactly one build result
|
||||
assert(res.size() == 1);
|
||||
optResult = std::move(res[0]);
|
||||
|
|
|
@ -8,13 +8,39 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
nlohmann::json BuiltPath::Built::toJSON(ref<Store> store) const {
|
||||
nlohmann::json res;
|
||||
res["drvPath"] = store->printStorePath(drvPath);
|
||||
for (const auto& [output, path] : outputs) {
|
||||
res["outputs"][output] = store->printStorePath(path);
|
||||
#define CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, COMPARATOR) \
|
||||
bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \
|
||||
{ \
|
||||
const MY_TYPE* me = this; \
|
||||
auto fields1 = std::make_tuple<const CHILD_TYPE &, const FIELD_TYPE &>(*me->drvPath, me->FIELD); \
|
||||
me = &other; \
|
||||
auto fields2 = std::make_tuple<const CHILD_TYPE &, const FIELD_TYPE &>(*me->drvPath, me->FIELD); \
|
||||
return fields1 COMPARATOR fields2; \
|
||||
}
|
||||
return res;
|
||||
#define CMP(CHILD_TYPE, MY_TYPE, FIELD) \
|
||||
CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, ==) \
|
||||
CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, !=) \
|
||||
CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, <)
|
||||
|
||||
#define FIELD_TYPE std::pair<std::string, StorePath>
|
||||
CMP(SingleBuiltPath, SingleBuiltPathBuilt, output)
|
||||
#undef FIELD_TYPE
|
||||
|
||||
#define FIELD_TYPE std::map<std::string, StorePath>
|
||||
CMP(SingleBuiltPath, BuiltPathBuilt, outputs)
|
||||
#undef FIELD_TYPE
|
||||
|
||||
#undef CMP
|
||||
#undef CMP_ONE
|
||||
|
||||
StorePath SingleBuiltPath::outPath() const
|
||||
{
|
||||
return std::visit(
|
||||
overloaded{
|
||||
[](const SingleBuiltPath::Opaque & p) { return p.path; },
|
||||
[](const SingleBuiltPath::Built & b) { return b.output.second; },
|
||||
}, raw()
|
||||
);
|
||||
}
|
||||
|
||||
StorePathSet BuiltPath::outPaths() const
|
||||
|
@ -32,6 +58,62 @@ StorePathSet BuiltPath::outPaths() const
|
|||
);
|
||||
}
|
||||
|
||||
SingleDerivedPath::Built SingleBuiltPath::Built::discardOutputPath() const
|
||||
{
|
||||
return SingleDerivedPath::Built {
|
||||
.drvPath = make_ref<SingleDerivedPath>(drvPath->discardOutputPath()),
|
||||
.output = output.first,
|
||||
};
|
||||
}
|
||||
|
||||
SingleDerivedPath SingleBuiltPath::discardOutputPath() const
|
||||
{
|
||||
return std::visit(
|
||||
overloaded{
|
||||
[](const SingleBuiltPath::Opaque & p) -> SingleDerivedPath {
|
||||
return p;
|
||||
},
|
||||
[](const SingleBuiltPath::Built & b) -> SingleDerivedPath {
|
||||
return b.discardOutputPath();
|
||||
},
|
||||
}, raw()
|
||||
);
|
||||
}
|
||||
|
||||
nlohmann::json BuiltPath::Built::toJSON(const Store & store) const
|
||||
{
|
||||
nlohmann::json res;
|
||||
res["drvPath"] = drvPath->toJSON(store);
|
||||
for (const auto & [outputName, outputPath] : outputs) {
|
||||
res["outputs"][outputName] = store.printStorePath(outputPath);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
nlohmann::json SingleBuiltPath::Built::toJSON(const Store & store) const
|
||||
{
|
||||
nlohmann::json res;
|
||||
res["drvPath"] = drvPath->toJSON(store);
|
||||
auto & [outputName, outputPath] = output;
|
||||
res["output"] = outputName;
|
||||
res["outputPath"] = store.printStorePath(outputPath);
|
||||
return res;
|
||||
}
|
||||
|
||||
nlohmann::json SingleBuiltPath::toJSON(const Store & store) const
|
||||
{
|
||||
return std::visit([&](const auto & buildable) {
|
||||
return buildable.toJSON(store);
|
||||
}, raw());
|
||||
}
|
||||
|
||||
nlohmann::json BuiltPath::toJSON(const Store & store) const
|
||||
{
|
||||
return std::visit([&](const auto & buildable) {
|
||||
return buildable.toJSON(store);
|
||||
}, raw());
|
||||
}
|
||||
|
||||
RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
|
||||
{
|
||||
RealisedPath::Set res;
|
||||
|
@ -40,7 +122,7 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
|
|||
[&](const BuiltPath::Opaque & p) { res.insert(p.path); },
|
||||
[&](const BuiltPath::Built & p) {
|
||||
auto drvHashes =
|
||||
staticOutputHashes(store, store.readDerivation(p.drvPath));
|
||||
staticOutputHashes(store, store.readDerivation(p.drvPath->outPath()));
|
||||
for (auto& [outputName, outputPath] : p.outputs) {
|
||||
if (experimentalFeatureSettings.isEnabled(
|
||||
Xp::CaDerivations)) {
|
||||
|
@ -48,7 +130,7 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
|
|||
if (!drvOutput)
|
||||
throw Error(
|
||||
"the derivation '%s' has unrealised output '%s' (derived-path.cc/toRealisedPaths)",
|
||||
store.printStorePath(p.drvPath), outputName);
|
||||
store.printStorePath(p.drvPath->outPath()), outputName);
|
||||
auto thisRealisation = store.queryRealisation(
|
||||
DrvOutput{*drvOutput, outputName});
|
||||
assert(thisRealisation); // We’ve built it, so we must
|
||||
|
|
|
@ -3,19 +3,64 @@

namespace nix {

struct SingleBuiltPath;

struct SingleBuiltPathBuilt {
ref<SingleBuiltPath> drvPath;
std::pair<std::string, StorePath> output;

SingleDerivedPathBuilt discardOutputPath() const;

std::string to_string(const Store & store) const;
static SingleBuiltPathBuilt parse(const Store & store, std::string_view, std::string_view);
nlohmann::json toJSON(const Store & store) const;

DECLARE_CMP(SingleBuiltPathBuilt);
};

using _SingleBuiltPathRaw = std::variant<
DerivedPathOpaque,
SingleBuiltPathBuilt
>;

struct SingleBuiltPath : _SingleBuiltPathRaw {
using Raw = _SingleBuiltPathRaw;
using Raw::Raw;

using Opaque = DerivedPathOpaque;
using Built = SingleBuiltPathBuilt;

inline const Raw & raw() const {
return static_cast<const Raw &>(*this);
}

StorePath outPath() const;

SingleDerivedPath discardOutputPath() const;

static SingleBuiltPath parse(const Store & store, std::string_view);
nlohmann::json toJSON(const Store & store) const;
};

static inline ref<SingleBuiltPath> staticDrv(StorePath drvPath)
{
return make_ref<SingleBuiltPath>(SingleBuiltPath::Opaque { drvPath });
}

/**
* A built derived path with hints in the form of optional concrete output paths.
*
* See 'BuiltPath' for more of an explanation.
*/
struct BuiltPathBuilt {
StorePath drvPath;
ref<SingleBuiltPath> drvPath;
std::map<std::string, StorePath> outputs;

nlohmann::json toJSON(ref<Store> store) const;
static BuiltPathBuilt parse(const Store & store, std::string_view);
std::string to_string(const Store & store) const;
static BuiltPathBuilt parse(const Store & store, std::string_view, std::string_view);
nlohmann::json toJSON(const Store & store) const;

GENERATE_CMP(BuiltPathBuilt, me->drvPath, me->outputs);
DECLARE_CMP(BuiltPathBuilt);
};

using _BuiltPathRaw = std::variant<

@ -41,6 +86,7 @@ struct BuiltPath : _BuiltPathRaw {
StorePathSet outPaths() const;
RealisedPath::Set toRealisedPaths(Store & store) const;

nlohmann::json toJSON(const Store & store) const;
};

typedef std::vector<BuiltPath> BuiltPaths;
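A hedged construction example for the new shape of BuiltPath::Built declared above, where drvPath is now a ref<SingleBuiltPath> rather than a bare StorePath; the two store paths are assumed to be valid:

nix::BuiltPath makeBuilt(nix::StorePath drvPath, nix::StorePath outPath)
{
    return nix::BuiltPath::Built {
        // staticDrv() wraps a statically known .drv path in SingleBuiltPath::Opaque.
        .drvPath = nix::staticDrv(std::move(drvPath)),
        .outputs = {{"out", std::move(outPath)}},
    };
}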
@ -98,7 +98,7 @@ EvalCommand::EvalCommand()
EvalCommand::~EvalCommand()
{
if (evalState)
evalState->printStats();
evalState->maybePrintStats();
}

ref<Store> EvalCommand::getEvalStore()
@ -34,21 +34,28 @@ struct NixMultiCommand : virtual MultiCommand, virtual Command
// For the overloaded run methods
#pragma GCC diagnostic ignored "-Woverloaded-virtual"

/* A command that requires a Nix store. */
/**
* A command that requires a \ref Store "Nix store".
*/
struct StoreCommand : virtual Command
{
StoreCommand();
void run() override;
ref<Store> getStore();
virtual ref<Store> createStore();
/**
* Main entry point, with a `Store` provided
*/
virtual void run(ref<Store>) = 0;

private:
std::shared_ptr<Store> _store;
};

/* A command that copies something between `--from` and `--to`
stores. */
/**
* A command that copies something between `--from` and `--to` \ref
* Store stores.
*/
struct CopyCommand : virtual StoreCommand
{
std::string srcUri, dstUri;

@ -60,6 +67,9 @@ struct CopyCommand : virtual StoreCommand
ref<Store> getDstStore();
};

/**
* A command that needs to evaluate Nix language expressions.
*/
struct EvalCommand : virtual StoreCommand, MixEvalArgs
{
bool startReplOnEvalErrors = false;

@ -79,20 +89,26 @@ private:
std::shared_ptr<EvalState> evalState;
};

/**
* A mixin class for commands that process flakes, adding a few standard
* flake-related options/flags.
*/
struct MixFlakeOptions : virtual Args, EvalCommand
{
flake::LockFlags lockFlags;

std::optional<std::string> needsFlakeInputCompletion = {};

MixFlakeOptions();

virtual std::vector<std::string> getFlakesForCompletion()
/**
* The completion for some of these flags depends on the flake(s) in
* question.
*
* This method should be implemented to gather all flakerefs the
* command is operating with (presumably specified via some other
* arguments) so that the completions for these flags can use them.
*/
virtual std::vector<FlakeRef> getFlakeRefsForCompletion()
{ return {}; }

void completeFlakeInput(std::string_view prefix);

void completionHook() override;
};

struct SourceExprCommand : virtual Args, MixFlakeOptions

@ -112,15 +128,35 @@ struct SourceExprCommand : virtual Args, MixFlakeOptions

virtual Strings getDefaultFlakeAttrPathPrefixes();

void completeInstallable(std::string_view prefix);
/**
* Complete an installable from the given prefix.
*/
void completeInstallable(AddCompletions & completions, std::string_view prefix);

/**
* Convenience wrapper around the underlying function to make setting the
* callback easier.
*/
CompleterClosure getCompleteInstallable();
};

/**
* A mixin class for commands that need a read-only flag.
*
* What exactly is "read-only" is unspecified, but it will usually be
* the \ref Store "Nix store".
*/
struct MixReadOnlyOption : virtual Args
{
MixReadOnlyOption();
};

/* Like InstallablesCommand but the installables are not loaded */
/**
* Like InstallablesCommand but the installables are not loaded.
*
* This is needed by `CmdRepl` which wants to load (and reload) the
* installables itself.
*/
struct RawInstallablesCommand : virtual Args, SourceExprCommand
{
RawInstallablesCommand();

@ -129,19 +165,22 @@ struct RawInstallablesCommand : virtual Args, SourceExprCommand

void run(ref<Store> store) override;

// FIXME make const after CmdRepl's override is fixed up
// FIXME make const after `CmdRepl`'s override is fixed up
virtual void applyDefaultInstallables(std::vector<std::string> & rawInstallables);

bool readFromStdIn = false;

std::vector<std::string> getFlakesForCompletion() override;
std::vector<FlakeRef> getFlakeRefsForCompletion() override;

private:

std::vector<std::string> rawInstallables;
};
/* A command that operates on a list of "installables", which can be
store paths, attribute paths, Nix expressions, etc. */

/**
* A command that operates on a list of "installables", which can be
* store paths, attribute paths, Nix expressions, etc.
*/
struct InstallablesCommand : RawInstallablesCommand
{
virtual void run(ref<Store> store, Installables && installables) = 0;

@ -149,7 +188,9 @@ struct InstallablesCommand : RawInstallablesCommand
void run(ref<Store> store, std::vector<std::string> && rawInstallables) override;
};

/* A command that operates on exactly one "installable" */
/**
* A command that operates on exactly one "installable".
*/
struct InstallableCommand : virtual Args, SourceExprCommand
{
InstallableCommand();

@ -158,10 +199,7 @@ struct InstallableCommand : virtual Args, SourceExprCommand

void run(ref<Store> store) override;

std::vector<std::string> getFlakesForCompletion() override
{
return {_installable};
}
std::vector<FlakeRef> getFlakeRefsForCompletion() override;

private:

@ -175,7 +213,12 @@ struct MixOperateOnOptions : virtual Args
MixOperateOnOptions();
};

/* A command that operates on zero or more store paths. */
/**
* A command that operates on zero or more extant store paths.
*
* If the argument the user passes is some sort of recipe for a path
* not yet built, it must be built first.
*/
struct BuiltPathsCommand : InstallablesCommand, virtual MixOperateOnOptions
{
private:

@ -207,7 +250,9 @@ struct StorePathsCommand : public BuiltPathsCommand
void run(ref<Store> store, BuiltPaths && paths) override;
};

/* A command that operates on exactly one store path. */
/**
* A command that operates on exactly one store path.
*/
struct StorePathCommand : public StorePathsCommand
{
virtual void run(ref<Store> store, const StorePath & storePath) = 0;

@ -215,7 +260,9 @@ struct StorePathCommand : public StorePathsCommand
void run(ref<Store> store, StorePaths && storePaths) override;
};

/* A helper class for registering commands globally. */
/**
* A helper class for registering \ref Command commands globally.
*/
struct RegisterCommand
{
typedef std::map<std::vector<std::string>, std::function<ref<Command>()>> Commands;

@ -271,13 +318,18 @@ struct MixEnvironment : virtual Args {

MixEnvironment();

/* Modify global environ based on ignoreEnvironment, keep, and unset. It's expected that exec will be called before this class goes out of scope, otherwise environ will become invalid. */
/***
* Modify global environ based on `ignoreEnvironment`, `keep`, and
* `unset`. It's expected that exec will be called before this class
* goes out of scope, otherwise `environ` will become invalid.
*/
void setEnviron();
};

void completeFlakeRef(ref<Store> store, std::string_view prefix);
void completeFlakeRef(AddCompletions & completions, ref<Store> store, std::string_view prefix);

void completeFlakeRefWithFragment(
AddCompletions & completions,
ref<EvalState> evalState,
flake::LockFlags lockFlags,
Strings attrPathPrefixes,
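A hedged sketch, built only from pieces shown in this change, of how a command wires the new AddCompletions-based completer: expectArgs() now passes the completion sink through instead of writing to a global:

expectArgs({
    .label = "installables",
    .handler = {&rawInstallables},
    // The sink receives candidates via completions.add(); here we simply
    // delegate to the shared installable completer.
    .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
        completeInstallable(completions, prefix);
    }},
});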
@ -9,6 +9,7 @@
#include "flake/flakeref.hh"
#include "store-api.hh"
#include "command.hh"
#include "tarball.hh"

namespace nix {

@ -132,8 +133,8 @@ MixEvalArgs::MixEvalArgs()
if (to.subdir != "") extraAttrs["dir"] = to.subdir;
fetchers::overrideRegistry(from.input, to.input, extraAttrs);
}},
.completer = {[&](size_t, std::string_view prefix) {
completeFlakeRef(openStore(), prefix);
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeRef(completions, openStore(), prefix);
}}
});

@ -168,14 +169,14 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s)
{
if (EvalSettings::isPseudoUrl(s)) {
auto storePath = fetchers::downloadTarball(
state.store, EvalSettings::resolvePseudoUrl(s), "source", false).tree.storePath;
state.store, EvalSettings::resolvePseudoUrl(s), "source", false).storePath;
return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
}

else if (hasPrefix(s, "flake:")) {
experimentalFeatureSettings.require(Xp::Flakes);
auto flakeRef = parseFlakeRef(std::string(s.substr(6)), {}, true, false);
auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first.storePath;
auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first;
return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
}
@ -80,7 +80,7 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths()
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
return e;
},
}, extendedOutputsSpec.raw());
}, extendedOutputsSpec.raw);

auto [iter, didInsert] = byDrvPath.emplace(*drvPath, newOutputs);

@ -92,10 +92,11 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths()
for (auto & [drvPath, outputs] : byDrvPath)
res.push_back({
.path = DerivedPath::Built {
.drvPath = drvPath,
.drvPath = makeConstantStorePathRef(drvPath),
.outputs = outputs,
},
.info = make_ref<ExtraPathInfoValue>(ExtraPathInfoValue::Value {
.extendedOutputsSpec = outputs,
/* FIXME: reconsider backwards compatibility above
so we can fill in this info. */
}),

@ -114,7 +115,7 @@ InstallableAttrPath InstallableAttrPath::parse(
return {
state, cmd, v,
prefix == "." ? "" : std::string { prefix },
extendedOutputsSpec
std::move(extendedOutputsSpec),
};
}
|
|||
|
||||
std::optional<StorePath> InstallableDerivedPath::getStorePath()
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
return bfd.drvPath;
|
||||
},
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
return bo.path;
|
||||
},
|
||||
}, derivedPath.raw());
|
||||
return derivedPath.getBaseStorePath();
|
||||
}
|
||||
|
||||
InstallableDerivedPath InstallableDerivedPath::parse(
|
||||
|
@ -42,7 +35,7 @@ InstallableDerivedPath InstallableDerivedPath::parse(
|
|||
// Remove this prior to stabilizing the new CLI.
|
||||
if (storePath.isDerivation()) {
|
||||
auto oldDerivedPath = DerivedPath::Built {
|
||||
.drvPath = storePath,
|
||||
.drvPath = makeConstantStorePathRef(storePath),
|
||||
.outputs = OutputsSpec::All { },
|
||||
};
|
||||
warn(
|
||||
|
@ -55,12 +48,14 @@ InstallableDerivedPath InstallableDerivedPath::parse(
|
|||
},
|
||||
// If the user did use ^, we just do exactly what is written.
|
||||
[&](const ExtendedOutputsSpec::Explicit & outputSpec) -> DerivedPath {
|
||||
auto drv = make_ref<SingleDerivedPath>(SingleDerivedPath::parse(*store, prefix));
|
||||
drvRequireExperiment(*drv);
|
||||
return DerivedPath::Built {
|
||||
.drvPath = store->parseStorePath(prefix),
|
||||
.drvPath = std::move(drv),
|
||||
.outputs = outputSpec,
|
||||
};
|
||||
},
|
||||
}, extendedOutputsSpec.raw());
|
||||
}, extendedOutputsSpec.raw);
|
||||
return InstallableDerivedPath {
|
||||
store,
|
||||
std::move(derivedPath),
|
||||
|
|
|
@ -28,6 +28,11 @@ namespace nix {
std::vector<std::string> InstallableFlake::getActualAttrPaths()
{
std::vector<std::string> res;
if (attrPaths.size() == 1 && attrPaths.front().starts_with(".")){
attrPaths.front().erase(0,1);
res.push_back(attrPaths.front());
return res;
}

for (auto & prefix : prefixes)
res.push_back(prefix + *attrPaths.begin());

@ -118,7 +123,7 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()

return {{
.path = DerivedPath::Built {
.drvPath = std::move(drvPath),
.drvPath = makeConstantStorePathRef(std::move(drvPath)),
.outputs = std::visit(overloaded {
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
std::set<std::string> outputsToInstall;

@ -141,7 +146,7 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
return e;
},
}, extendedOutputsSpec.raw()),
}, extendedOutputsSpec.raw),
},
.info = make_ref<ExtraPathInfoFlake>(
ExtraPathInfoValue::Value {

@ -55,7 +55,8 @@ std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths

else if (v.type() == nString) {
return {{
.path = state->coerceToDerivedPath(pos, v, errorCtx),
.path = DerivedPath::fromSingle(
state->coerceToSingleDerivedPath(pos, v, errorCtx)),
.info = make_ref<ExtraPathInfo>(),
}};
}
@ -28,6 +28,20 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
static void completeFlakeInputPath(
|
||||
AddCompletions & completions,
|
||||
ref<EvalState> evalState,
|
||||
const std::vector<FlakeRef> & flakeRefs,
|
||||
std::string_view prefix)
|
||||
{
|
||||
for (auto & flakeRef : flakeRefs) {
|
||||
auto flake = flake::getFlake(*evalState, flakeRef, true);
|
||||
for (auto & input : flake.inputs)
|
||||
if (hasPrefix(input.first, prefix))
|
||||
completions.add(input.first);
|
||||
}
|
||||
}
|
||||
|
||||
MixFlakeOptions::MixFlakeOptions()
|
||||
{
|
||||
auto category = "Common flake-related options";
|
||||
|
@ -79,8 +93,8 @@ MixFlakeOptions::MixFlakeOptions()
|
|||
.handler = {[&](std::string s) {
|
||||
lockFlags.inputUpdates.insert(flake::parseInputPath(s));
|
||||
}},
|
||||
.completer = {[&](size_t, std::string_view prefix) {
|
||||
needsFlakeInputCompletion = {std::string(prefix)};
|
||||
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
|
||||
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
|
||||
}}
|
||||
});
|
||||
|
||||
|
@ -95,11 +109,12 @@ MixFlakeOptions::MixFlakeOptions()
|
|||
flake::parseInputPath(inputPath),
|
||||
parseFlakeRef(flakeRef, absPath("."), true));
|
||||
}},
|
||||
.completer = {[&](size_t n, std::string_view prefix) {
|
||||
if (n == 0)
|
||||
needsFlakeInputCompletion = {std::string(prefix)};
|
||||
else if (n == 1)
|
||||
completeFlakeRef(getEvalState()->store, prefix);
|
||||
.completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) {
|
||||
if (n == 0) {
|
||||
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
|
||||
} else if (n == 1) {
|
||||
completeFlakeRef(completions, getEvalState()->store, prefix);
|
||||
}
|
||||
}}
|
||||
});
|
||||
|
||||
|
@ -146,30 +161,12 @@ MixFlakeOptions::MixFlakeOptions()
|
|||
}
|
||||
}
|
||||
}},
|
||||
.completer = {[&](size_t, std::string_view prefix) {
|
||||
completeFlakeRef(getEvalState()->store, prefix);
|
||||
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
|
||||
completeFlakeRef(completions, getEvalState()->store, prefix);
|
||||
}}
|
||||
});
|
||||
}
|
||||
|
||||
void MixFlakeOptions::completeFlakeInput(std::string_view prefix)
|
||||
{
|
||||
auto evalState = getEvalState();
|
||||
for (auto & flakeRefS : getFlakesForCompletion()) {
|
||||
auto flakeRef = parseFlakeRefWithFragment(expandTilde(flakeRefS), absPath(".")).first;
|
||||
auto flake = flake::getFlake(*evalState, flakeRef, true);
|
||||
for (auto & input : flake.inputs)
|
||||
if (hasPrefix(input.first, prefix))
|
||||
completions->add(input.first);
|
||||
}
|
||||
}
|
||||
|
||||
void MixFlakeOptions::completionHook()
|
||||
{
|
||||
if (auto & prefix = needsFlakeInputCompletion)
|
||||
completeFlakeInput(*prefix);
|
||||
}
|
||||
|
||||
SourceExprCommand::SourceExprCommand()
|
||||
{
|
||||
addFlag({
|
||||
|
@ -226,11 +223,18 @@ Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
|
|||
};
|
||||
}
|
||||
|
||||
void SourceExprCommand::completeInstallable(std::string_view prefix)
|
||||
Args::CompleterClosure SourceExprCommand::getCompleteInstallable()
|
||||
{
|
||||
return [this](AddCompletions & completions, size_t, std::string_view prefix) {
|
||||
completeInstallable(completions, prefix);
|
||||
};
|
||||
}
|
||||
|
||||
void SourceExprCommand::completeInstallable(AddCompletions & completions, std::string_view prefix)
|
||||
{
|
||||
try {
|
||||
if (file) {
|
||||
completionType = ctAttrs;
|
||||
completions.setType(AddCompletions::Type::Attrs);
|
||||
|
||||
evalSettings.pureEval = false;
|
||||
auto state = getEvalState();
|
||||
|
@ -265,14 +269,15 @@ void SourceExprCommand::completeInstallable(std::string_view prefix)
|
|||
std::string name = state->symbols[i.name];
|
||||
if (name.find(searchWord) == 0) {
|
||||
if (prefix_ == "")
|
||||
completions->add(name);
|
||||
completions.add(name);
|
||||
else
|
||||
completions->add(prefix_ + "." + name);
|
||||
completions.add(prefix_ + "." + name);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
completeFlakeRefWithFragment(
|
||||
completions,
|
||||
getEvalState(),
|
||||
lockFlags,
|
||||
getDefaultFlakeAttrPathPrefixes(),
|
||||
|
@ -285,6 +290,7 @@ void SourceExprCommand::completeInstallable(std::string_view prefix)
|
|||
}
|
||||
|
||||
void completeFlakeRefWithFragment(
|
||||
AddCompletions & completions,
|
||||
ref<EvalState> evalState,
|
||||
flake::LockFlags lockFlags,
|
||||
Strings attrPathPrefixes,
|
||||
|
@ -296,11 +302,16 @@ void completeFlakeRefWithFragment(
|
|||
try {
|
||||
auto hash = prefix.find('#');
|
||||
if (hash == std::string::npos) {
|
||||
completeFlakeRef(evalState->store, prefix);
|
||||
completeFlakeRef(completions, evalState->store, prefix);
|
||||
} else {
|
||||
completionType = ctAttrs;
|
||||
completions.setType(AddCompletions::Type::Attrs);
|
||||
|
||||
auto fragment = prefix.substr(hash + 1);
|
||||
std::string prefixRoot = "";
|
||||
if (fragment.starts_with(".")){
|
||||
fragment = fragment.substr(1);
|
||||
prefixRoot = ".";
|
||||
}
|
||||
auto flakeRefS = std::string(prefix.substr(0, hash));
|
||||
auto flakeRef = parseFlakeRef(expandTilde(flakeRefS), absPath("."));
|
||||
|
||||
|
@ -309,6 +320,9 @@ void completeFlakeRefWithFragment(
|
|||
|
||||
auto root = evalCache->getRoot();
|
||||
|
||||
if (prefixRoot == "."){
|
||||
attrPathPrefixes.clear();
|
||||
}
|
||||
/* Complete 'fragment' relative to all the
|
||||
attrpath prefixes as well as the root of the
|
||||
flake. */
|
||||
|
@ -333,7 +347,7 @@ void completeFlakeRefWithFragment(
|
|||
auto attrPath2 = (*attr)->getAttrPath(attr2);
|
||||
/* Strip the attrpath prefix. */
|
||||
attrPath2.erase(attrPath2.begin(), attrPath2.begin() + attrPathPrefix.size());
|
||||
completions->add(flakeRefS + "#" + concatStringsSep(".", evalState->symbols.resolve(attrPath2)));
|
||||
completions.add(flakeRefS + "#" + prefixRoot + concatStringsSep(".", evalState->symbols.resolve(attrPath2)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -344,7 +358,7 @@ void completeFlakeRefWithFragment(
|
|||
for (auto & attrPath : defaultFlakeAttrPaths) {
|
||||
auto attr = root->findAlongAttrPath(parseAttrPath(*evalState, attrPath));
|
||||
if (!attr) continue;
|
||||
completions->add(flakeRefS + "#");
|
||||
completions.add(flakeRefS + "#" + prefixRoot);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -353,15 +367,15 @@ void completeFlakeRefWithFragment(
|
|||
}
|
||||
}
|
||||
|
||||
void completeFlakeRef(ref<Store> store, std::string_view prefix)
|
||||
void completeFlakeRef(AddCompletions & completions, ref<Store> store, std::string_view prefix)
|
||||
{
|
||||
if (!experimentalFeatureSettings.isEnabled(Xp::Flakes))
|
||||
return;
|
||||
|
||||
if (prefix == "")
|
||||
completions->add(".");
|
||||
completions.add(".");
|
||||
|
||||
completeDir(0, prefix);
|
||||
Args::completeDir(completions, 0, prefix);
|
||||
|
||||
/* Look for registry entries that match the prefix. */
|
||||
for (auto & registry : fetchers::getRegistries(store)) {
|
||||
|
@ -370,10 +384,10 @@ void completeFlakeRef(ref<Store> store, std::string_view prefix)
|
|||
if (!hasPrefix(prefix, "flake:") && hasPrefix(from, "flake:")) {
|
||||
std::string from2(from, 6);
|
||||
if (hasPrefix(from2, prefix))
|
||||
completions->add(from2);
|
||||
completions.add(from2);
|
||||
} else {
|
||||
if (hasPrefix(from, prefix))
|
||||
completions->add(from);
|
||||
completions.add(from);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -459,7 +473,7 @@ Installables SourceExprCommand::parseInstallables(
|
|||
result.push_back(
|
||||
make_ref<InstallableAttrPath>(
|
||||
InstallableAttrPath::parse(
|
||||
state, *this, vFile, prefix, extendedOutputsSpec)));
|
||||
state, *this, vFile, std::move(prefix), std::move(extendedOutputsSpec))));
|
||||
}
|
||||
|
||||
} else {
|
||||
|
@ -475,7 +489,7 @@ Installables SourceExprCommand::parseInstallables(
|
|||
if (prefix.find('/') != std::string::npos) {
|
||||
try {
|
||||
result.push_back(make_ref<InstallableDerivedPath>(
|
||||
InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec)));
|
||||
InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec.raw)));
|
||||
continue;
|
||||
} catch (BadStorePath &) {
|
||||
} catch (...) {
|
||||
|
@ -491,7 +505,7 @@ Installables SourceExprCommand::parseInstallables(
|
|||
getEvalState(),
|
||||
std::move(flakeRef),
|
||||
fragment,
|
||||
extendedOutputsSpec,
|
||||
std::move(extendedOutputsSpec),
|
||||
getDefaultFlakeAttrPaths(),
|
||||
getDefaultFlakeAttrPathPrefixes(),
|
||||
lockFlags));
|
||||
|
@ -515,6 +529,30 @@ ref<Installable> SourceExprCommand::parseInstallable(
return installables.front();
}

static SingleBuiltPath getBuiltPath(ref<Store> evalStore, ref<Store> store, const SingleDerivedPath & b)
{
return std::visit(
overloaded{
[&](const SingleDerivedPath::Opaque & bo) -> SingleBuiltPath {
return SingleBuiltPath::Opaque { bo.path };
},
[&](const SingleDerivedPath::Built & bfd) -> SingleBuiltPath {
auto drvPath = getBuiltPath(evalStore, store, *bfd.drvPath);
// Resolving this instead of `bfd` will yield the same result, but avoid duplicative work.
SingleDerivedPath::Built truncatedBfd {
.drvPath = makeConstantStorePathRef(drvPath.outPath()),
.output = bfd.output,
};
auto outputPath = resolveDerivedPath(*store, truncatedBfd, &*evalStore);
return SingleBuiltPath::Built {
.drvPath = make_ref<SingleBuiltPath>(std::move(drvPath)),
.output = { bfd.output, outputPath },
};
},
},
b.raw());
}

std::vector<BuiltPathWithResult> Installable::build(
ref<Store> evalStore,
ref<Store> store,

@ -568,7 +606,10 @@ std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> Installable::build
[&](const DerivedPath::Built & bfd) {
auto outputs = resolveDerivedPath(*store, bfd, &*evalStore);
res.push_back({aux.installable, {
.path = BuiltPath::Built { bfd.drvPath, outputs },
.path = BuiltPath::Built {
.drvPath = make_ref<SingleBuiltPath>(getBuiltPath(evalStore, store, *bfd.drvPath)),
.outputs = outputs,
},
.info = aux.info}});
},
[&](const DerivedPath::Opaque & bo) {

@ -597,7 +638,10 @@ std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> Installable::build
for (auto & [outputName, realisation] : buildResult.builtOutputs)
outputs.emplace(outputName, realisation.outPath);
res.push_back({aux.installable, {
.path = BuiltPath::Built { bfd.drvPath, outputs },
.path = BuiltPath::Built {
.drvPath = make_ref<SingleBuiltPath>(getBuiltPath(evalStore, store, *bfd.drvPath)),
.outputs = outputs,
},
.info = aux.info,
.result = buildResult}});
},

@ -691,7 +735,7 @@ StorePathSet Installable::toDerivations(
: throw Error("argument '%s' did not evaluate to a derivation", i->what()));
},
[&](const DerivedPath::Built & bfd) {
drvPaths.insert(bfd.drvPath);
drvPaths.insert(resolveDerivedPath(*store, *bfd.drvPath));
},
}, b.path.raw());
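A hedged sketch of consuming the BuiltPaths produced by Installable::build() above; `results` is a hypothetical name for the returned vector of (installable, BuiltPathWithResult) pairs:

for (auto & [installable, built] : results) {
    std::visit(overloaded {
        [&](const nix::BuiltPath::Opaque & o) {
            std::cout << store->printStorePath(o.path) << "\n";
        },
        [&](const nix::BuiltPath::Built & b) {
            // b.drvPath is now a SingleBuiltPath; the realized outputs carry
            // a concrete store path per output name.
            for (auto & [name, path] : b.outputs)
                std::cout << name << " -> " << store->printStorePath(path) << "\n";
        },
    }, built.path.raw());
}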
@ -709,9 +753,7 @@ RawInstallablesCommand::RawInstallablesCommand()
|
|||
expectArgs({
|
||||
.label = "installables",
|
||||
.handler = {&rawInstallables},
|
||||
.completer = {[&](size_t, std::string_view prefix) {
|
||||
completeInstallable(prefix);
|
||||
}}
|
||||
.completer = getCompleteInstallable(),
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -724,6 +766,17 @@ void RawInstallablesCommand::applyDefaultInstallables(std::vector<std::string> &
|
|||
}
|
||||
}
|
||||
|
||||
std::vector<FlakeRef> RawInstallablesCommand::getFlakeRefsForCompletion()
|
||||
{
|
||||
applyDefaultInstallables(rawInstallables);
|
||||
std::vector<FlakeRef> res;
|
||||
for (auto i : rawInstallables)
|
||||
res.push_back(parseFlakeRefWithFragment(
|
||||
expandTilde(i),
|
||||
absPath(".")).first);
|
||||
return res;
|
||||
}
|
||||
|
||||
void RawInstallablesCommand::run(ref<Store> store)
|
||||
{
|
||||
if (readFromStdIn && !isatty(STDIN_FILENO)) {
|
||||
|
@ -737,10 +790,13 @@ void RawInstallablesCommand::run(ref<Store> store)
|
|||
run(store, std::move(rawInstallables));
|
||||
}
|
||||
|
||||
std::vector<std::string> RawInstallablesCommand::getFlakesForCompletion()
|
||||
std::vector<FlakeRef> InstallableCommand::getFlakeRefsForCompletion()
|
||||
{
|
||||
applyDefaultInstallables(rawInstallables);
|
||||
return rawInstallables;
|
||||
return {
|
||||
parseFlakeRefWithFragment(
|
||||
expandTilde(_installable),
|
||||
absPath(".")).first
|
||||
};
|
||||
}
|
||||
|
||||
void InstallablesCommand::run(ref<Store> store, std::vector<std::string> && rawInstallables)
|
||||
|
@ -756,9 +812,7 @@ InstallableCommand::InstallableCommand()
|
|||
.label = "installable",
|
||||
.optional = true,
|
||||
.handler = {&_installable},
|
||||
.completer = {[&](size_t, std::string_view prefix) {
|
||||
completeInstallable(prefix);
|
||||
}}
|
||||
.completer = getCompleteInstallable(),
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -488,35 +488,40 @@ bool NixRepl::processLine(std::string line)
std::cout
<< "The following commands are available:\n"
<< "\n"
<< " <expr> Evaluate and print expression\n"
<< " <x> = <expr> Bind expression to variable\n"
<< " :a <expr> Add attributes from resulting set to scope\n"
<< " :b <expr> Build a derivation\n"
<< " :bl <expr> Build a derivation, creating GC roots in the working directory\n"
<< " :e <expr> Open package or function in $EDITOR\n"
<< " :i <expr> Build derivation, then install result into current profile\n"
<< " :l <path> Load Nix expression and add it to scope\n"
<< " :lf <ref> Load Nix flake and add it to scope\n"
<< " :p <expr> Evaluate and print expression recursively\n"
<< " :q Exit nix-repl\n"
<< " :r Reload all files\n"
<< " :sh <expr> Build dependencies of derivation, then start nix-shell\n"
<< " :t <expr> Describe result of evaluation\n"
<< " :u <expr> Build derivation, then start nix-shell\n"
<< " :doc <expr> Show documentation of a builtin function\n"
<< " :log <expr> Show logs for a derivation\n"
<< " :te [bool] Enable, disable or toggle showing traces for errors\n"
<< " <expr> Evaluate and print expression\n"
<< " <x> = <expr> Bind expression to variable\n"
<< " :a, :add <expr> Add attributes from resulting set to scope\n"
<< " :b <expr> Build a derivation\n"
<< " :bl <expr> Build a derivation, creating GC roots in the\n"
<< " working directory\n"
<< " :e, :edit <expr> Open package or function in $EDITOR\n"
<< " :i <expr> Build derivation, then install result into\n"
<< " current profile\n"
<< " :l, :load <path> Load Nix expression and add it to scope\n"
<< " :lf, :load-flake <ref> Load Nix flake and add it to scope\n"
<< " :p, :print <expr> Evaluate and print expression recursively\n"
<< " :q, :quit Exit nix-repl\n"
<< " :r, :reload Reload all files\n"
<< " :sh <expr> Build dependencies of derivation, then start\n"
<< " nix-shell\n"
<< " :t <expr> Describe result of evaluation\n"
<< " :u <expr> Build derivation, then start nix-shell\n"
<< " :doc <expr> Show documentation of a builtin function\n"
<< " :log <expr> Show logs for a derivation\n"
<< " :te, :trace-enable [bool] Enable, disable or toggle showing traces for\n"
<< " errors\n"
<< " :?, :help Brings up this help menu\n"
;
if (state->debugRepl) {
std::cout
<< "\n"
<< " Debug mode commands\n"
<< " :env Show env stack\n"
<< " :bt Show trace stack\n"
<< " :st Show current trace\n"
<< " :st <idx> Change to another trace in the stack\n"
<< " :c Go until end of program, exception, or builtins.break\n"
<< " :s Go one step\n"
<< " :env Show env stack\n"
<< " :bt, :backtrace Show trace stack\n"
<< " :st Show current trace\n"
<< " :st <idx> Change to another trace in the stack\n"
<< " :c, :continue Go until end of program, exception, or builtins.break\n"
<< " :s, :step Go one step\n"
;
}

@ -648,7 +653,7 @@ bool NixRepl::processLine(std::string line)
if (command == ":b" || command == ":bl") {
state->store->buildPaths({
DerivedPath::Built {
.drvPath = drvPath,
.drvPath = makeConstantStorePathRef(drvPath),
.outputs = OutputsSpec::All { },
},
});

@ -917,7 +922,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m

case nString:
str << ANSI_WARNING;
printLiteralString(str, v.string.s);
printLiteralString(str, v.string_view());
str << ANSI_NORMAL;
break;
@ -132,7 +132,7 @@ std::pair<SourcePath, uint32_t> findPackageFilename(EvalState & state, Value & v
|
|||
if (colon == std::string::npos) fail();
|
||||
std::string filename(fn, 0, colon);
|
||||
auto lineno = std::stoi(std::string(fn, colon + 1, std::string::npos));
|
||||
return {CanonPath(fn.substr(0, colon)), lineno};
|
||||
return {SourcePath{path.accessor, CanonPath(fn.substr(0, colon))}, lineno};
|
||||
} catch (std::invalid_argument & e) {
|
||||
fail();
|
||||
abort();
|
||||
|
|
|
@ -50,7 +50,7 @@ struct AttrDb
|
|||
Path cacheDir = getCacheDir() + "/nix/eval-cache-v5";
|
||||
createDirs(cacheDir);
|
||||
|
||||
Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite";
|
||||
Path dbPath = cacheDir + "/" + fingerprint.to_string(HashFormat::Base16, false) + ".sqlite";
|
||||
|
||||
state->db = SQLite(dbPath);
|
||||
state->db.isCache();
|
||||
|
@ -440,8 +440,8 @@ Value & AttrCursor::forceValue()
|
|||
|
||||
if (root->db && (!cachedValue || std::get_if<placeholder_t>(&cachedValue->second))) {
|
||||
if (v.type() == nString)
|
||||
cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context),
|
||||
string_t{v.string.s, {}}};
|
||||
cachedValue = {root->db->setString(getKey(), v.c_str(), v.context()),
|
||||
string_t{v.c_str(), {}}};
|
||||
else if (v.type() == nPath) {
|
||||
auto path = v.path().path;
|
||||
cachedValue = {root->db->setString(getKey(), path.abs()), string_t{path.abs(), {}}};
|
||||
|
@ -582,7 +582,7 @@ std::string AttrCursor::getString()
|
|||
if (v.type() != nString && v.type() != nPath)
|
||||
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
|
||||
|
||||
return v.type() == nString ? v.string.s : v.path().to_string();
|
||||
return v.type() == nString ? v.c_str() : v.path().to_string();
|
||||
}
|
||||
|
||||
string_t AttrCursor::getStringWithContext()
|
||||
|
@ -599,12 +599,12 @@ string_t AttrCursor::getStringWithContext()
|
|||
return d.drvPath;
|
||||
},
|
||||
[&](const NixStringContextElem::Built & b) -> const StorePath & {
|
||||
return b.drvPath;
|
||||
return b.drvPath->getBaseStorePath();
|
||||
},
|
||||
[&](const NixStringContextElem::Opaque & o) -> const StorePath & {
|
||||
return o.path;
|
||||
},
|
||||
}, c.raw());
|
||||
}, c.raw);
|
||||
if (!root->state.store->isValidPath(path)) {
|
||||
valid = false;
|
||||
break;
|
||||
|
@ -624,7 +624,7 @@ string_t AttrCursor::getStringWithContext()
|
|||
if (v.type() == nString) {
|
||||
NixStringContext context;
|
||||
copyContext(v, context);
|
||||
return {v.string.s, std::move(context)};
|
||||
return {v.c_str(), std::move(context)};
|
||||
}
|
||||
else if (v.type() == nPath)
|
||||
return {v.path().to_string(), {}};
|
||||
|
|
|
@ -63,7 +63,7 @@ Strings EvalSettings::getDefaultNixPath()
};

if (!evalSettings.restrictEval && !evalSettings.pureEval) {
add(settings.useXDGBaseDirectories ? getStateDir() + "/nix/defexpr/channels" : getHome() + "/.nix-defexpr/channels");
add(getNixDefExpr() + "/channels");
add(rootChannelsDir() + "/nixpkgs", "nixpkgs");
add(rootChannelsDir());
}

@ -92,4 +92,11 @@ EvalSettings evalSettings;

static GlobalConfig::Register rEvalSettings(&evalSettings);

Path getNixDefExpr()
{
return settings.useXDGBaseDirectories
? getStateDir() + "/nix/defexpr"
: getHome() + "/.nix-defexpr";
}

}

@ -21,7 +21,7 @@ struct EvalSettings : Config
R"(
List of directories to be searched for `<...>` file references

In particular, outside of [pure evaluation mode](#conf-pure-evaluation), this determines the value of
In particular, outside of [pure evaluation mode](#conf-pure-eval), this determines the value of
[`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath).
)"};

@ -29,10 +29,12 @@ struct EvalSettings : Config
this, false, "restrict-eval",
R"(
If set to `true`, the Nix evaluator will not allow access to any
files outside of the Nix search path (as set via the `NIX_PATH`
environment variable or the `-I` option), or to URIs outside of
[`allowed-uris`](../command-ref/conf-file.md#conf-allowed-uris).
The default is `false`.
files outside of
[`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath),
or to URIs outside of
[`allowed-uris`](@docroot@/command-ref/conf-file.md#conf-allowed-uris).

Also the default value for [`nix-path`](#conf-nix-path) is ignored, such that only explicitly set search path entries are taken into account.
)"};

Setting<bool> pureEval{this, false, "pure-eval",

@ -40,18 +42,22 @@ struct EvalSettings : Config
Pure evaluation mode ensures that the result of Nix expressions is fully determined by explicitly declared inputs, and not influenced by external state:

- Restrict file system and network access to files specified by cryptographic hash
- Disable [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) and [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime)
- Disable impure constants:
- [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem)
- [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime)
- [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath)
)"
};

Setting<bool> enableImportFromDerivation{
this, true, "allow-import-from-derivation",
R"(
By default, Nix allows you to `import` from a derivation, allowing
building at evaluation time. With this option set to false, Nix will
throw an error when evaluating an expression that uses this feature,
allowing users to ensure their evaluation will not require any
builds to take place.
By default, Nix allows [Import from Derivation](@docroot@/language/import-from-derivation.md).

With this option set to `false`, Nix will throw an error when evaluating an expression that uses this feature,
even when the required store object is readily available.
This ensures that evaluation will not require any builds to take place,
regardless of the state of the store.
)"};

Setting<Strings> allowedUris{this, {}, "allowed-uris",

@ -95,4 +101,9 @@ struct EvalSettings : Config

extern EvalSettings evalSettings;

/**
* Conventionally part of the default nix path in impure mode.
*/
Path getNixDefExpr();

}
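A hedged illustration (not verbatim Nix code) of how the settings documented above interact in evaluator code, mirroring the guard used in getDefaultNixPath():

void exampleGuards(EvalSettings & settings)
{
    if (settings.pureEval) {
        // Impure constants such as builtins.currentSystem, builtins.currentTime
        // and builtins.nixPath are not exposed to the evaluated expression.
    } else if (settings.restrictEval) {
        // Access outside builtins.nixPath or allowed-uris raises an error,
        // and the default nix-path is ignored.
    }
}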
@ -12,6 +12,8 @@
|
|||
#include "function-trace.hh"
|
||||
#include "profiles.hh"
|
||||
#include "print.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
#include "memory-input-accessor.hh"
|
||||
|
||||
#include <algorithm>
|
||||
#include <chrono>
|
||||
|
@ -114,7 +116,7 @@ void Value::print(const SymbolTable &symbols, std::ostream &str,
|
|||
printLiteralBool(str, boolean);
|
||||
break;
|
||||
case tString:
|
||||
printLiteralString(str, string.s);
|
||||
printLiteralString(str, string_view());
|
||||
break;
|
||||
case tPath:
|
||||
str << path().to_string(); // !!! escaping?
|
||||
|
@ -339,7 +341,7 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env)
|
|||
Value nameValue;
|
||||
name.expr->eval(state, env, nameValue);
|
||||
state.forceStringNoCtx(nameValue, noPos, "while evaluating an attribute name");
|
||||
return state.symbols.create(nameValue.string.s);
|
||||
return state.symbols.create(nameValue.string_view());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -503,7 +505,17 @@ EvalState::EvalState(
|
|||
, sOutputSpecified(symbols.create("outputSpecified"))
|
||||
, repair(NoRepair)
|
||||
, emptyBindings(0)
|
||||
, derivationInternal(rootPath(CanonPath("/builtin/derivation.nix")))
|
||||
, rootFS(makeFSInputAccessor(CanonPath::root))
|
||||
, corepkgsFS(makeMemoryInputAccessor())
|
||||
, internalFS(makeMemoryInputAccessor())
|
||||
, derivationInternal{corepkgsFS->addFile(
|
||||
CanonPath("derivation-internal.nix"),
|
||||
#include "primops/derivation.nix.gen.hh"
|
||||
)}
|
||||
, callFlakeInternal{internalFS->addFile(
|
||||
CanonPath("call-flake.nix"),
|
||||
#include "flake/call-flake.nix.gen.hh"
|
||||
)}
|
||||
, store(store)
|
||||
, buildStore(buildStore ? buildStore : store)
|
||||
, debugRepl(nullptr)
|
||||
|
@ -527,9 +539,9 @@ EvalState::EvalState(
|
|||
/* Initialise the Nix expression search path. */
|
||||
if (!evalSettings.pureEval) {
|
||||
for (auto & i : _searchPath.elements)
|
||||
addToSearchPath(SearchPath::Elem {i});
|
||||
searchPath.elements.emplace_back(SearchPath::Elem {i});
|
||||
for (auto & i : evalSettings.nixPath.get())
|
||||
addToSearchPath(SearchPath::Elem::parse(i));
|
||||
searchPath.elements.emplace_back(SearchPath::Elem::parse(i));
|
||||
}
|
||||
|
||||
if (evalSettings.restrictEval || evalSettings.pureEval) {
|
||||
|
@ -539,7 +551,7 @@ EvalState::EvalState(
|
|||
auto r = resolveSearchPathPath(i.path);
|
||||
if (!r) continue;
|
||||
|
||||
auto path = *std::move(r);
|
||||
auto path = std::move(*r);
|
||||
|
||||
if (store->isInStore(path)) {
|
||||
try {
|
||||
|
@ -555,6 +567,11 @@ EvalState::EvalState(
|
|||
}
|
||||
}
|
||||
|
||||
corepkgsFS->addFile(
|
||||
CanonPath("fetchurl.nix"),
|
||||
#include "fetchurl.nix.gen.hh"
|
||||
);
|
||||
|
||||
createBaseEnv();
|
||||
}
|
||||
|
||||
|
@ -585,6 +602,9 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value &
|
|||
|
||||
SourcePath EvalState::checkSourcePath(const SourcePath & path_)
|
||||
{
|
||||
// Don't check non-rootFS accessors, they're in a different namespace.
|
||||
if (path_.accessor != ref<InputAccessor>(rootFS)) return path_;
|
||||
|
||||
if (!allowedPaths) return path_;
|
||||
|
||||
auto i = resolvedPaths.find(path_.path.abs());
|
||||
|
@ -599,8 +619,6 @@ SourcePath EvalState::checkSourcePath(const SourcePath & path_)
|
|||
*/
|
||||
Path abspath = canonPath(path_.path.abs());
|
||||
|
||||
if (hasPrefix(abspath, corepkgsPrefix)) return CanonPath(abspath);
|
||||
|
||||
for (auto & i : *allowedPaths) {
|
||||
if (isDirOrInDir(abspath, i)) {
|
||||
found = true;
|
||||
|
@ -617,7 +635,7 @@ SourcePath EvalState::checkSourcePath(const SourcePath & path_)
|
|||
|
||||
/* Resolve symlinks. */
|
||||
debug("checking access to '%s'", abspath);
|
||||
SourcePath path = CanonPath(canonPath(abspath, true));
|
||||
SourcePath path = rootPath(CanonPath(canonPath(abspath, true)));
|
||||
|
||||
for (auto & i : *allowedPaths) {
|
||||
if (isDirOrInDir(path.path.abs(), i)) {
|
||||
|
@ -649,12 +667,12 @@ void EvalState::checkURI(const std::string & uri)
|
|||
/* If the URI is a path, then check it against allowedPaths as
|
||||
well. */
|
||||
if (hasPrefix(uri, "/")) {
|
||||
checkSourcePath(CanonPath(uri));
|
||||
checkSourcePath(rootPath(CanonPath(uri)));
|
||||
return;
|
||||
}
|
||||
|
||||
if (hasPrefix(uri, "file://")) {
|
||||
checkSourcePath(CanonPath(std::string(uri, 7)));
|
||||
checkSourcePath(rootPath(CanonPath(std::string(uri, 7))));
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -950,7 +968,7 @@ void Value::mkStringMove(const char * s, const NixStringContext & context)
|
|||
|
||||
void Value::mkPath(const SourcePath & path)
|
||||
{
|
||||
mkPath(makeImmutableString(path.path.abs()));
|
||||
mkPath(&*path.accessor, makeImmutableString(path.path.abs()));
|
||||
}
|
||||
|
||||
|
||||
|
@ -1027,24 +1045,67 @@ void EvalState::mkStorePathString(const StorePath & p, Value & v)
|
|||
}
|
||||
|
||||
|
||||
std::string EvalState::mkOutputStringRaw(
|
||||
const SingleDerivedPath::Built & b,
|
||||
std::optional<StorePath> optStaticOutputPath,
|
||||
const ExperimentalFeatureSettings & xpSettings)
|
||||
{
|
||||
/* In practice, this is testing for the case of CA derivations, or
|
||||
dynamic derivations. */
|
||||
return optStaticOutputPath
|
||||
? store->printStorePath(std::move(*optStaticOutputPath))
|
||||
/* Downstream we would substitute this for an actual path once
|
||||
we build the floating CA derivation */
|
||||
: DownstreamPlaceholder::fromSingleDerivedPathBuilt(b, xpSettings).render();
|
||||
}
|
||||
|
||||
|
||||
void EvalState::mkOutputString(
|
||||
Value & value,
|
||||
const StorePath & drvPath,
|
||||
const std::string outputName,
|
||||
std::optional<StorePath> optOutputPath,
|
||||
const SingleDerivedPath::Built & b,
|
||||
std::optional<StorePath> optStaticOutputPath,
|
||||
const ExperimentalFeatureSettings & xpSettings)
|
||||
{
|
||||
value.mkString(
|
||||
optOutputPath
|
||||
? store->printStorePath(*std::move(optOutputPath))
|
||||
/* Downstream we would substitute this for an actual path once
|
||||
we build the floating CA derivation */
|
||||
: DownstreamPlaceholder::unknownCaOutput(drvPath, outputName, xpSettings).render(),
|
||||
mkOutputStringRaw(b, optStaticOutputPath, xpSettings),
|
||||
NixStringContext { b });
|
||||
}
|
||||
|
||||
|
||||
std::string EvalState::mkSingleDerivedPathStringRaw(
|
||||
const SingleDerivedPath & p)
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[&](const SingleDerivedPath::Opaque & o) {
|
||||
return store->printStorePath(o.path);
|
||||
},
|
||||
[&](const SingleDerivedPath::Built & b) {
|
||||
auto optStaticOutputPath = std::visit(overloaded {
|
||||
[&](const SingleDerivedPath::Opaque & o) {
|
||||
auto drv = store->readDerivation(o.path);
|
||||
auto i = drv.outputs.find(b.output);
|
||||
if (i == drv.outputs.end())
|
||||
throw Error("derivation '%s' does not have output '%s'", b.drvPath->to_string(*store), b.output);
|
||||
return i->second.path(*store, drv.name, b.output);
|
||||
},
|
||||
[&](const SingleDerivedPath::Built & o) -> std::optional<StorePath> {
|
||||
return std::nullopt;
|
||||
},
|
||||
}, b.drvPath->raw());
|
||||
return mkOutputStringRaw(b, optStaticOutputPath);
|
||||
}
|
||||
}, p.raw());
|
||||
}
|
||||
|
||||
|
||||
void EvalState::mkSingleDerivedPathString(
|
||||
const SingleDerivedPath & p,
|
||||
Value & v)
|
||||
{
|
||||
v.mkString(
|
||||
mkSingleDerivedPathStringRaw(p),
|
||||
NixStringContext {
|
||||
NixStringContextElem::Built {
|
||||
.drvPath = drvPath,
|
||||
.output = outputName,
|
||||
}
|
||||
std::visit([](auto && v) -> NixStringContextElem { return v; }, p),
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -1122,24 +1183,6 @@ void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial
|
|||
if (!e)
|
||||
e = parseExprFromFile(checkSourcePath(resolvedPath));
|
||||
|
||||
cacheFile(path, resolvedPath, e, v, mustBeTrivial);
|
||||
}
|
||||
|
||||
|
||||
void EvalState::resetFileCache()
|
||||
{
|
||||
fileEvalCache.clear();
|
||||
fileParseCache.clear();
|
||||
}
|
||||
|
||||
|
||||
void EvalState::cacheFile(
|
||||
const SourcePath & path,
|
||||
const SourcePath & resolvedPath,
|
||||
Expr * e,
|
||||
Value & v,
|
||||
bool mustBeTrivial)
|
||||
{
|
||||
fileParseCache[resolvedPath] = e;
|
||||
|
||||
try {
|
||||
|
@ -1168,6 +1211,13 @@ void EvalState::cacheFile(
|
|||
}
|
||||
|
||||
|
||||
void EvalState::resetFileCache()
|
||||
{
|
||||
fileEvalCache.clear();
|
||||
fileParseCache.clear();
|
||||
}
|
||||
|
||||
|
||||
void EvalState::eval(Expr * e, Value & v)
|
||||
{
|
||||
e->eval(*this, baseEnv, v);
|
||||
|
@ -1300,7 +1350,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
|
|||
if (nameVal.type() == nNull)
|
||||
continue;
|
||||
state.forceStringNoCtx(nameVal, i.pos, "while evaluating the name of a dynamic attribute");
|
||||
auto nameSym = state.symbols.create(nameVal.string.s);
|
||||
auto nameSym = state.symbols.create(nameVal.string_view());
|
||||
Bindings::iterator j = v.attrs->find(nameSym);
|
||||
if (j != v.attrs->end())
|
||||
state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow<EvalError>();
|
||||
|
@ -1994,7 +2044,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
|
|||
else if (firstType == nPath) {
|
||||
if (!context.empty())
|
||||
state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow<EvalError>();
|
||||
v.mkPath(CanonPath(canonPath(str())));
|
||||
v.mkPath(state.rootPath(CanonPath(canonPath(str()))));
|
||||
} else
|
||||
v.mkStringMove(c_str(), context);
|
||||
}
|
||||
|
@ -2112,7 +2162,7 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string
|
|||
forceValue(v, pos);
|
||||
if (v.type() != nString)
|
||||
error("value is %1% while a string was expected", showType(v)).debugThrow<TypeError>();
|
||||
return v.string.s;
|
||||
return v.string_view();
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
throw;
|
||||
|
@ -2139,8 +2189,8 @@ std::string_view EvalState::forceString(Value & v, NixStringContext & context, c
|
|||
std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx)
|
||||
{
|
||||
auto s = forceString(v, pos, errorCtx);
|
||||
if (v.string.context) {
|
||||
error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string.s, v.string.context[0]).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
if (v.context()) {
|
||||
error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
}
|
||||
return s;
|
||||
}
|
||||
|
@ -2153,7 +2203,7 @@ bool EvalState::isDerivation(Value & v)
|
|||
if (i == v.attrs->end()) return false;
|
||||
forceValue(*i->value, i->pos);
|
||||
if (i->value->type() != nString) return false;
|
||||
return strcmp(i->value->string.s, "derivation") == 0;
|
||||
return i->value->string_view().compare("derivation") == 0;
|
||||
}
|
||||
|
||||
|
||||
|
@ -2185,7 +2235,7 @@ BackedStringView EvalState::coerceToString(
|
|||
|
||||
if (v.type() == nString) {
|
||||
copyContext(v, context);
|
||||
return std::string_view(v.string.s);
|
||||
return v.string_view();
|
||||
}
|
||||
|
||||
if (v.type() == nPath) {
|
||||
|
@ -2193,7 +2243,7 @@ BackedStringView EvalState::coerceToString(
|
|||
!canonicalizePath && !copyToStore
|
||||
? // FIXME: hack to preserve path literals that end in a
|
||||
// slash, as in /foo/${x}.
|
||||
v._path
|
||||
v._path.path
|
||||
: copyToStore
|
||||
? store->printStorePath(copyPathToStore(context, v.path()))
|
||||
: std::string(v.path().path.abs());
|
||||
|
@ -2247,7 +2297,7 @@ BackedStringView EvalState::coerceToString(
|
|||
&& (!v2->isList() || v2->listSize() != 0))
|
||||
result += " ";
|
||||
}
|
||||
return std::move(result);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2267,7 +2317,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat
|
|||
auto dstPath = i != srcToStore.end()
|
||||
? i->second
|
||||
: [&]() {
|
||||
auto dstPath = path.fetchToStore(store, path.baseName(), nullptr, repair);
|
||||
auto dstPath = path.fetchToStore(store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
|
||||
allowPath(dstPath);
|
||||
srcToStore.insert_or_assign(path, dstPath);
|
||||
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
|
||||
|
@ -2283,10 +2333,34 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat

SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx)
{
try {
forceValue(v, pos);
} catch (Error & e) {
e.addTrace(positions[pos], errorCtx);
throw;
}

/* Handle path values directly, without coercing to a string. */
if (v.type() == nPath)
return v.path();

/* Similarly, handle __toString where the result may be a path
value. */
if (v.type() == nAttrs) {
auto i = v.attrs->find(sToString);
if (i != v.attrs->end()) {
Value v1;
callFunction(*i->value, v, v1, pos);
return coerceToPath(pos, v1, context, errorCtx);
}
}

/* Any other value should be coercible to a string, interpreted
relative to the root filesystem. */
auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
if (path == "" || path[0] != '/')
error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow<EvalError>();
return CanonPath(path);
return rootPath(CanonPath(path));
}
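A hedged usage sketch of the coercion above, as a primop might call it; the error-context string is illustrative:

static SourcePath toSourcePath(EvalState & state, const PosIdx pos, Value & v)
{
    NixStringContext context;
    // Accepts path values, attrsets with a __toString yielding a path, or
    // absolute path strings, per the cases handled in coerceToPath().
    return state.coerceToPath(pos, v, context, "while evaluating a path argument");
}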
@ -2299,7 +2373,7 @@ StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringCon
|
|||
}
|
||||
|
||||
|
||||
std::pair<DerivedPath, std::string_view> EvalState::coerceToDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx)
|
||||
std::pair<SingleDerivedPath, std::string_view> EvalState::coerceToSingleDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx)
|
||||
{
|
||||
NixStringContext context;
|
||||
auto s = forceString(v, context, pos, errorCtx);
|
||||
|
@ -2310,23 +2384,18 @@ std::pair<DerivedPath, std::string_view> EvalState::coerceToDerivedPathUnchecked
|
|||
s, csize)
|
||||
.withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
auto derivedPath = std::visit(overloaded {
|
||||
[&](NixStringContextElem::Opaque && o) -> DerivedPath {
|
||||
return DerivedPath::Opaque {
|
||||
.path = std::move(o.path),
|
||||
};
|
||||
[&](NixStringContextElem::Opaque && o) -> SingleDerivedPath {
|
||||
return std::move(o);
|
||||
},
|
||||
[&](NixStringContextElem::DrvDeep &&) -> DerivedPath {
|
||||
[&](NixStringContextElem::DrvDeep &&) -> SingleDerivedPath {
|
||||
error(
|
||||
"string '%s' has a context which refers to a complete source and binary closure. This is not supported at this time",
|
||||
s).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
},
|
||||
[&](NixStringContextElem::Built && b) -> DerivedPath {
|
||||
return DerivedPath::Built {
|
||||
.drvPath = std::move(b.drvPath),
|
||||
.outputs = OutputsSpec::Names { std::move(b.output) },
|
||||
};
|
||||
[&](NixStringContextElem::Built && b) -> SingleDerivedPath {
|
||||
return std::move(b);
|
||||
},
|
||||
}, ((NixStringContextElem &&) *context.begin()).raw());
|
||||
}, ((NixStringContextElem &&) *context.begin()).raw);
|
||||
return {
|
||||
std::move(derivedPath),
|
||||
std::move(s),
|
||||
|
@ -2334,41 +2403,29 @@ std::pair<DerivedPath, std::string_view> EvalState::coerceToDerivedPathUnchecked
|
|||
}
|
||||
|
||||
|
||||
DerivedPath EvalState::coerceToDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx)
|
||||
SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx)
|
||||
{
|
||||
auto [derivedPath, s_] = coerceToDerivedPathUnchecked(pos, v, errorCtx);
|
||||
auto [derivedPath, s_] = coerceToSingleDerivedPathUnchecked(pos, v, errorCtx);
|
||||
auto s = s_;
|
||||
std::visit(overloaded {
|
||||
[&](const DerivedPath::Opaque & o) {
|
||||
auto sExpected = store->printStorePath(o.path);
|
||||
if (s != sExpected)
|
||||
auto sExpected = mkSingleDerivedPathStringRaw(derivedPath);
|
||||
if (s != sExpected) {
|
||||
/* `std::visit` is used here just to provide a more precise
|
||||
error message. */
|
||||
std::visit(overloaded {
|
||||
[&](const SingleDerivedPath::Opaque & o) {
|
||||
error(
|
||||
"path string '%s' has context with the different path '%s'",
|
||||
s, sExpected)
|
||||
.withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
},
|
||||
[&](const DerivedPath::Built & b) {
|
||||
// TODO need derived path with single output to make this
|
||||
// total. Will add as part of RFC 92 work and then this is
|
||||
// cleaned up.
|
||||
auto output = *std::get<OutputsSpec::Names>(b.outputs).begin();
|
||||
|
||||
auto drv = store->readDerivation(b.drvPath);
|
||||
auto i = drv.outputs.find(output);
|
||||
if (i == drv.outputs.end())
|
||||
throw Error("derivation '%s' does not have output '%s'", store->printStorePath(b.drvPath), output);
|
||||
auto optOutputPath = i->second.path(*store, drv.name, output);
|
||||
// This is testing for the case of CA derivations
|
||||
auto sExpected = optOutputPath
|
||||
? store->printStorePath(*optOutputPath)
|
||||
: DownstreamPlaceholder::unknownCaOutput(b.drvPath, output).render();
|
||||
if (s != sExpected)
|
||||
},
|
||||
[&](const SingleDerivedPath::Built & b) {
|
||||
error(
|
||||
"string '%s' has context with the output '%s' from derivation '%s', but the string is not the right placeholder for this derivation output. It should be '%s'",
|
||||
s, output, store->printStorePath(b.drvPath), sExpected)
|
||||
s, b.output, b.drvPath->to_string(*store), sExpected)
|
||||
.withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
}
|
||||
}, derivedPath.raw());
|
||||
}
|
||||
}, derivedPath.raw());
|
||||
}
|
||||
return derivedPath;
|
||||
}
|
||||
|
||||
|
@@ -2400,10 +2457,13 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
            return v1.boolean == v2.boolean;

        case nString:
            return strcmp(v1.string.s, v2.string.s) == 0;
            return v1.string_view().compare(v2.string_view()) == 0;

        case nPath:
            return strcmp(v1._path, v2._path) == 0;
            return
                // FIXME: compare accessors by their fingerprint.
                v1._path.accessor == v2._path.accessor
                && strcmp(v1._path.path, v2._path.path) == 0;

        case nNull:
            return true;

@@ -2451,10 +2511,37 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
    }
}

void EvalState::printStats()
bool EvalState::fullGC() {
#if HAVE_BOEHMGC
    GC_gcollect();
    // Check that it ran. We might replace this with a version that uses more
    // of the boehm API to get this reliably, at a maintenance cost.
    // We use a 1K margin because technically this has a race condition, but we
    // probably won't encounter it in practice, because the CLI isn't concurrent
    // like that.
    return GC_get_bytes_since_gc() < 1024;
#else
    return false;
#endif
}

void EvalState::maybePrintStats()
{
    bool showStats = getEnv("NIX_SHOW_STATS").value_or("0") != "0";

    if (showStats) {
        // Make the final heap size more deterministic.
#if HAVE_BOEHMGC
        if (!fullGC()) {
            warn("failed to perform a full GC before reporting stats");
        }
#endif
        printStatistics();
    }
}
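
A minimal sketch of how a front end is expected to use the statistics hooks introduced above; `runEval` and its body are hypothetical, only `maybePrintStats` and `fullGC` come from this change:

```cpp
// Hypothetical caller: evaluate, then report statistics once at exit.
// maybePrintStats() does nothing unless NIX_SHOW_STATS is set; when it does
// run, it first attempts a full GC so the reported heap size is not
// dominated by garbage that has not been collected yet.
void runEval(EvalState & state, Expr * e, Value & v)
{
    state.eval(e, v);          // assumed entry point for this sketch
    state.maybePrintStats();   // emit NIX_SHOW_STATS output, if enabled
}
```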
|
||||
|
||||
void EvalState::printStatistics()
|
||||
{
|
||||
struct rusage buf;
|
||||
getrusage(RUSAGE_SELF, &buf);
|
||||
float cpuTime = buf.ru_utime.tv_sec + ((float) buf.ru_utime.tv_usec / 1000000);
|
||||
|
@ -2468,105 +2555,105 @@ void EvalState::printStats()
|
|||
GC_word heapSize, totalBytes;
|
||||
GC_get_heap_usage_safe(&heapSize, 0, 0, 0, &totalBytes);
|
||||
#endif
|
||||
if (showStats) {
|
||||
auto outPath = getEnv("NIX_SHOW_STATS_PATH").value_or("-");
|
||||
std::fstream fs;
|
||||
if (outPath != "-")
|
||||
fs.open(outPath, std::fstream::out);
|
||||
json topObj = json::object();
|
||||
topObj["cpuTime"] = cpuTime;
|
||||
topObj["envs"] = {
|
||||
{"number", nrEnvs},
|
||||
{"elements", nrValuesInEnvs},
|
||||
{"bytes", bEnvs},
|
||||
};
|
||||
topObj["list"] = {
|
||||
{"elements", nrListElems},
|
||||
{"bytes", bLists},
|
||||
{"concats", nrListConcats},
|
||||
};
|
||||
topObj["values"] = {
|
||||
{"number", nrValues},
|
||||
{"bytes", bValues},
|
||||
};
|
||||
topObj["symbols"] = {
|
||||
{"number", symbols.size()},
|
||||
{"bytes", symbols.totalSize()},
|
||||
};
|
||||
topObj["sets"] = {
|
||||
{"number", nrAttrsets},
|
||||
{"bytes", bAttrsets},
|
||||
{"elements", nrAttrsInAttrsets},
|
||||
};
|
||||
topObj["sizes"] = {
|
||||
{"Env", sizeof(Env)},
|
||||
{"Value", sizeof(Value)},
|
||||
{"Bindings", sizeof(Bindings)},
|
||||
{"Attr", sizeof(Attr)},
|
||||
};
|
||||
topObj["nrOpUpdates"] = nrOpUpdates;
|
||||
topObj["nrOpUpdateValuesCopied"] = nrOpUpdateValuesCopied;
|
||||
topObj["nrThunks"] = nrThunks;
|
||||
topObj["nrAvoided"] = nrAvoided;
|
||||
topObj["nrLookups"] = nrLookups;
|
||||
topObj["nrPrimOpCalls"] = nrPrimOpCalls;
|
||||
topObj["nrFunctionCalls"] = nrFunctionCalls;
|
||||
|
||||
auto outPath = getEnv("NIX_SHOW_STATS_PATH").value_or("-");
|
||||
std::fstream fs;
|
||||
if (outPath != "-")
|
||||
fs.open(outPath, std::fstream::out);
|
||||
json topObj = json::object();
|
||||
topObj["cpuTime"] = cpuTime;
|
||||
topObj["envs"] = {
|
||||
{"number", nrEnvs},
|
||||
{"elements", nrValuesInEnvs},
|
||||
{"bytes", bEnvs},
|
||||
};
|
||||
topObj["nrExprs"] = Expr::nrExprs;
|
||||
topObj["list"] = {
|
||||
{"elements", nrListElems},
|
||||
{"bytes", bLists},
|
||||
{"concats", nrListConcats},
|
||||
};
|
||||
topObj["values"] = {
|
||||
{"number", nrValues},
|
||||
{"bytes", bValues},
|
||||
};
|
||||
topObj["symbols"] = {
|
||||
{"number", symbols.size()},
|
||||
{"bytes", symbols.totalSize()},
|
||||
};
|
||||
topObj["sets"] = {
|
||||
{"number", nrAttrsets},
|
||||
{"bytes", bAttrsets},
|
||||
{"elements", nrAttrsInAttrsets},
|
||||
};
|
||||
topObj["sizes"] = {
|
||||
{"Env", sizeof(Env)},
|
||||
{"Value", sizeof(Value)},
|
||||
{"Bindings", sizeof(Bindings)},
|
||||
{"Attr", sizeof(Attr)},
|
||||
};
|
||||
topObj["nrOpUpdates"] = nrOpUpdates;
|
||||
topObj["nrOpUpdateValuesCopied"] = nrOpUpdateValuesCopied;
|
||||
topObj["nrThunks"] = nrThunks;
|
||||
topObj["nrAvoided"] = nrAvoided;
|
||||
topObj["nrLookups"] = nrLookups;
|
||||
topObj["nrPrimOpCalls"] = nrPrimOpCalls;
|
||||
topObj["nrFunctionCalls"] = nrFunctionCalls;
|
||||
#if HAVE_BOEHMGC
|
||||
topObj["gc"] = {
|
||||
{"heapSize", heapSize},
|
||||
{"totalBytes", totalBytes},
|
||||
};
|
||||
topObj["gc"] = {
|
||||
{"heapSize", heapSize},
|
||||
{"totalBytes", totalBytes},
|
||||
};
|
||||
#endif
|
||||
|
||||
if (countCalls) {
|
||||
topObj["primops"] = primOpCalls;
|
||||
{
|
||||
auto& list = topObj["functions"];
|
||||
list = json::array();
|
||||
for (auto & [fun, count] : functionCalls) {
|
||||
json obj = json::object();
|
||||
if (fun->name)
|
||||
obj["name"] = (std::string_view) symbols[fun->name];
|
||||
else
|
||||
obj["name"] = nullptr;
|
||||
if (auto pos = positions[fun->pos]) {
|
||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||
obj["file"] = path->to_string();
|
||||
obj["line"] = pos.line;
|
||||
obj["column"] = pos.column;
|
||||
}
|
||||
obj["count"] = count;
|
||||
list.push_back(obj);
|
||||
}
|
||||
}
|
||||
{
|
||||
auto list = topObj["attributes"];
|
||||
list = json::array();
|
||||
for (auto & i : attrSelects) {
|
||||
json obj = json::object();
|
||||
if (auto pos = positions[i.first]) {
|
||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||
obj["file"] = path->to_string();
|
||||
obj["line"] = pos.line;
|
||||
obj["column"] = pos.column;
|
||||
}
|
||||
obj["count"] = i.second;
|
||||
list.push_back(obj);
|
||||
if (countCalls) {
|
||||
topObj["primops"] = primOpCalls;
|
||||
{
|
||||
auto& list = topObj["functions"];
|
||||
list = json::array();
|
||||
for (auto & [fun, count] : functionCalls) {
|
||||
json obj = json::object();
|
||||
if (fun->name)
|
||||
obj["name"] = (std::string_view) symbols[fun->name];
|
||||
else
|
||||
obj["name"] = nullptr;
|
||||
if (auto pos = positions[fun->pos]) {
|
||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||
obj["file"] = path->to_string();
|
||||
obj["line"] = pos.line;
|
||||
obj["column"] = pos.column;
|
||||
}
|
||||
obj["count"] = count;
|
||||
list.push_back(obj);
|
||||
}
|
||||
}
|
||||
{
|
||||
auto list = topObj["attributes"];
|
||||
list = json::array();
|
||||
for (auto & i : attrSelects) {
|
||||
json obj = json::object();
|
||||
if (auto pos = positions[i.first]) {
|
||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||
obj["file"] = path->to_string();
|
||||
obj["line"] = pos.line;
|
||||
obj["column"] = pos.column;
|
||||
}
|
||||
obj["count"] = i.second;
|
||||
list.push_back(obj);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (getEnv("NIX_SHOW_SYMBOLS").value_or("0") != "0") {
|
||||
// XXX: overrides earlier assignment
|
||||
topObj["symbols"] = json::array();
|
||||
auto &list = topObj["symbols"];
|
||||
symbols.dump([&](const std::string & s) { list.emplace_back(s); });
|
||||
}
|
||||
if (outPath == "-") {
|
||||
std::cerr << topObj.dump(2) << std::endl;
|
||||
} else {
|
||||
fs << topObj.dump(2) << std::endl;
|
||||
}
|
||||
if (getEnv("NIX_SHOW_SYMBOLS").value_or("0") != "0") {
|
||||
// XXX: overrides earlier assignment
|
||||
topObj["symbols"] = json::array();
|
||||
auto &list = topObj["symbols"];
|
||||
symbols.dump([&](const std::string & s) { list.emplace_back(s); });
|
||||
}
|
||||
if (outPath == "-") {
|
||||
std::cerr << topObj.dump(2) << std::endl;
|
||||
} else {
|
||||
fs << topObj.dump(2) << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -22,8 +22,10 @@ namespace nix {
|
|||
class Store;
|
||||
class EvalState;
|
||||
class StorePath;
|
||||
struct DerivedPath;
|
||||
struct SingleDerivedPath;
|
||||
enum RepairFlag : bool;
|
||||
struct FSInputAccessor;
|
||||
struct MemoryInputAccessor;
|
||||
|
||||
|
||||
/**
|
||||
|
@ -211,8 +213,26 @@ public:
|
|||
|
||||
Bindings emptyBindings;
|
||||
|
||||
/**
|
||||
* The accessor for the root filesystem.
|
||||
*/
|
||||
const ref<FSInputAccessor> rootFS;
|
||||
|
||||
/**
|
||||
* The in-memory filesystem for <nix/...> paths.
|
||||
*/
|
||||
const ref<MemoryInputAccessor> corepkgsFS;
|
||||
|
||||
/**
|
||||
* In-memory filesystem for internal, non-user-callable Nix
|
||||
* expressions like call-flake.nix.
|
||||
*/
|
||||
const ref<MemoryInputAccessor> internalFS;
|
||||
|
||||
const SourcePath derivationInternal;
|
||||
|
||||
const SourcePath callFlakeInternal;
|
||||
|
||||
/**
|
||||
* Store used to materialise .drv files.
|
||||
*/
|
||||
|
@ -223,7 +243,6 @@ public:
|
|||
*/
|
||||
const ref<Store> buildStore;
|
||||
|
||||
RootValue vCallFlake = nullptr;
|
||||
RootValue vImportedDrvToDerivation = nullptr;
|
||||
|
||||
/**
|
||||
|
@ -341,8 +360,6 @@ public:
|
|||
std::shared_ptr<Store> buildStore = nullptr);
|
||||
~EvalState();
|
||||
|
||||
void addToSearchPath(SearchPath::Elem && elem);
|
||||
|
||||
SearchPath getSearchPath() { return searchPath; }
|
||||
|
||||
/**
|
||||
|
@ -407,16 +424,6 @@ public:
|
|||
*/
|
||||
void evalFile(const SourcePath & path, Value & v, bool mustBeTrivial = false);
|
||||
|
||||
/**
|
||||
* Like `evalFile`, but with an already parsed expression.
|
||||
*/
|
||||
void cacheFile(
|
||||
const SourcePath & path,
|
||||
const SourcePath & resolvedPath,
|
||||
Expr * e,
|
||||
Value & v,
|
||||
bool mustBeTrivial = false);
|
||||
|
||||
void resetFileCache();
|
||||
|
||||
/**
|
||||
|
@ -426,7 +433,7 @@ public:
|
|||
SourcePath findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);
|
||||
|
||||
/**
|
||||
* Try to resolve a search path value (not the optinal key part)
|
||||
* Try to resolve a search path value (not the optional key part)
|
||||
*
|
||||
* If the specified search path element is a URI, download it.
|
||||
*
|
||||
|
@ -532,12 +539,12 @@ public:
|
|||
StorePath coerceToStorePath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx);
|
||||
|
||||
/**
|
||||
* Part of `coerceToDerivedPath()` without any store IO which is exposed for unit testing only.
|
||||
* Part of `coerceToSingleDerivedPath()` without any store IO which is exposed for unit testing only.
|
||||
*/
|
||||
std::pair<DerivedPath, std::string_view> coerceToDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx);
|
||||
std::pair<SingleDerivedPath, std::string_view> coerceToSingleDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx);
|
||||
|
||||
/**
|
||||
* Coerce to `DerivedPath`.
|
||||
* Coerce to `SingleDerivedPath`.
|
||||
*
|
||||
* Must be a string which is either a literal store path or a
|
||||
* "placeholder (see `DownstreamPlaceholder`).
|
||||
|
@ -551,7 +558,7 @@ public:
|
|||
* source of truth, and ultimately tells us what we want, and then
|
||||
* we ensure the string corresponds to it.
|
||||
*/
|
||||
DerivedPath coerceToDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx);
|
||||
SingleDerivedPath coerceToSingleDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx);
|
||||
|
||||
public:
|
||||
|
||||
|
@@ -668,43 +675,68 @@ public:
    /**
     * Create a string representing a store path.
     *
     * The string is the printed store path with a context containing a single
     * `NixStringContextElem::Opaque` element of that store path.
     * The string is the printed store path with a context containing a
     * single `NixStringContextElem::Opaque` element of that store path.
     */
    void mkStorePathString(const StorePath & storePath, Value & v);

    /**
     * Create a string representing a `DerivedPath::Built`.
     * Create a string representing a `SingleDerivedPath::Built`.
     *
     * The string is the printed store path with a context containing a single
     * `NixStringContextElem::Built` element of the drv path and output name.
     * The string is the printed store path with a context containing a
     * single `NixStringContextElem::Built` element of the drv path and
     * output name.
     *
     * @param value Value we are settings
     *
     * @param drvPath Path the drv whose output we are making a string for
     * @param b the drv whose output we are making a string for, and the
     * output
     *
     * @param outputName Name of the output
     *
     * @param optOutputPath Optional output path for that string. Must
     * be passed if and only if output store object is input-addressed.
     * Will be printed to form string if passed, otherwise a placeholder
     * will be used (see `DownstreamPlaceholder`).
     * @param optStaticOutputPath Optional output path for that string.
     * Must be passed if and only if output store object is
     * input-addressed or fixed output. Will be printed to form string
     * if passed, otherwise a placeholder will be used (see
     * `DownstreamPlaceholder`).
     *
     * @param xpSettings Stop-gap to avoid globals during unit tests.
     */
    void mkOutputString(
        Value & value,
        const StorePath & drvPath,
        const std::string outputName,
        std::optional<StorePath> optOutputPath,
        const SingleDerivedPath::Built & b,
        std::optional<StorePath> optStaticOutputPath,
        const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);

    /**
     * Create a string representing a `SingleDerivedPath`.
     *
     * A combination of `mkStorePathString` and `mkOutputString`.
     */
    void mkSingleDerivedPathString(
        const SingleDerivedPath & p,
        Value & v);
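
As a rough illustration of the new signature (not part of the diff), creating an output string now passes a `SingleDerivedPath::Built` instead of separate `drvPath`/`outputName` arguments; the helper below and the choice of output name are assumptions:

```cpp
// Sketch only: describe output "out" of a .drv and let mkOutputString
// build the string together with its string context.
void exampleOutputString(EvalState & state, Value & v, const StorePath & drvPath)
{
    SingleDerivedPath::Built b {
        .drvPath = makeConstantStorePathRef(drvPath),
        .output = "out",
    };
    // Passing std::nullopt means the output path is not statically known
    // (the content-addressed case), so a DownstreamPlaceholder is rendered.
    state.mkOutputString(v, b, std::nullopt);
}
```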
|
||||
|
||||
void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos, std::string_view errorCtx);
|
||||
|
||||
/**
|
||||
* Print statistics.
|
||||
* Print statistics, if enabled.
|
||||
*
|
||||
* Performs a full memory GC before printing the statistics, so that the
|
||||
* GC statistics are more accurate.
|
||||
*/
|
||||
void printStats();
|
||||
void maybePrintStats();
|
||||
|
||||
/**
|
||||
* Print statistics, unconditionally, cheaply, without performing a GC first.
|
||||
*/
|
||||
void printStatistics();
|
||||
|
||||
/**
|
||||
* Perform a full memory garbage collection - not incremental.
|
||||
*
|
||||
* @return true if Nix was built with GC and a GC was performed, false if not.
|
||||
* The return value is currently not thread safe - just the return value.
|
||||
*/
|
||||
bool fullGC();
|
||||
|
||||
/**
|
||||
* Realise the given context, and return a mapping from the placeholders
|
||||
|
@ -714,6 +746,22 @@ public:
|
|||
|
||||
private:
|
||||
|
||||
/**
|
||||
     * Like `mkOutputString` but just creates a raw string, not a
|
||||
* string Value, which would also have a string context.
|
||||
*/
|
||||
std::string mkOutputStringRaw(
|
||||
const SingleDerivedPath::Built & b,
|
||||
std::optional<StorePath> optStaticOutputPath,
|
||||
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
|
||||
/**
|
||||
     * Like `mkSingleDerivedPathString` but just creates a raw string, not a
     * string Value, which would also have a string context.
|
||||
*/
|
||||
std::string mkSingleDerivedPathStringRaw(
|
||||
const SingleDerivedPath & p);
|
||||
|
||||
unsigned long nrEnvs = 0;
|
||||
unsigned long nrValuesInEnvs = 0;
|
||||
unsigned long nrValues = 0;
|
||||
|
@ -790,8 +838,6 @@ struct InvalidPathError : EvalError
|
|||
#endif
|
||||
};
|
||||
|
||||
static const std::string corepkgsPrefix{"/__corepkgs__/"};
|
||||
|
||||
template<class ErrorType>
|
||||
void ErrorBuilder::debugThrow()
|
||||
{
|
||||
|
|
|
@ -15,7 +15,7 @@ using namespace flake;
|
|||
|
||||
namespace flake {
|
||||
|
||||
typedef std::pair<fetchers::Tree, FlakeRef> FetchedFlake;
|
||||
typedef std::pair<StorePath, FlakeRef> FetchedFlake;
|
||||
typedef std::vector<std::pair<FlakeRef, FetchedFlake>> FlakeCache;
|
||||
|
||||
static std::optional<FetchedFlake> lookupInFlakeCache(
|
||||
|
@ -34,7 +34,7 @@ static std::optional<FetchedFlake> lookupInFlakeCache(
|
|||
return std::nullopt;
|
||||
}
|
||||
|
||||
static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
|
||||
static std::tuple<StorePath, FlakeRef, FlakeRef> fetchOrSubstituteTree(
|
||||
EvalState & state,
|
||||
const FlakeRef & originalRef,
|
||||
bool allowLookup,
|
||||
|
@ -61,16 +61,16 @@ static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
|
|||
flakeCache.push_back({originalRef, *fetched});
|
||||
}
|
||||
|
||||
auto [tree, lockedRef] = *fetched;
|
||||
auto [storePath, lockedRef] = *fetched;
|
||||
|
||||
debug("got tree '%s' from '%s'",
|
||||
state.store->printStorePath(tree.storePath), lockedRef);
|
||||
state.store->printStorePath(storePath), lockedRef);
|
||||
|
||||
state.allowPath(tree.storePath);
|
||||
state.allowPath(storePath);
|
||||
|
||||
assert(!originalRef.input.getNarHash() || tree.storePath == originalRef.input.computeStorePath(*state.store));
|
||||
assert(!originalRef.input.getNarHash() || storePath == originalRef.input.computeStorePath(*state.store));
|
||||
|
||||
return {std::move(tree), resolvedRef, lockedRef};
|
||||
return {std::move(storePath), resolvedRef, lockedRef};
|
||||
}
|
||||
|
||||
static void forceTrivialValue(EvalState & state, Value & value, const PosIdx pos)
|
||||
|
@ -113,7 +113,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
try {
|
||||
if (attr.name == sUrl) {
|
||||
expectType(state, nString, *attr.value, attr.pos);
|
||||
url = attr.value->string.s;
|
||||
url = attr.value->string_view();
|
||||
attrs.emplace("url", *url);
|
||||
} else if (attr.name == sFlake) {
|
||||
expectType(state, nBool, *attr.value, attr.pos);
|
||||
|
@ -122,7 +122,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, baseDir, lockRootPath);
|
||||
} else if (attr.name == sFollows) {
|
||||
expectType(state, nString, *attr.value, attr.pos);
|
||||
auto follows(parseInputPath(attr.value->string.s));
|
||||
auto follows(parseInputPath(attr.value->c_str()));
|
||||
follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end());
|
||||
input.follows = follows;
|
||||
} else {
|
||||
|
@ -131,7 +131,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
#pragma GCC diagnostic ignored "-Wswitch-enum"
|
||||
switch (attr.value->type()) {
|
||||
case nString:
|
||||
attrs.emplace(state.symbols[attr.name], attr.value->string.s);
|
||||
attrs.emplace(state.symbols[attr.name], attr.value->c_str());
|
||||
break;
|
||||
case nBool:
|
||||
attrs.emplace(state.symbols[attr.name], Explicit<bool> { attr.value->boolean });
|
||||
|
@ -202,34 +202,34 @@ static Flake getFlake(
|
|||
FlakeCache & flakeCache,
|
||||
InputPath lockRootPath)
|
||||
{
|
||||
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||
auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||
state, originalRef, allowLookup, flakeCache);
|
||||
|
||||
// Guard against symlink attacks.
|
||||
auto flakeDir = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir, true);
|
||||
auto flakeDir = canonPath(state.store->toRealPath(storePath) + "/" + lockedRef.subdir, true);
|
||||
auto flakeFile = canonPath(flakeDir + "/flake.nix", true);
|
||||
if (!isInDir(flakeFile, sourceInfo.actualPath))
|
||||
if (!isInDir(flakeFile, state.store->toRealPath(storePath)))
|
||||
throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
|
||||
lockedRef, state.store->printStorePath(sourceInfo.storePath));
|
||||
lockedRef, state.store->printStorePath(storePath));
|
||||
|
||||
Flake flake {
|
||||
.originalRef = originalRef,
|
||||
.resolvedRef = resolvedRef,
|
||||
.lockedRef = lockedRef,
|
||||
.sourceInfo = std::make_shared<fetchers::Tree>(std::move(sourceInfo))
|
||||
.storePath = storePath,
|
||||
};
|
||||
|
||||
if (!pathExists(flakeFile))
|
||||
throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir);
|
||||
|
||||
Value vInfo;
|
||||
state.evalFile(CanonPath(flakeFile), vInfo, true); // FIXME: symlink attack
|
||||
state.evalFile(state.rootPath(CanonPath(flakeFile)), vInfo, true); // FIXME: symlink attack
|
||||
|
||||
expectType(state, nAttrs, vInfo, state.positions.add({CanonPath(flakeFile)}, 1, 1));
|
||||
expectType(state, nAttrs, vInfo, state.positions.add({state.rootPath(CanonPath(flakeFile))}, 1, 1));
|
||||
|
||||
if (auto description = vInfo.attrs->get(state.sDescription)) {
|
||||
expectType(state, nString, *description->value, description->pos);
|
||||
flake.description = description->value->string.s;
|
||||
flake.description = description->value->c_str();
|
||||
}
|
||||
|
||||
auto sInputs = state.symbols.create("inputs");
|
||||
|
@ -346,7 +346,7 @@ LockedFlake lockFlake(
|
|||
// FIXME: symlink attack
|
||||
auto oldLockFile = LockFile::read(
|
||||
lockFlags.referenceLockFilePath.value_or(
|
||||
flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir + "/flake.lock"));
|
||||
state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir + "/flake.lock"));
|
||||
|
||||
debug("old lock file: %s", oldLockFile);
|
||||
|
||||
|
@ -520,11 +520,6 @@ LockedFlake lockFlake(
|
|||
}
|
||||
}
|
||||
|
||||
auto localPath(parentPath);
|
||||
// If this input is a path, recurse it down.
|
||||
// This allows us to resolve path inputs relative to the current flake.
|
||||
if ((*input.ref).input.getType() == "path")
|
||||
localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
|
||||
computeLocks(
|
||||
mustRefetch
|
||||
? getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath).inputs
|
||||
|
@ -579,7 +574,7 @@ LockedFlake lockFlake(
|
|||
oldLock
|
||||
? std::dynamic_pointer_cast<const Node>(oldLock)
|
||||
: LockFile::read(
|
||||
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
|
||||
state.store->toRealPath(inputFlake.storePath) + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
|
||||
oldLock ? lockRootPath : inputPath,
|
||||
localPath,
|
||||
false);
|
||||
|
@ -603,7 +598,7 @@ LockedFlake lockFlake(
|
|||
};
|
||||
|
||||
// Bring in the current ref for relative path resolution if we have it
|
||||
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true);
|
||||
auto parentPath = canonPath(state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir, true);
|
||||
|
||||
computeLocks(
|
||||
flake.inputs,
|
||||
|
@ -734,7 +729,7 @@ void callFlake(EvalState & state,
|
|||
|
||||
emitTreeAttrs(
|
||||
state,
|
||||
*lockedFlake.flake.sourceInfo,
|
||||
lockedFlake.flake.storePath,
|
||||
lockedFlake.flake.lockedRef.input,
|
||||
*vRootSrc,
|
||||
false,
|
||||
|
@ -742,14 +737,10 @@ void callFlake(EvalState & state,
|
|||
|
||||
vRootSubdir->mkString(lockedFlake.flake.lockedRef.subdir);
|
||||
|
||||
if (!state.vCallFlake) {
|
||||
state.vCallFlake = allocRootValue(state.allocValue());
|
||||
state.eval(state.parseExprFromString(
|
||||
#include "call-flake.nix.gen.hh"
|
||||
, CanonPath::root), **state.vCallFlake);
|
||||
}
|
||||
auto vCallFlake = state.allocValue();
|
||||
state.evalFile(state.callFlakeInternal, *vCallFlake);
|
||||
|
||||
state.callFunction(**state.vCallFlake, *vLocks, *vTmp1, noPos);
|
||||
state.callFunction(*vCallFlake, *vLocks, *vTmp1, noPos);
|
||||
state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos);
|
||||
state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos);
|
||||
}
|
||||
|
@ -855,7 +846,7 @@ static void prim_flakeRefToString(
|
|||
Explicit<bool> { attr.value->boolean });
|
||||
} else if (t == nString) {
|
||||
attrs.emplace(state.symbols[attr.name],
|
||||
std::string(attr.value->str()));
|
||||
std::string(attr.value->string_view()));
|
||||
} else {
|
||||
state.error(
|
||||
"flake reference attribute sets may only contain integers, Booleans, "
|
||||
|
@ -898,7 +889,7 @@ Fingerprint LockedFlake::getFingerprint() const
|
|||
// flake.sourceInfo.storePath for the fingerprint.
|
||||
return hashString(htSHA256,
|
||||
fmt("%s;%s;%d;%d;%s",
|
||||
flake.sourceInfo->storePath.to_string(),
|
||||
flake.storePath.to_string(),
|
||||
flake.lockedRef.subdir,
|
||||
flake.lockedRef.input.getRevCount().value_or(0),
|
||||
flake.lockedRef.input.getLastModified().value_or(0),
|
||||
|
|
|
@ -10,8 +10,6 @@ namespace nix {
|
|||
|
||||
class EvalState;
|
||||
|
||||
namespace fetchers { struct Tree; }
|
||||
|
||||
namespace flake {
|
||||
|
||||
struct FlakeInput;
|
||||
|
@ -84,7 +82,7 @@ struct Flake
|
|||
*/
|
||||
bool forceDirty = false;
|
||||
std::optional<std::string> description;
|
||||
std::shared_ptr<const fetchers::Tree> sourceInfo;
|
||||
StorePath storePath;
|
||||
FlakeInputs inputs;
|
||||
/**
|
||||
* 'nixConfig' attribute
|
||||
|
@ -193,7 +191,7 @@ void callFlake(
|
|||
|
||||
void emitTreeAttrs(
|
||||
EvalState & state,
|
||||
const fetchers::Tree & tree,
|
||||
const StorePath & storePath,
|
||||
const fetchers::Input & input,
|
||||
Value & v,
|
||||
bool emptyRevFallback = false,
|
||||
|
|
|
@ -69,32 +69,130 @@ std::optional<FlakeRef> maybeParseFlakeRef(
|
|||
}
|
||||
}
|
||||
|
||||
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
||||
std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
|
||||
const std::string & url,
|
||||
const std::optional<Path> & baseDir,
|
||||
bool allowMissing,
|
||||
bool isFlake)
|
||||
{
|
||||
using namespace fetchers;
|
||||
std::string path = url;
|
||||
std::string fragment = "";
|
||||
std::map<std::string, std::string> query;
|
||||
auto pathEnd = url.find_first_of("#?");
|
||||
auto fragmentStart = pathEnd;
|
||||
if (pathEnd != std::string::npos && url[pathEnd] == '?') {
|
||||
fragmentStart = url.find("#");
|
||||
}
|
||||
if (pathEnd != std::string::npos) {
|
||||
path = url.substr(0, pathEnd);
|
||||
}
|
||||
if (fragmentStart != std::string::npos) {
|
||||
fragment = percentDecode(url.substr(fragmentStart+1));
|
||||
}
|
||||
if (pathEnd != std::string::npos && fragmentStart != std::string::npos) {
|
||||
query = decodeQuery(url.substr(pathEnd+1, fragmentStart));
|
||||
}
|
||||
|
||||
static std::string fnRegex = "[0-9a-zA-Z-._~!$&'\"()*+,;=]+";
|
||||
if (baseDir) {
|
||||
/* Check if 'url' is a path (either absolute or relative
|
||||
to 'baseDir'). If so, search upward to the root of the
|
||||
repo (i.e. the directory containing .git). */
|
||||
|
||||
static std::regex pathUrlRegex(
|
||||
"(/?" + fnRegex + "(?:/" + fnRegex + ")*/?)"
|
||||
+ "(?:\\?(" + queryRegex + "))?"
|
||||
+ "(?:#(" + queryRegex + "))?",
|
||||
std::regex::ECMAScript);
|
||||
path = absPath(path, baseDir);
|
||||
|
||||
if (isFlake) {
|
||||
|
||||
if (!allowMissing && !pathExists(path + "/flake.nix")){
|
||||
notice("path '%s' does not contain a 'flake.nix', searching up",path);
|
||||
|
||||
// Save device to detect filesystem boundary
|
||||
dev_t device = lstat(path).st_dev;
|
||||
bool found = false;
|
||||
while (path != "/") {
|
||||
if (pathExists(path + "/flake.nix")) {
|
||||
found = true;
|
||||
break;
|
||||
} else if (pathExists(path + "/.git"))
|
||||
throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path);
|
||||
else {
|
||||
if (lstat(path).st_dev != device)
|
||||
throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path);
|
||||
}
|
||||
path = dirOf(path);
|
||||
}
|
||||
if (!found)
|
||||
throw BadURL("could not find a flake.nix file");
|
||||
}
|
||||
|
||||
if (!S_ISDIR(lstat(path).st_mode))
|
||||
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
|
||||
|
||||
if (!allowMissing && !pathExists(path + "/flake.nix"))
|
||||
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
|
||||
|
||||
auto flakeRoot = path;
|
||||
std::string subdir;
|
||||
|
||||
while (flakeRoot != "/") {
|
||||
if (pathExists(flakeRoot + "/.git")) {
|
||||
auto base = std::string("git+file://") + flakeRoot;
|
||||
|
||||
auto parsedURL = ParsedURL{
|
||||
.url = base, // FIXME
|
||||
.base = base,
|
||||
.scheme = "git+file",
|
||||
.authority = "",
|
||||
.path = flakeRoot,
|
||||
.query = query,
|
||||
};
|
||||
|
||||
if (subdir != "") {
|
||||
if (parsedURL.query.count("dir"))
|
||||
throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
|
||||
parsedURL.query.insert_or_assign("dir", subdir);
|
||||
}
|
||||
|
||||
if (pathExists(flakeRoot + "/.git/shallow"))
|
||||
parsedURL.query.insert_or_assign("shallow", "1");
|
||||
|
||||
return std::make_pair(
|
||||
FlakeRef(fetchers::Input::fromURL(parsedURL), getOr(parsedURL.query, "dir", "")),
|
||||
fragment);
|
||||
}
|
||||
|
||||
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
|
||||
flakeRoot = dirOf(flakeRoot);
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
if (!hasPrefix(path, "/"))
|
||||
throw BadURL("flake reference '%s' is not an absolute path", url);
|
||||
path = canonPath(path + "/" + getOr(query, "dir", ""));
|
||||
}
|
||||
|
||||
fetchers::Attrs attrs;
|
||||
attrs.insert_or_assign("type", "path");
|
||||
attrs.insert_or_assign("path", path);
|
||||
|
||||
return std::make_pair(FlakeRef(fetchers::Input::fromAttrs(std::move(attrs)), ""), fragment);
|
||||
};
|
||||
|
||||
|
||||
/* Check if 'url' is a flake ID. This is an abbreviated syntax for
|
||||
'flake:<flake-id>?ref=<ref>&rev=<rev>'. */
|
||||
std::optional<std::pair<FlakeRef, std::string>> parseFlakeIdRef(
|
||||
const std::string & url,
|
||||
bool isFlake
|
||||
)
|
||||
{
|
||||
std::smatch match;
|
||||
|
||||
static std::regex flakeRegex(
|
||||
"((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)"
|
||||
+ "(?:#(" + queryRegex + "))?",
|
||||
std::regex::ECMAScript);
|
||||
|
||||
std::smatch match;
|
||||
|
||||
/* Check if 'url' is a flake ID. This is an abbreviated syntax for
|
||||
'flake:<flake-id>?ref=<ref>&rev=<rev>'. */
|
||||
|
||||
if (std::regex_match(url, match, flakeRegex)) {
|
||||
auto parsedURL = ParsedURL{
|
||||
.url = url,
|
||||
|
@ -105,111 +203,53 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
|||
};
|
||||
|
||||
return std::make_pair(
|
||||
FlakeRef(Input::fromURL(parsedURL, isFlake), ""),
|
||||
FlakeRef(fetchers::Input::fromURL(parsedURL, isFlake), ""),
|
||||
percentDecode(match.str(6)));
|
||||
}
|
||||
|
||||
else if (std::regex_match(url, match, pathUrlRegex)) {
|
||||
std::string path = match[1];
|
||||
std::string fragment = percentDecode(match.str(3));
|
||||
return {};
|
||||
}
|
||||
|
||||
if (baseDir) {
|
||||
/* Check if 'url' is a path (either absolute or relative
|
||||
to 'baseDir'). If so, search upward to the root of the
|
||||
repo (i.e. the directory containing .git). */
|
||||
|
||||
path = absPath(path, baseDir);
|
||||
|
||||
if (isFlake) {
|
||||
|
||||
if (!allowMissing && !pathExists(path + "/flake.nix")){
|
||||
notice("path '%s' does not contain a 'flake.nix', searching up",path);
|
||||
|
||||
// Save device to detect filesystem boundary
|
||||
dev_t device = lstat(path).st_dev;
|
||||
bool found = false;
|
||||
while (path != "/") {
|
||||
if (pathExists(path + "/flake.nix")) {
|
||||
found = true;
|
||||
break;
|
||||
} else if (pathExists(path + "/.git"))
|
||||
throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path);
|
||||
else {
|
||||
if (lstat(path).st_dev != device)
|
||||
throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path);
|
||||
}
|
||||
path = dirOf(path);
|
||||
}
|
||||
if (!found)
|
||||
throw BadURL("could not find a flake.nix file");
|
||||
}
|
||||
|
||||
if (!S_ISDIR(lstat(path).st_mode))
|
||||
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
|
||||
|
||||
if (!allowMissing && !pathExists(path + "/flake.nix"))
|
||||
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
|
||||
|
||||
auto flakeRoot = path;
|
||||
std::string subdir;
|
||||
|
||||
while (flakeRoot != "/") {
|
||||
if (pathExists(flakeRoot + "/.git")) {
|
||||
auto base = std::string("git+file://") + flakeRoot;
|
||||
|
||||
auto parsedURL = ParsedURL{
|
||||
.url = base, // FIXME
|
||||
.base = base,
|
||||
.scheme = "git+file",
|
||||
.authority = "",
|
||||
.path = flakeRoot,
|
||||
.query = decodeQuery(match[2]),
|
||||
};
|
||||
|
||||
if (subdir != "") {
|
||||
if (parsedURL.query.count("dir"))
|
||||
throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
|
||||
parsedURL.query.insert_or_assign("dir", subdir);
|
||||
}
|
||||
|
||||
if (pathExists(flakeRoot + "/.git/shallow"))
|
||||
parsedURL.query.insert_or_assign("shallow", "1");
|
||||
|
||||
return std::make_pair(
|
||||
FlakeRef(Input::fromURL(parsedURL, isFlake), getOr(parsedURL.query, "dir", "")),
|
||||
fragment);
|
||||
}
|
||||
|
||||
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
|
||||
flakeRoot = dirOf(flakeRoot);
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
if (!hasPrefix(path, "/"))
|
||||
throw BadURL("flake reference '%s' is not an absolute path", url);
|
||||
auto query = decodeQuery(match[2]);
|
||||
path = canonPath(path + "/" + getOr(query, "dir", ""));
|
||||
}
|
||||
|
||||
fetchers::Attrs attrs;
|
||||
attrs.insert_or_assign("type", "path");
|
||||
attrs.insert_or_assign("path", path);
|
||||
|
||||
return std::make_pair(FlakeRef(Input::fromAttrs(std::move(attrs)), ""), fragment);
|
||||
std::optional<std::pair<FlakeRef, std::string>> parseURLFlakeRef(
|
||||
const std::string & url,
|
||||
const std::optional<Path> & baseDir,
|
||||
bool isFlake
|
||||
)
|
||||
{
|
||||
ParsedURL parsedURL;
|
||||
try {
|
||||
parsedURL = parseURL(url);
|
||||
} catch (BadURL &) {
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
else {
|
||||
auto parsedURL = parseURL(url);
|
||||
std::string fragment;
|
||||
std::swap(fragment, parsedURL.fragment);
|
||||
std::string fragment;
|
||||
std::swap(fragment, parsedURL.fragment);
|
||||
|
||||
auto input = Input::fromURL(parsedURL, isFlake);
|
||||
input.parent = baseDir;
|
||||
auto input = fetchers::Input::fromURL(parsedURL, isFlake);
|
||||
input.parent = baseDir;
|
||||
|
||||
return std::make_pair(
|
||||
FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")),
|
||||
fragment);
|
||||
return std::make_pair(
|
||||
FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")),
|
||||
fragment);
|
||||
}
|
||||
|
||||
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
    const std::string & url,
    const std::optional<Path> & baseDir,
    bool allowMissing,
    bool isFlake)
{
    using namespace fetchers;

    std::smatch match;

    if (auto res = parseFlakeIdRef(url, isFlake)) {
        return *res;
    } else if (auto res = parseURLFlakeRef(url, baseDir, isFlake)) {
        return *res;
    } else {
        return parsePathFlakeRefWithFragment(url, baseDir, allowMissing, isFlake);
    }
}
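
To make the dispatch concrete, a hedged usage sketch; the flake references are illustrative and the default arguments are assumed to match the header:

```cpp
// Each call returns a (FlakeRef, fragment) pair; the parsers are tried in
// the order shown in parseFlakeRefWithFragment above.
void exampleFlakeRefParsing()
{
    // Flake ID form, handled by parseFlakeIdRef:
    auto [idRef, f1] = parseFlakeRefWithFragment("nixpkgs/nixos-23.05#hello");

    // URL form, handled by parseURLFlakeRef:
    auto [urlRef, f2] = parseFlakeRefWithFragment("github:NixOS/nix?dir=perl#default");

    // Everything else falls through to parsePathFlakeRefWithFragment, which
    // resolves the path against baseDir and may search upward for flake.nix:
    auto [pathRef, f3] = parseFlakeRefWithFragment(
        "./modules", std::optional<Path>("/home/alice/myflake"));
}
```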
|
||||
|
||||
|
@ -232,10 +272,10 @@ FlakeRef FlakeRef::fromAttrs(const fetchers::Attrs & attrs)
|
|||
fetchers::maybeGetStrAttr(attrs, "dir").value_or(""));
|
||||
}
|
||||
|
||||
std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
|
||||
std::pair<StorePath, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
|
||||
{
|
||||
auto [tree, lockedInput] = input.fetch(store);
|
||||
return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)};
|
||||
auto [storePath, lockedInput] = input.fetch(store);
|
||||
return {std::move(storePath), FlakeRef(std::move(lockedInput), subdir)};
|
||||
}
|
||||
|
||||
std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragmentAndExtendedOutputsSpec(
|
||||
|
@ -246,7 +286,9 @@ std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragment
|
|||
{
|
||||
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(url);
|
||||
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, baseDir, allowMissing, isFlake);
|
||||
return {std::move(flakeRef), fragment, extendedOutputsSpec};
|
||||
return {std::move(flakeRef), fragment, std::move(extendedOutputsSpec)};
|
||||
}
|
||||
|
||||
std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript);
|
||||
|
||||
}
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
#include "fetchers.hh"
|
||||
#include "outputs-spec.hh"
|
||||
|
||||
#include <regex>
|
||||
#include <variant>
|
||||
|
||||
namespace nix {
|
||||
|
@ -62,7 +63,7 @@ struct FlakeRef
|
|||
|
||||
static FlakeRef fromAttrs(const fetchers::Attrs & attrs);
|
||||
|
||||
std::pair<fetchers::Tree, FlakeRef> fetchTree(ref<Store> store) const;
|
||||
std::pair<StorePath, FlakeRef> fetchTree(ref<Store> store) const;
|
||||
};
|
||||
|
||||
std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef);
|
||||
|
@ -91,5 +92,7 @@ std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragment
|
|||
bool allowMissing = false,
|
||||
bool isFlake = true);
|
||||
|
||||
const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
|
||||
extern std::regex flakeIdRegex;
|
||||
|
||||
}
|
||||
|
|
|
@ -2,8 +2,10 @@
|
|||
#include "store-api.hh"
|
||||
#include "url-parts.hh"
|
||||
|
||||
#include <algorithm>
|
||||
#include <iomanip>
|
||||
|
||||
#include <iterator>
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
namespace nix::flake {
|
||||
|
@@ -45,16 +47,26 @@ StorePath LockedNode::computeStorePath(Store & store) const
    return lockedRef.input.computeStorePath(store);
}

std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
{

static std::shared_ptr<Node> doFind(const ref<Node>& root, const InputPath & path, std::vector<InputPath>& visited) {
    auto pos = root;

    auto found = std::find(visited.cbegin(), visited.cend(), path);

    if(found != visited.end()) {
        std::vector<std::string> cycle;
        std::transform(found, visited.cend(), std::back_inserter(cycle), printInputPath);
        cycle.push_back(printInputPath(path));
        throw Error("follow cycle detected: [%s]", concatStringsSep(" -> ", cycle));
    }
    visited.push_back(path);

    for (auto & elem : path) {
        if (auto i = get(pos->inputs, elem)) {
            if (auto node = std::get_if<0>(&*i))
                pos = *node;
            else if (auto follows = std::get_if<1>(&*i)) {
                if (auto p = findInput(*follows))
                if (auto p = doFind(root, *follows, visited))
                    pos = ref(p);
                else
                    return {};
@@ -66,6 +78,12 @@ std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
    return pos;
}

std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
{
    std::vector<InputPath> visited;
    return doFind(root, path, visited);
}
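
The `doFind`/`visited` pattern above is a plain cycle check on `follows` chains; a self-contained illustration of the same idea, with nothing taken from the diff:

```cpp
#include <algorithm>
#include <map>
#include <stdexcept>
#include <string>
#include <vector>

// Follow "a follows b" edges until a concrete input is reached, throwing
// if the chain ever revisits an input it has already passed through.
std::string resolveFollows(
    const std::map<std::string, std::string> & follows,
    std::string input)
{
    std::vector<std::string> visited;
    while (true) {
        if (std::find(visited.begin(), visited.end(), input) != visited.end())
            throw std::runtime_error("follow cycle detected at '" + input + "'");
        visited.push_back(input);
        auto it = follows.find(input);
        if (it == follows.end())
            return input;      // no further indirection: concrete input
        input = it->second;    // keep following
    }
}
```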
|
||||
|
||||
LockFile::LockFile(const nlohmann::json & json, const Path & path)
|
||||
{
|
||||
auto version = json.value("version", 0);
|
||||
|
@ -345,7 +363,7 @@ void LockFile::check()
|
|||
|
||||
for (auto & [inputPath, input] : inputs) {
|
||||
if (auto follows = std::get_if<1>(&input)) {
|
||||
if (!follows->empty() && !get(inputs, *follows))
|
||||
if (!follows->empty() && !findInput(*follows))
|
||||
throw Error("input '%s' follows a non-existent input '%s'",
|
||||
printInputPath(inputPath),
|
||||
printInputPath(*follows));
|
||||
|
|
|
@ -156,7 +156,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
|
|||
Outputs result;
|
||||
for (auto elem : outTI->listItems()) {
|
||||
if (elem->type() != nString) throw errMsg;
|
||||
auto out = outputs.find(elem->string.s);
|
||||
auto out = outputs.find(elem->c_str());
|
||||
if (out == outputs.end()) throw errMsg;
|
||||
result.insert(*out);
|
||||
}
|
||||
|
@ -230,7 +230,7 @@ std::string DrvInfo::queryMetaString(const std::string & name)
|
|||
{
|
||||
Value * v = queryMeta(name);
|
||||
if (!v || v->type() != nString) return "";
|
||||
return v->string.s;
|
||||
return v->c_str();
|
||||
}
|
||||
|
||||
|
||||
|
@ -242,7 +242,7 @@ NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def)
|
|||
if (v->type() == nString) {
|
||||
/* Backwards compatibility with before we had support for
|
||||
integer meta fields. */
|
||||
if (auto n = string2Int<NixInt>(v->string.s))
|
||||
if (auto n = string2Int<NixInt>(v->c_str()))
|
||||
return *n;
|
||||
}
|
||||
return def;
|
||||
|
@ -256,7 +256,7 @@ NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def)
|
|||
if (v->type() == nString) {
|
||||
/* Backwards compatibility with before we had support for
|
||||
float meta fields. */
|
||||
if (auto n = string2Float<NixFloat>(v->string.s))
|
||||
if (auto n = string2Float<NixFloat>(v->c_str()))
|
||||
return *n;
|
||||
}
|
||||
return def;
|
||||
|
@ -271,8 +271,8 @@ bool DrvInfo::queryMetaBool(const std::string & name, bool def)
|
|||
if (v->type() == nString) {
|
||||
/* Backwards compatibility with before we had support for
|
||||
Boolean meta fields. */
|
||||
if (strcmp(v->string.s, "true") == 0) return true;
|
||||
if (strcmp(v->string.s, "false") == 0) return false;
|
||||
if (v->string_view() == "true") return true;
|
||||
if (v->string_view() == "false") return false;
|
||||
}
|
||||
return def;
|
||||
}
|
||||
|
|
|
@ -76,12 +76,12 @@ void Expr::show(const SymbolTable & symbols, std::ostream & str) const
|
|||
|
||||
void ExprInt::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
{
|
||||
str << n;
|
||||
str << v.integer;
|
||||
}
|
||||
|
||||
void ExprFloat::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
{
|
||||
str << nf;
|
||||
str << v.fpoint;
|
||||
}
|
||||
|
||||
void ExprString::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
|
|
|
@ -20,7 +20,6 @@ MakeError(Abort, EvalError);
|
|||
MakeError(TypeError, EvalError);
|
||||
MakeError(UndefinedVarError, Error);
|
||||
MakeError(MissingArgumentError, EvalError);
|
||||
MakeError(RestrictedPathError, Error);
|
||||
|
||||
/**
|
||||
* Position objects.
|
||||
|
@ -155,6 +154,10 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
|
|||
|
||||
struct Expr
|
||||
{
|
||||
static unsigned long nrExprs;
|
||||
Expr() {
|
||||
nrExprs++;
|
||||
}
|
||||
virtual ~Expr() { };
|
||||
virtual void show(const SymbolTable & symbols, std::ostream & str) const;
|
||||
virtual void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env);
|
||||
|
@ -171,18 +174,16 @@ struct Expr
|
|||
|
||||
struct ExprInt : Expr
{
    NixInt n;
    Value v;
    ExprInt(NixInt n) : n(n) { v.mkInt(n); };
    ExprInt(NixInt n) { v.mkInt(n); };
    Value * maybeThunk(EvalState & state, Env & env) override;
    COMMON_METHODS
};

struct ExprFloat : Expr
{
    NixFloat nf;
    Value v;
    ExprFloat(NixFloat nf) : nf(nf) { v.mkFloat(nf); };
    ExprFloat(NixFloat nf) { v.mkFloat(nf); };
    Value * maybeThunk(EvalState & state, Env & env) override;
    COMMON_METHODS
};
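
Presumably the point of embedding a `Value` in the literal nodes is that `maybeThunk` can hand it out directly instead of allocating a thunk per occurrence; a simplified sketch of what the matching definition can look like, not copied from the diff:

```cpp
// Simplified: return the Value the constructor already filled in,
// avoiding a per-occurrence thunk allocation.
Value * ExprInt::maybeThunk(EvalState & state, Env & env)
{
    return &v;
}
```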
|
||||
|
@ -198,9 +199,13 @@ struct ExprString : Expr
|
|||
|
||||
struct ExprPath : Expr
|
||||
{
|
||||
ref<InputAccessor> accessor;
|
||||
std::string s;
|
||||
Value v;
|
||||
ExprPath(std::string s) : s(std::move(s)) { v.mkPath(this->s.c_str()); };
|
||||
ExprPath(ref<InputAccessor> accessor, std::string s) : accessor(accessor), s(std::move(s))
|
||||
{
|
||||
v.mkPath(&*accessor, this->s.c_str());
|
||||
}
|
||||
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
@ -238,7 +243,7 @@ struct ExprSelect : Expr
|
|||
PosIdx pos;
|
||||
Expr * e, * def;
|
||||
AttrPath attrPath;
|
||||
ExprSelect(const PosIdx & pos, Expr * e, const AttrPath && attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(std::move(attrPath)) { };
|
||||
ExprSelect(const PosIdx & pos, Expr * e, AttrPath attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(std::move(attrPath)) { };
|
||||
ExprSelect(const PosIdx & pos, Expr * e, Symbol name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); };
|
||||
PosIdx getPos() const override { return pos; }
|
||||
COMMON_METHODS
|
||||
|
@ -248,7 +253,7 @@ struct ExprOpHasAttr : Expr
|
|||
{
|
||||
Expr * e;
|
||||
AttrPath attrPath;
|
||||
ExprOpHasAttr(Expr * e, const AttrPath && attrPath) : e(e), attrPath(std::move(attrPath)) { };
|
||||
ExprOpHasAttr(Expr * e, AttrPath attrPath) : e(e), attrPath(std::move(attrPath)) { };
|
||||
PosIdx getPos() const override { return e->getPos(); }
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
|
|
@ -520,7 +520,7 @@ path_start
|
|||
/* add back in the trailing '/' to the first segment */
|
||||
if ($1.p[$1.l-1] == '/' && $1.l > 1)
|
||||
path += "/";
|
||||
$$ = new ExprPath(path);
|
||||
$$ = new ExprPath(ref<InputAccessor>(data->state.rootFS), std::move(path));
|
||||
}
|
||||
| HPATH {
|
||||
if (evalSettings.pureEval) {
|
||||
|
@ -530,7 +530,7 @@ path_start
|
|||
);
|
||||
}
|
||||
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
|
||||
$$ = new ExprPath(path);
|
||||
$$ = new ExprPath(ref<InputAccessor>(data->state.rootFS), std::move(path));
|
||||
}
|
||||
;
|
||||
|
||||
|
@ -646,13 +646,16 @@ formal
|
|||
|
||||
#include "eval.hh"
|
||||
#include "filetransfer.hh"
|
||||
#include "fetchers.hh"
|
||||
#include "tarball.hh"
|
||||
#include "store-api.hh"
|
||||
#include "flake/flake.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
#include "memory-input-accessor.hh"
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
unsigned long Expr::nrExprs = 0;
|
||||
|
||||
Expr * EvalState::parse(
|
||||
char * text,
|
||||
|
@ -736,12 +739,6 @@ Expr * EvalState::parseStdin()
|
|||
}
|
||||
|
||||
|
||||
void EvalState::addToSearchPath(SearchPath::Elem && elem)
|
||||
{
|
||||
searchPath.elements.emplace_back(std::move(elem));
|
||||
}
|
||||
|
||||
|
||||
SourcePath EvalState::findFile(const std::string_view path)
|
||||
{
|
||||
return findFile(searchPath, path);
|
||||
|
@ -761,11 +758,11 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_
|
|||
auto r = *rOpt;
|
||||
|
||||
Path res = suffix == "" ? r : concatStrings(r, "/", suffix);
|
||||
if (pathExists(res)) return CanonPath(canonPath(res));
|
||||
if (pathExists(res)) return rootPath(CanonPath(canonPath(res)));
|
||||
}
|
||||
|
||||
if (hasPrefix(path, "nix/"))
|
||||
return CanonPath(concatStrings(corepkgsPrefix, path.substr(4)));
|
||||
return {corepkgsFS, CanonPath(path.substr(3))};
|
||||
|
||||
debugThrow(ThrownError({
|
||||
.msg = hintfmt(evalSettings.pureEval
|
||||
|
@ -788,7 +785,7 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
|
|||
if (EvalSettings::isPseudoUrl(value)) {
|
||||
try {
|
||||
auto storePath = fetchers::downloadTarball(
|
||||
store, EvalSettings::resolvePseudoUrl(value), "source", false).tree.storePath;
|
||||
store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath;
|
||||
res = { store->toRealPath(storePath) };
|
||||
} catch (FileTransferError & e) {
|
||||
logWarning({
|
||||
|
@ -802,7 +799,7 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
|
|||
experimentalFeatureSettings.require(Xp::Flakes);
|
||||
auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false);
|
||||
debug("fetching flake search path element '%s''", value);
|
||||
auto storePath = flakeRef.resolve(store).fetchTree(store).first.storePath;
|
||||
auto storePath = flakeRef.resolve(store).fetchTree(store).first;
|
||||
res = { store->toRealPath(storePath) };
|
||||
}
|
||||
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
#include "eval.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
SourcePath EvalState::rootPath(CanonPath path)
|
||||
{
|
||||
return std::move(path);
|
||||
return {rootFS, std::move(path)};
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -56,7 +56,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
|||
.drvPath = b.drvPath,
|
||||
.outputs = OutputsSpec::Names { b.output },
|
||||
});
|
||||
ensureValid(b.drvPath);
|
||||
ensureValid(b.drvPath->getBaseStorePath());
|
||||
},
|
||||
[&](const NixStringContextElem::Opaque & o) {
|
||||
auto ctxS = store->printStorePath(o.path);
|
||||
|
@ -69,7 +69,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
|||
res.insert_or_assign(ctxS, ctxS);
|
||||
ensureValid(d.drvPath);
|
||||
},
|
||||
}, c.raw());
|
||||
}, c.raw);
|
||||
}
|
||||
|
||||
if (drvs.empty()) return {};
|
||||
|
@ -77,7 +77,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
|||
if (!evalSettings.enableImportFromDerivation)
|
||||
debugThrowLastTrace(Error(
|
||||
"cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled",
|
||||
store->printStorePath(drvs.begin()->drvPath)));
|
||||
drvs.begin()->to_string(*store)));
|
||||
|
||||
/* Build/substitute the context. */
|
||||
std::vector<DerivedPath> buildReqs;
|
||||
|
@@ -95,7 +95,11 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
        /* Get all the output paths corresponding to the placeholders we had */
        if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
            res.insert_or_assign(
                DownstreamPlaceholder::unknownCaOutput(drv.drvPath, outputName).render(),
                DownstreamPlaceholder::fromSingleDerivedPathBuilt(
                    SingleDerivedPath::Built {
                        .drvPath = drv.drvPath,
                        .output = outputName,
                    }).render(),
                store->printStorePath(outputPath)
            );
        }
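
In other words, for a content-addressed output whose path is unknown at instantiation time, the evaluator later rewrites the rendered placeholder to the concrete path; roughly, with `drvPath`, `outputPath` and `store` standing in for whatever the surrounding code supplies:

```cpp
// Rough sketch of the mapping realiseContext records for a CA output.
auto placeholder = DownstreamPlaceholder::fromSingleDerivedPathBuilt(
    SingleDerivedPath::Built {
        .drvPath = makeConstantStorePathRef(drvPath),  // one way to obtain the ref
        .output = "out",
    }).render();

StringMap rewrites;
rewrites.insert_or_assign(placeholder, store->printStorePath(outputPath));
// rewriteStrings(s, rewrites) then substitutes the real output path
// wherever the placeholder occurred in the evaluated string s.
```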
|
||||
|
@ -117,13 +121,15 @@ static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, co
|
|||
auto path = state.coerceToPath(noPos, v, context, "while realising the context of a path");
|
||||
|
||||
try {
|
||||
StringMap rewrites = state.realiseContext(context);
|
||||
|
||||
auto realPath = state.rootPath(CanonPath(state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context)));
|
||||
if (!context.empty()) {
|
||||
auto rewrites = state.realiseContext(context);
|
||||
auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context);
|
||||
return {path.accessor, CanonPath(realPath)};
|
||||
}
|
||||
|
||||
return flags.checkForPureEval
|
||||
? state.checkSourcePath(realPath)
|
||||
: realPath;
|
||||
? state.checkSourcePath(path)
|
||||
: path;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[pos], "while realising the context of path '%s'", path);
|
||||
throw;
|
||||
|
@ -152,8 +158,10 @@ static void mkOutputString(
|
|||
{
|
||||
state.mkOutputString(
|
||||
attrs.alloc(o.first),
|
||||
drvPath,
|
||||
o.first,
|
||||
SingleDerivedPath::Built {
|
||||
.drvPath = makeConstantStorePathRef(drvPath),
|
||||
.output = o.first,
|
||||
},
|
||||
o.second.path(*state.store, Derivation::nameFromPath(drvPath), o.first));
|
||||
}
|
||||
|
||||
|
@ -196,7 +204,7 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
|
|||
state.vImportedDrvToDerivation = allocRootValue(state.allocValue());
|
||||
state.eval(state.parseExprFromString(
|
||||
#include "imported-drv-to-derivation.nix.gen.hh"
|
||||
, CanonPath::root), **state.vImportedDrvToDerivation);
|
||||
, state.rootPath(CanonPath::root)), **state.vImportedDrvToDerivation);
|
||||
}
|
||||
|
||||
state.forceFunction(**state.vImportedDrvToDerivation, pos, "while evaluating imported-drv-to-derivation.nix.gen.hh");
|
||||
|
@ -204,12 +212,6 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
|
|||
state.forceAttrs(v, pos, "while calling imported-drv-to-derivation.nix.gen.hh");
|
||||
}
|
||||
|
||||
else if (path2 == corepkgsPrefix + "fetchurl.nix") {
|
||||
state.eval(state.parseExprFromString(
|
||||
#include "fetchurl.nix.gen.hh"
|
||||
, CanonPath::root), v);
|
||||
}
|
||||
|
||||
else {
|
||||
if (!vScope)
|
||||
state.evalFile(path, v);
|
||||
|
@ -252,64 +254,71 @@ static RegisterPrimOp primop_import({
|
|||
.args = {"path"},
|
||||
// TODO turn "normal path values" into link below
|
||||
.doc = R"(
|
||||
Load, parse and return the Nix expression in the file *path*.
|
||||
|
||||
The value *path* can be a path, a string, or an attribute set with an
|
||||
`__toString` attribute or a `outPath` attribute (as derivations or flake
|
||||
inputs typically have).
|
||||
|
||||
If *path* is a directory, the file `default.nix` in that directory
|
||||
is loaded.
|
||||
|
||||
Evaluation aborts if the file doesn’t exist or contains
|
||||
an incorrect Nix expression. `import` implements Nix’s module
|
||||
system: you can put any Nix expression (such as a set or a
|
||||
function) in a separate file, and use it from Nix expressions in
|
||||
other files.
|
||||
Load, parse, and return the Nix expression in the file *path*.
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> Unlike some languages, `import` is a regular function in Nix.
|
||||
> Paths using the angle bracket syntax (e.g., `import` *\<foo\>*)
|
||||
> are normal [path values](@docroot@/language/values.md#type-path).
|
||||
|
||||
A Nix expression loaded by `import` must not contain any *free
|
||||
variables* (identifiers that are not defined in the Nix expression
|
||||
itself and are not built-in). Therefore, it cannot refer to
|
||||
variables that are in scope at the call site. For instance, if you
|
||||
have a calling expression
|
||||
The *path* argument must meet the same criteria as an [interpolated expression](@docroot@/language/string-interpolation.md#interpolated-expression).
|
||||
|
||||
```nix
|
||||
rec {
|
||||
x = 123;
|
||||
y = import ./foo.nix;
|
||||
}
|
||||
```
|
||||
If *path* is a directory, the file `default.nix` in that directory is used if it exists.
|
||||
|
||||
then the following `foo.nix` will give an error:
|
||||
> **Example**
|
||||
>
|
||||
> ```console
|
||||
> $ echo 123 > default.nix
|
||||
> ```
|
||||
>
|
||||
> Import `default.nix` from the current directory.
|
||||
>
|
||||
> ```nix
|
||||
> import ./.
|
||||
> ```
|
||||
>
|
||||
> 123
|
||||
|
||||
```nix
|
||||
x + 456
|
||||
```
|
||||
Evaluation aborts if the file doesn’t exist or contains an invalid Nix expression.
|
||||
|
||||
since `x` is not in scope in `foo.nix`. If you want `x` to be
|
||||
available in `foo.nix`, you should pass it as a function argument:
|
||||
A Nix expression loaded by `import` must not contain any *free variables*, that is, identifiers that are not defined in the Nix expression itself and are not built-in.
|
||||
Therefore, it cannot refer to variables that are in scope at the call site.
|
||||
|
||||
```nix
|
||||
rec {
|
||||
x = 123;
|
||||
y = import ./foo.nix x;
|
||||
}
|
||||
```
|
||||
|
||||
and
|
||||
|
||||
```nix
|
||||
x: x + 456
|
||||
```
|
||||
|
||||
(The function argument doesn’t have to be called `x` in `foo.nix`;
|
||||
any name would work.)
|
||||
> **Example**
|
||||
>
|
||||
> If you have a calling expression
|
||||
>
|
||||
> ```nix
|
||||
> rec {
|
||||
> x = 123;
|
||||
> y = import ./foo.nix;
|
||||
> }
|
||||
> ```
|
||||
>
|
||||
> then the following `foo.nix` will give an error:
|
||||
>
|
||||
> ```nix
|
||||
> # foo.nix
|
||||
> x + 456
|
||||
> ```
|
||||
>
|
||||
> since `x` is not in scope in `foo.nix`.
|
||||
> If you want `x` to be available in `foo.nix`, pass it as a function argument:
|
||||
>
|
||||
> ```nix
|
||||
> rec {
|
||||
> x = 123;
|
||||
> y = import ./foo.nix x;
|
||||
> }
|
||||
> ```
|
||||
>
|
||||
> and
|
||||
>
|
||||
> ```nix
|
||||
> # foo.nix
|
||||
> x: x + 456
|
||||
> ```
|
||||
>
|
||||
> The function argument doesn’t have to be called `x` in `foo.nix`; any name would work.
|
||||
)",
|
||||
.fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
|
@ -584,9 +593,12 @@ struct CompareValues
|
|||
case nFloat:
|
||||
return v1->fpoint < v2->fpoint;
|
||||
case nString:
|
||||
return strcmp(v1->string.s, v2->string.s) < 0;
|
||||
return v1->string_view().compare(v2->string_view()) < 0;
|
||||
case nPath:
|
||||
return strcmp(v1->_path, v2->_path) < 0;
|
||||
// Note: we don't take the accessor into account
|
||||
// since it's not obvious how to compare them in a
|
||||
// reproducible way.
|
||||
return strcmp(v1->_path.path, v2->_path.path) < 0;
|
||||
case nList:
|
||||
// Lexicographic comparison
|
||||
for (size_t i = 0;; i++) {
|
||||
|
@ -721,6 +733,14 @@ static RegisterPrimOp primop_genericClosure(PrimOp {
|
|||
```
|
||||
[ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ]
|
||||
```
|
||||
|
||||
`key` can be one of the following types (see the sketch after this list):
|
||||
- [Number](@docroot@/language/values.md#type-number)
|
||||
- [Boolean](@docroot@/language/values.md#type-boolean)
|
||||
- [String](@docroot@/language/values.md#type-string)
|
||||
- [Path](@docroot@/language/values.md#type-path)
|
||||
- [List](@docroot@/language/values.md#list)
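A minimal sketch using string keys; the attribute names are purely illustrative, and `operator` stops once the `"z"` item has been produced:

```nix
builtins.genericClosure {
  startSet = [ { key = "a"; next = "b"; } ];
  operator = item:
    if item.key == "z"
    then [ ]                                   # nothing further to add
    else [ { key = item.next; next = "z"; } ];
}
# evaluates to [ { key = "a"; next = "b"; } { key = "b"; next = "z"; } { key = "z"; next = "z"; } ]
```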
|
||||
|
||||
)",
|
||||
.fun = prim_genericClosure,
|
||||
});
|
||||
|
@ -976,7 +996,7 @@ static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Valu
|
|||
{
|
||||
state.forceValue(*args[0], pos);
|
||||
if (args[0]->type() == nString)
|
||||
printError("trace: %1%", args[0]->string.s);
|
||||
printError("trace: %1%", args[0]->string_view());
|
||||
else
|
||||
printError("trace: %1%", printValue(state, *args[0]));
|
||||
state.forceValue(*args[1], pos);
|
||||
|
@ -1246,17 +1266,18 @@ drvName, Bindings * attrs, Value & v)
|
|||
state.store->computeFSClosure(d.drvPath, refs);
|
||||
for (auto & j : refs) {
|
||||
drv.inputSrcs.insert(j);
|
||||
if (j.isDerivation())
|
||||
drv.inputDrvs[j] = state.store->readDerivation(j).outputNames();
|
||||
if (j.isDerivation()) {
|
||||
drv.inputDrvs.map[j].value = state.store->readDerivation(j).outputNames();
|
||||
}
|
||||
}
|
||||
},
|
||||
[&](const NixStringContextElem::Built & b) {
|
||||
drv.inputDrvs[b.drvPath].insert(b.output);
|
||||
drv.inputDrvs.ensureSlot(*b.drvPath).value.insert(b.output);
|
||||
},
|
||||
[&](const NixStringContextElem::Opaque & o) {
|
||||
drv.inputSrcs.insert(o.path);
|
||||
},
|
||||
}, c.raw());
|
||||
}, c.raw);
|
||||
}
|
||||
|
||||
/* Do we have all required attributes? */
|
||||
|
@ -1325,13 +1346,13 @@ drvName, Bindings * attrs, Value & v)
|
|||
if (isImpure)
|
||||
drv.outputs.insert_or_assign(i,
|
||||
DerivationOutput::Impure {
|
||||
.method = method.raw,
|
||||
.method = method,
|
||||
.hashType = ht,
|
||||
});
|
||||
else
|
||||
drv.outputs.insert_or_assign(i,
|
||||
DerivationOutput::CAFloating {
|
||||
.method = method.raw,
|
||||
.method = method,
|
||||
.hashType = ht,
|
||||
});
|
||||
}
|
||||
|
@ -1364,7 +1385,7 @@ drvName, Bindings * attrs, Value & v)
|
|||
drv.env[i] = state.store->printStorePath(outPath);
|
||||
drv.outputs.insert_or_assign(
|
||||
i,
|
||||
DerivationOutputInputAddressed {
|
||||
DerivationOutput::InputAddressed {
|
||||
.path = std::move(outPath),
|
||||
});
|
||||
}
|
||||
|
@ -1372,7 +1393,7 @@ drvName, Bindings * attrs, Value & v)
|
|||
;
|
||||
case DrvHash::Kind::Deferred:
|
||||
for (auto & i : outputs) {
|
||||
drv.outputs.insert_or_assign(i, DerivationOutputDeferred {});
|
||||
drv.outputs.insert_or_assign(i, DerivationOutput::Deferred {});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1471,7 +1492,7 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args,
|
|||
}));
|
||||
|
||||
NixStringContext context;
|
||||
auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.storePath")).path;
|
||||
auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'")).path;
|
||||
/* Resolve symlinks in ‘path’, unless ‘path’ itself is a symlink
|
||||
directly in the store. The latter condition is necessary so
|
||||
e.g. nix-push does the right thing. */
|
||||
|
@ -1511,15 +1532,27 @@ static RegisterPrimOp primop_storePath({
|
|||
|
||||
static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
auto & arg = *args[0];
|
||||
|
||||
/* We don’t check the path right now, because we don’t want to
|
||||
throw if the path isn’t allowed, but just return false (and we
|
||||
can’t just catch the exception here because we still want to
|
||||
throw if something in the evaluation of `*args[0]` tries to
|
||||
throw if something in the evaluation of `arg` tries to
|
||||
access an unauthorized path). */
|
||||
auto path = realisePath(state, pos, *args[0], { .checkForPureEval = false });
|
||||
auto path = realisePath(state, pos, arg, { .checkForPureEval = false });
|
||||
|
||||
/* SourcePath doesn't know about trailing slash. */
|
||||
auto mustBeDir = arg.type() == nString
|
||||
&& (arg.string_view().ends_with("/")
|
||||
|| arg.string_view().ends_with("/."));
|
||||
|
||||
try {
|
||||
v.mkBool(state.checkSourcePath(path).pathExists());
|
||||
auto checked = state.checkSourcePath(path);
|
||||
auto exists = checked.pathExists();
|
||||
if (exists && mustBeDir) {
|
||||
exists = checked.lstat().type == InputAccessor::tDirectory;
|
||||
}
|
||||
v.mkBool(exists);
|
||||
} catch (SysError & e) {
|
||||
/* Don't give away info from errors while canonicalising
|
||||
‘path’ in restricted mode. */
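In Nix terms, the effect of the change above is roughly the following; a sketch assuming `./foo` exists (the paths are illustrative):

```nix
{
  # true if ./foo exists at all, whether it is a regular file or a directory
  plain = builtins.pathExists ./foo;

  # with a string argument ending in "/", the check additionally requires
  # ./foo to be a directory
  dirOnly = builtins.pathExists "${toString ./foo}/";
}
```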
|
||||
|
@ -1672,13 +1705,14 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
|
||||
static RegisterPrimOp primop_findFile(PrimOp {
|
||||
.name = "__findFile",
|
||||
.args = {"search path", "lookup path"},
|
||||
.args = {"search-path", "lookup-path"},
|
||||
.doc = R"(
|
||||
Look up the given path with the given search path.
|
||||
Find *lookup-path* in *search-path*.
|
||||
|
||||
A search path is represented list of [attribute sets](./values.md#attribute-set) with two attributes, `prefix`, and `path`.
|
||||
`prefix` is a relative path.
|
||||
`path` denotes a file system location; the exact syntax depends on the command line interface.
|
||||
A search path is represented as a list of [attribute sets](./values.md#attribute-set) with two attributes:
|
||||
- `prefix` is a relative path.
|
||||
- `path` denotes a file system location.
|
||||
The exact syntax depends on the command line interface.
|
||||
|
||||
Examples of search path attribute sets:
|
||||
|
||||
|
@ -1696,15 +1730,14 @@ static RegisterPrimOp primop_findFile(PrimOp {
|
|||
}
|
||||
```
|
||||
|
||||
The lookup algorithm checks each entry until a match is found, returning a [path value](@docroot@/language/values.html#type-path) of the match.
|
||||
The lookup algorithm checks each entry until a match is found, returning a [path value](@docroot@/language/values.html#type-path) of the match (an example follows the list below):
|
||||
|
||||
This is the process for each entry:
|
||||
If the lookup path matches `prefix`, then the remainder of the lookup path (the "suffix") is searched for within the directory denoted by `patch`.
|
||||
Note that the `path` may need to be downloaded at this point to look inside.
|
||||
If the suffix is found inside that directory, then the entry is a match;
|
||||
the combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
|
||||
- If *lookup-path* matches `prefix`, then the remainder of *lookup-path* (the "suffix") is searched for within the directory denoted by `path`.
|
||||
Note that the `path` may need to be downloaded at this point to look inside.
|
||||
- If the suffix is found inside that directory, then the entry is a match.
|
||||
The combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
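A minimal sketch of a direct call with an explicit search path (the location is illustrative):

```nix
builtins.findFile
  [ { prefix = "nixpkgs"; path = "/path/to/a/nixpkgs/checkout"; } ]
  "nixpkgs/lib"
# returns the path /path/to/a/nixpkgs/checkout/lib, provided it exists
```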
|
||||
|
||||
The syntax
|
||||
[Lookup path](@docroot@/language/constructs/lookup-path.md) expressions can be [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath):
|
||||
|
||||
```nix
|
||||
<nixpkgs>
|
||||
|
@ -1732,7 +1765,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
|
||||
auto path = realisePath(state, pos, *args[1]);
|
||||
|
||||
v.mkString(hashString(*ht, path.readFile()).to_string(Base16, false));
|
||||
v.mkString(hashString(*ht, path.readFile()).to_string(HashFormat::Base16, false));
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_hashFile({
|
||||
|
@ -1829,6 +1862,45 @@ static RegisterPrimOp primop_readDir({
|
|||
.fun = prim_readDir,
|
||||
});
|
||||
|
||||
/* Extend single element string context with another output. */
|
||||
static void prim_outputOf(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
SingleDerivedPath drvPath = state.coerceToSingleDerivedPath(pos, *args[0], "while evaluating the first argument to builtins.outputOf");
|
||||
|
||||
OutputNameView outputName = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument to builtins.outputOf");
|
||||
|
||||
state.mkSingleDerivedPathString(
|
||||
SingleDerivedPath::Built {
|
||||
.drvPath = make_ref<SingleDerivedPath>(drvPath),
|
||||
.output = std::string { outputName },
|
||||
},
|
||||
v);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_outputOf({
|
||||
.name = "__outputOf",
|
||||
.args = {"derivation-reference", "output-name"},
|
||||
.doc = R"(
|
||||
Return the output path of a derivation, literally or using a placeholder if needed.
|
||||
|
||||
If the derivation has a statically-known output path (i.e. the derivation output is input-addressed, or fixed content-addressed), the output path will just be returned.
|
||||
But if the derivation output is floating content-addressed, or if the derivation is itself not statically produced (i.e. is the output of another derivation), a placeholder will be returned instead.
|
||||
|
||||
*`derivation-reference`* must be a string that may contain a regular store path to a derivation, or may be a placeholder reference. If the derivation is produced by a derivation, you must explicitly select `drv.outPath`.
|
||||
This primop can be chained arbitrarily deeply.
|
||||
For instance,
|
||||
```nix
|
||||
builtins.outputOf
|
||||
> (builtins.outputOf myDrv "out")
|
||||
"out"
|
||||
```
|
||||
will return a placeholder for the output of the output of `myDrv`.
|
||||
|
||||
This primop corresponds to the `^` sigil for derivable paths, e.g. as part of installable syntax on the command line.
|
||||
)",
|
||||
.fun = prim_outputOf,
|
||||
.experimentalFeature = Xp::DynamicDerivations,
|
||||
});
|
||||
|
||||
/*************************************************************
|
||||
* Creating files
|
||||
|
@ -2006,7 +2078,7 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
|
|||
StorePathSet refs;
|
||||
|
||||
for (auto c : context) {
|
||||
if (auto p = std::get_if<NixStringContextElem::Opaque>(&c))
|
||||
if (auto p = std::get_if<NixStringContextElem::Opaque>(&c.raw))
|
||||
refs.insert(p->path);
|
||||
else
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
|
@ -2138,7 +2210,7 @@ static void addPath(
|
|||
|
||||
path = evalSettings.pureEval && expectedHash
|
||||
? path
|
||||
: state.checkSourcePath(CanonPath(path)).path.abs();
|
||||
: state.checkSourcePath(state.rootPath(CanonPath(path))).path.abs();
|
||||
|
||||
PathFilter filter = filterFun ? ([&](const Path & path) {
|
||||
auto st = lstat(path);
|
||||
|
@ -2171,9 +2243,7 @@ static void addPath(
|
|||
});
|
||||
|
||||
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
|
||||
StorePath dstPath = settings.readOnlyMode
|
||||
? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first
|
||||
: state.store->addToStore(name, path, method, htSHA256, filter, state.repair, refs);
|
||||
auto dstPath = state.rootPath(CanonPath(path)).fetchToStore(state.store, name, method, &filter, state.repair);
|
||||
if (expectedHash && expectedStorePath != dstPath)
|
||||
state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
|
||||
state.allowAndSetStorePathString(dstPath, v);
|
||||
|
@ -2190,7 +2260,7 @@ static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * arg
|
|||
{
|
||||
NixStringContext context;
|
||||
auto path = state.coerceToPath(pos, *args[1], context,
|
||||
"while evaluating the second argument (the path to filter) passed to builtins.filterSource");
|
||||
"while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'");
|
||||
state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource");
|
||||
addPath(state, pos, path.baseName(), path.path.abs(), args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
|
||||
}
|
||||
|
@ -2344,7 +2414,7 @@ static void prim_attrNames(EvalState & state, const PosIdx pos, Value * * args,
|
|||
(v.listElems()[n++] = state.allocValue())->mkString(state.symbols[i.name]);
|
||||
|
||||
std::sort(v.listElems(), v.listElems() + n,
|
||||
[](Value * v1, Value * v2) { return strcmp(v1->string.s, v2->string.s) < 0; });
|
||||
[](Value * v1, Value * v2) { return v1->string_view().compare(v2->string_view()) < 0; });
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_attrNames({
|
||||
|
@ -2485,7 +2555,7 @@ static void prim_removeAttrs(EvalState & state, const PosIdx pos, Value * * args
|
|||
names.reserve(args[1]->listSize());
|
||||
for (auto elem : args[1]->listItems()) {
|
||||
state.forceStringNoCtx(*elem, pos, "while evaluating the values of the second argument passed to builtins.removeAttrs");
|
||||
names.emplace_back(state.symbols.create(elem->string.s), nullptr);
|
||||
names.emplace_back(state.symbols.create(elem->string_view()), nullptr);
|
||||
}
|
||||
std::sort(names.begin(), names.end());
|
||||
|
||||
|
@ -2935,7 +3005,7 @@ static RegisterPrimOp primop_tail({
|
|||
.name = "__tail",
|
||||
.args = {"list"},
|
||||
.doc = R"(
|
||||
Return the second to last elements of a list; abort evaluation if
|
||||
Return the list without its first item; abort evaluation if
|
||||
the argument isn’t a list or is an empty list.
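A short example:

```nix
builtins.tail [ 1 2 3 ]  # evaluates to [ 2 3 ]
```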
|
||||
|
||||
> **Warning**
|
||||
|
@ -3695,7 +3765,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args,
|
|||
NixStringContext context; // discarded
|
||||
auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");
|
||||
|
||||
v.mkString(hashString(*ht, s).to_string(Base16, false));
|
||||
v.mkString(hashString(*ht, s).to_string(HashFormat::Base16, false));
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_hashString({
|
||||
|
@ -3709,6 +3779,101 @@ static RegisterPrimOp primop_hashString({
|
|||
.fun = prim_hashString,
|
||||
});
|
||||
|
||||
static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
state.forceAttrs(*args[0], pos, "while evaluating the first argument passed to builtins.convertHash");
|
||||
auto &inputAttrs = args[0]->attrs;
|
||||
|
||||
Bindings::iterator iteratorHash = getAttr(state, state.symbols.create("hash"), inputAttrs, "while locating the attribute 'hash'");
|
||||
auto hash = state.forceStringNoCtx(*iteratorHash->value, pos, "while evaluating the attribute 'hash'");
|
||||
|
||||
Bindings::iterator iteratorHashAlgo = inputAttrs->find(state.symbols.create("hashAlgo"));
|
||||
std::optional<HashType> ht = std::nullopt;
|
||||
if (iteratorHashAlgo != inputAttrs->end()) {
|
||||
ht = parseHashType(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
|
||||
}
|
||||
|
||||
Bindings::iterator iteratorToHashFormat = getAttr(state, state.symbols.create("toHashFormat"), args[0]->attrs, "while locating the attribute 'toHashFormat'");
|
||||
HashFormat hf = parseHashFormat(state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'"));
|
||||
|
||||
v.mkString(Hash::parseAny(hash, ht).to_string(hf, hf == HashFormat::SRI));
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_convertHash({
|
||||
.name = "__convertHash",
|
||||
.args = {"args"},
|
||||
.doc = R"(
|
||||
Return the specified representation of a hash string, based on the attributes presented in *args*:
|
||||
|
||||
- `hash`
|
||||
|
||||
The hash to be converted.
|
||||
The hash format is detected automatically.
|
||||
|
||||
- `hashAlgo`
|
||||
|
||||
The algorithm used to create the hash. Must be one of
|
||||
- `"md5"`
|
||||
- `"sha1"`
|
||||
- `"sha256"`
|
||||
- `"sha512"`
|
||||
|
||||
The attribute may be omitted when `hash` is an [SRI hash](https://www.w3.org/TR/SRI/#the-integrity-attribute) or when the hash is prefixed with the hash algorithm name followed by a colon.
|
||||
That `<hashAlgo>:<hashBody>` syntax is supported for backwards compatibility with existing tooling.
|
||||
|
||||
- `toHashFormat`
|
||||
|
||||
The format of the resulting hash. Must be one of
|
||||
- `"base16"`
|
||||
- `"base32"`
|
||||
- `"base64"`
|
||||
- `"sri"`
|
||||
|
||||
The result hash is the *toHashFormat* representation of the hash *hash*.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> Convert a SHA256 hash in Base16 to SRI:
|
||||
>
|
||||
> ```nix
|
||||
> builtins.convertHash {
|
||||
> hash = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
|
||||
> toHashFormat = "sri";
|
||||
> hashAlgo = "sha256";
|
||||
> }
|
||||
> ```
|
||||
>
|
||||
> "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> Convert a SHA256 hash in SRI to Base16:
|
||||
>
|
||||
> ```nix
|
||||
> builtins.convertHash {
|
||||
> hash = "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=";
|
||||
> toHashFormat = "base16";
|
||||
> }
|
||||
> ```
|
||||
>
|
||||
> "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> Convert a hash in the form `<hashAlgo>:<hashBody>` in Base16 to SRI:
|
||||
>
|
||||
> ```nix
|
||||
> builtins.convertHash {
|
||||
> hash = "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
|
||||
> toHashFormat = "sri";
|
||||
> }
|
||||
> ```
|
||||
>
|
||||
> "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="
|
||||
)",
|
||||
.fun = prim_convertHash,
|
||||
});
|
||||
|
||||
struct RegexCache
|
||||
{
|
||||
// TODO use C++20 transparent comparison when available
|
||||
|
@ -4331,9 +4496,9 @@ void EvalState::createBaseEnv()
|
|||
addConstant("__nixPath", v, {
|
||||
.type = nList,
|
||||
.doc = R"(
|
||||
The search path used to resolve angle bracket path lookups.
|
||||
List of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md).
|
||||
|
||||
Angle bracket expressions can be
|
||||
Lookup path expressions can be
|
||||
[desugared](https://en.wikipedia.org/wiki/Syntactic_sugar)
|
||||
using this and
|
||||
[`builtins.findFile`](./builtins.html#builtins-findFile):
|
||||
|
@ -4377,12 +4542,7 @@ void EvalState::createBaseEnv()
|
|||
|
||||
/* Note: we have to initialize the 'derivation' constant *after*
|
||||
building baseEnv/staticBaseEnv because it uses 'builtins'. */
|
||||
char code[] =
|
||||
#include "primops/derivation.nix.gen.hh"
|
||||
// the parser needs two NUL bytes as terminators; one of them
|
||||
// is implied by being a C string.
|
||||
"\0";
|
||||
eval(parse(code, sizeof(code), derivationInternal, {CanonPath::root}, staticBaseEnv), *vDerivation);
|
||||
evalFile(derivationInternal, *vDerivation);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -51,13 +51,13 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx p
|
|||
|
||||
NixStringContext context2;
|
||||
for (auto && c : context) {
|
||||
if (auto * ptr = std::get_if<NixStringContextElem::DrvDeep>(&c)) {
|
||||
if (auto * ptr = std::get_if<NixStringContextElem::DrvDeep>(&c.raw)) {
|
||||
context2.emplace(NixStringContextElem::Opaque {
|
||||
.path = ptr->drvPath
|
||||
});
|
||||
} else {
|
||||
/* Can reuse original item */
|
||||
context2.emplace(std::move(c));
|
||||
context2.emplace(std::move(c).raw);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -106,12 +106,15 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
|
|||
contextInfos[std::move(d.drvPath)].allOutputs = true;
|
||||
},
|
||||
[&](NixStringContextElem::Built && b) {
|
||||
contextInfos[std::move(b.drvPath)].outputs.emplace_back(std::move(b.output));
|
||||
// FIXME should eventually show string context as is, no
|
||||
// resolving here.
|
||||
auto drvPath = resolveDerivedPath(*state.store, *b.drvPath);
|
||||
contextInfos[std::move(drvPath)].outputs.emplace_back(std::move(b.output));
|
||||
},
|
||||
[&](NixStringContextElem::Opaque && o) {
|
||||
contextInfos[std::move(o.path)].path = true;
|
||||
},
|
||||
}, ((NixStringContextElem &&) i).raw());
|
||||
}, ((NixStringContextElem &&) i).raw);
|
||||
}
|
||||
|
||||
auto attrs = state.buildBindings(contextInfos.size());
|
||||
|
@ -222,7 +225,7 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
|
|||
for (auto elem : iter->value->listItems()) {
|
||||
auto outputName = state.forceStringNoCtx(*elem, iter->pos, "while evaluating an output name within a string context");
|
||||
context.emplace(NixStringContextElem::Built {
|
||||
.drvPath = namePath,
|
||||
.drvPath = makeConstantStorePathRef(namePath),
|
||||
.output = std::string { outputName },
|
||||
});
|
||||
}
|
||||
|
|
|
@ -133,7 +133,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
|
|||
|
||||
else if (attrName == "toPath") {
|
||||
state.forceValue(*attr.value, attr.pos);
|
||||
bool isEmptyString = attr.value->type() == nString && attr.value->string.s == std::string("");
|
||||
bool isEmptyString = attr.value->type() == nString && attr.value->string_view() == "";
|
||||
if (isEmptyString) {
|
||||
toPath = StorePathOrGap {};
|
||||
}
|
||||
|
|
|
@ -71,10 +71,10 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
|||
auto input = fetchers::Input::fromAttrs(std::move(attrs));
|
||||
|
||||
// FIXME: use name
|
||||
auto [tree, input2] = input.fetch(state.store);
|
||||
auto [storePath, input2] = input.fetch(state.store);
|
||||
|
||||
auto attrs2 = state.buildBindings(8);
|
||||
state.mkStorePathString(tree.storePath, attrs2.alloc(state.sOutPath));
|
||||
state.mkStorePathString(storePath, attrs2.alloc(state.sOutPath));
|
||||
if (input2.getRef())
|
||||
attrs2.alloc("branch").mkString(*input2.getRef());
|
||||
// Backward compatibility: set 'rev' to
|
||||
|
@ -86,7 +86,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
|||
attrs2.alloc("revCount").mkInt(*revCount);
|
||||
v.mkAttrs(attrs2);
|
||||
|
||||
state.allowPath(tree.storePath);
|
||||
state.allowPath(storePath);
|
||||
}
|
||||
|
||||
static RegisterPrimOp r_fetchMercurial({
|
||||
|
|